# bab69f164decdcffd394c7b09243b950b067cdd27542fc6ed7475babeacc1fcf
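# Tests for Django admin list filters: FieldListFilter/SimpleListFilter subclasses
# and ModelAdmin.list_filter behavior.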
import datetime
import sys
import unittest

from django.contrib.admin import (
    AllValuesFieldListFilter, BooleanFieldListFilter, EmptyFieldListFilter,
    FieldListFilter, ModelAdmin, RelatedOnlyFieldListFilter, SimpleListFilter,
    site,
)
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import RequestFactory, TestCase, override_settings

from .models import (
    Book, Bookmark, Department, Employee, ImprovedBook, TaggedItem,
)


def select_by(dictlist, key, value):
    return [x for x in dictlist if x[key] == value][0]


class DecadeListFilter(SimpleListFilter):

    def lookups(self, request, model_admin):
        return (
            ('the 80s', "the 1980's"),
            ('the 90s', "the 1990's"),
            ('the 00s', "the 2000's"),
            ('other', "other decades"),
        )

    def queryset(self, request, queryset):
        decade = self.value()
        if decade == 'the 80s':
            return queryset.filter(year__gte=1980, year__lte=1989)
        if decade == 'the 90s':
            return queryset.filter(year__gte=1990, year__lte=1999)
        if decade == 'the 00s':
            return queryset.filter(year__gte=2000, year__lte=2009)


class NotNinetiesListFilter(SimpleListFilter):
    title = "Not nineties books"
    parameter_name = "book_year"

    def lookups(self, request, model_admin):
        return (
            ('the 90s', "the 1990's"),
        )

    def queryset(self, request, queryset):
        if self.value() == 'the 90s':
            return queryset.filter(year__gte=1990, year__lte=1999)
        else:
            return queryset.exclude(year__gte=1990, year__lte=1999)


class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
    title = 'publication decade'
    parameter_name = 'publication-decade'


class DecadeListFilterWithoutTitle(DecadeListFilter):
    parameter_name = 'publication-decade'


class DecadeListFilterWithoutParameter(DecadeListFilter):
    title = 'publication decade'


class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):

    def lookups(self, request, model_admin):
        pass


class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):

    def queryset(self, request, queryset):
        raise 1 / 0


class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):

    def lookups(self, request, model_admin):
        qs = model_admin.get_queryset(request)
        if qs.filter(year__gte=1980, year__lte=1989).exists():
            yield ('the 80s', "the 1980's")
        if qs.filter(year__gte=1990, year__lte=1999).exists():
            yield ('the 90s', "the 1990's")
        if qs.filter(year__gte=2000, year__lte=2009).exists():
            yield ('the 00s', "the 2000's")


class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
    title = 'publication decade'
    parameter_name = 'decade__in'  # Ends with '__in"


class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
    title = 'publication decade'
    parameter_name = 'decade__isnull'  # Ends with '__isnull"


class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter):
    title = 'department'
    parameter_name = 'department'

    def lookups(self, request, model_admin):
        return sorted({
            (employee.department.id,  # Intentionally not a string (Refs #19318)
             employee.department.code)
            for employee in model_admin.get_queryset(request).all()
        })

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(department__id=self.value())


class DepartmentListFilterLookupWithUnderscoredParameter(DepartmentListFilterLookupWithNonStringValue):
    parameter_name = 'department__whatever'


class DepartmentListFilterLookupWithDynamicValue(DecadeListFilterWithTitleAndParameter):

    def lookups(self, request, model_admin):
        if self.value() == 'the 80s':
            return (('the 90s', "the 1990's"),)
        elif self.value() == 'the 90s':
            return (('the 80s', "the 1980's"),)
        else:
            return (('the 80s', "the 1980's"), ('the 90s', "the 1990's"),)


class EmployeeNameCustomDividerFilter(FieldListFilter):
    list_separator = '|'

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.lookup_kwarg = '%s__in' % field_path
        super().__init__(field, request, params, model, model_admin, field_path)

    def expected_parameters(self):
        return [self.lookup_kwarg]


class CustomUserAdmin(UserAdmin):
    list_filter = ('books_authored', 'books_contributed')


class BookAdmin(ModelAdmin):
    list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no', 'availability')
    ordering = ('-id',)


class BookAdminWithTupleBooleanFilter(BookAdmin):
    list_filter = (
        'year',
        'author',
        'contributors',
        ('is_best_seller', BooleanFieldListFilter),
        'date_registered',
        'no',
        ('availability', BooleanFieldListFilter),
    )


class BookAdminWithUnderscoreLookupAndTuple(BookAdmin):
    list_filter = (
        'year',
        ('author__email', AllValuesFieldListFilter),
        'contributors',
        'is_best_seller',
        'date_registered',
        'no',
    )


class BookAdminWithCustomQueryset(ModelAdmin):

    def __init__(self, user, *args, **kwargs):
        self.user = user
        super().__init__(*args, **kwargs)

    list_filter = ('year',)

    def get_queryset(self, request):
        return super().get_queryset(request).filter(author=self.user)


class BookAdminRelatedOnlyFilter(ModelAdmin):
    list_filter = (
        'year', 'is_best_seller', 'date_registered', 'no',
        ('author', RelatedOnlyFieldListFilter),
        ('contributors', RelatedOnlyFieldListFilter),
        ('employee__department', RelatedOnlyFieldListFilter),
    )
    ordering = ('-id',)


class DecadeFilterBookAdmin(ModelAdmin):
    list_filter = ('author', DecadeListFilterWithTitleAndParameter)
    ordering = ('-id',)


class NotNinetiesListFilterAdmin(ModelAdmin):
    list_filter = (NotNinetiesListFilter,)


class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
    list_filter = (DecadeListFilterWithoutTitle,)


class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
    list_filter = (DecadeListFilterWithoutParameter,)


class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
    list_filter = (DecadeListFilterWithNoneReturningLookups,)


class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
    list_filter = (DecadeListFilterWithFailingQueryset,)


class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
    list_filter = (DecadeListFilterWithQuerysetBasedLookups,)


class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
    list_filter = (DecadeListFilterParameterEndsWith__In,)


class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
    list_filter = (DecadeListFilterParameterEndsWith__Isnull,)


class EmployeeAdmin(ModelAdmin):
    list_display = ['name', 'department']
    list_filter = ['department']


class EmployeeCustomDividerFilterAdmin(EmployeeAdmin):
    list_filter = [
        ('name', EmployeeNameCustomDividerFilter),
    ]


class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithNonStringValue]


class DepartmentFilterUnderscoredEmployeeAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithUnderscoredParameter]


class DepartmentFilterDynamicValueBookAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithDynamicValue]


class BookmarkAdminGenericRelation(ModelAdmin):
    list_filter = ['tags__tag']


class BookAdminWithEmptyFieldListFilter(ModelAdmin):
    list_filter = [
        ('author', EmptyFieldListFilter),
        ('title', EmptyFieldListFilter),
        ('improvedbook', EmptyFieldListFilter),
    ]


class DepartmentAdminWithEmptyFieldListFilter(ModelAdmin):
    list_filter = [
        ('description', EmptyFieldListFilter),
        ('employee', EmptyFieldListFilter),
    ]


class ListFiltersTests(TestCase):
    request_factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.today = datetime.date.today()
        cls.tomorrow = cls.today + datetime.timedelta(days=1)
        cls.one_week_ago = cls.today - datetime.timedelta(days=7)
        if cls.today.month == 12:
            cls.next_month = cls.today.replace(year=cls.today.year + 1, month=1, day=1)
        else:
            cls.next_month = cls.today.replace(month=cls.today.month + 1, day=1)
        cls.next_year = cls.today.replace(year=cls.today.year + 1, month=1, day=1)

        # Users
        cls.alfred = User.objects.create_superuser('alfred', '[email protected]', 'password')
        cls.bob = User.objects.create_user('bob', '[email protected]')
        cls.lisa = User.objects.create_user('lisa', '[email protected]')

        # Books
        cls.djangonaut_book = Book.objects.create(
            title='Djangonaut: an art of living', year=2009,
            author=cls.alfred, is_best_seller=True, date_registered=cls.today,
            availability=True,
        )
        cls.bio_book = Book.objects.create(
            title='Django: a biography', year=1999, author=cls.alfred,
            is_best_seller=False, no=207, availability=False,
        )
        cls.django_book = Book.objects.create(
            title='The Django Book', year=None, author=cls.bob,
            is_best_seller=None, date_registered=cls.today, no=103,
            availability=True,
        )
        cls.guitar_book = Book.objects.create(
            title='Guitar for dummies', year=2002, is_best_seller=True,
            date_registered=cls.one_week_ago, availability=None,
        )
        cls.guitar_book.contributors.set([cls.bob, cls.lisa])

        # Departments
        cls.dev = Department.objects.create(code='DEV', description='Development')
        cls.design = Department.objects.create(code='DSN', description='Design')

        # Employees
        cls.john = Employee.objects.create(name='John Blue', department=cls.dev)
        cls.jack = Employee.objects.create(name='Jack Red', department=cls.design)

    def test_choicesfieldlistfilter_has_none_choice(self):
        """
        The last choice is for the None value.
""" class BookmarkChoicesAdmin(ModelAdmin): list_display = ['none_or_null'] list_filter = ['none_or_null'] modeladmin = BookmarkChoicesAdmin(Bookmark, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] choices = list(filterspec.choices(changelist)) self.assertEqual(choices[-1]['display'], 'None') self.assertEqual(choices[-1]['query_string'], '?none_or_null__isnull=True') def test_datefieldlistfilter(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist(request) request = self.request_factory.get('/', { 'date_registered__gte': self.today, 'date_registered__lt': self.tomorrow}, ) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "Today") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( self.today, self.tomorrow, ) ) request = self.request_factory.get('/', { 'date_registered__gte': self.today.replace(day=1), 'date_registered__lt': self.next_month}, ) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month): # In case one week ago is in the same month. self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) else: self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "This month") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( self.today.replace(day=1), self.next_month, ) ) request = self.request_factory.get('/', { 'date_registered__gte': self.today.replace(month=1, day=1), 'date_registered__lt': self.next_year}, ) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) if self.today.year == self.one_week_ago.year: # In case one week ago is in the same year. 
self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) else: self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "This year") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( self.today.replace(month=1, day=1), self.next_year, ) ) request = self.request_factory.get('/', { 'date_registered__gte': str(self.one_week_ago), 'date_registered__lt': str(self.tomorrow), }) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "Past 7 days") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( str(self.one_week_ago), str(self.tomorrow), ) ) # Null/not null queries request = self.request_factory.get('/', {'date_registered__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 1) self.assertEqual(queryset[0], self.bio_book) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), 'display', 'No date') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?date_registered__isnull=True') request = self.request_factory.get('/', {'date_registered__isnull': 'False'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 3) self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), 'display', 'Has date') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?date_registered__isnull=False') @unittest.skipIf( sys.platform == 'win32', "Windows doesn't support setting a timezone that differs from the " "system timezone." 
) @override_settings(USE_TZ=True) def test_datefieldlistfilter_with_time_zone_support(self): # Regression for #17830 self.test_datefieldlistfilter() def test_allvaluesfieldlistfilter(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'year__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'year') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?year__isnull=True') request = self.request_factory.get('/', {'year': '2002'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'year') choices = list(filterspec.choices(changelist)) self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?year=2002') def test_allvaluesfieldlistfilter_custom_qs(self): # Make sure that correct filters are returned with custom querysets modeladmin = BookAdminWithCustomQueryset(self.alfred, Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] choices = list(filterspec.choices(changelist)) # Should have 'All', 1999 and 2009 options i.e. the subset of years of # books written by alfred (which is the filtering criteria set by # BookAdminWithCustomQueryset.get_queryset()) self.assertEqual(3, len(choices)) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['query_string'], '?year=1999') self.assertEqual(choices[2]['query_string'], '?year=2009') def test_relatedfieldlistfilter_foreignkey(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that all users are present in the author's list filter filterspec = changelist.get_filters(request)[0][1] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) request = self.request_factory.get('/', {'author__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'Verbose Author') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?author__isnull=True') request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'Verbose Author') # order of choices depends on User model, which has no order choice = 
select_by(filterspec.choices(changelist), "display", "alfred") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk) def test_relatedfieldlistfilter_foreignkey_ordering(self): """RelatedFieldListFilter ordering respects ModelAdmin.ordering.""" class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('name',) class BookAdmin(ModelAdmin): list_filter = ('employee',) site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.jack.pk, 'Jack Red'), (self.john.pk, 'John Blue')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_foreignkey_ordering_reverse(self): class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('-name',) class BookAdmin(ModelAdmin): list_filter = ('employee',) site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.john.pk, 'John Blue'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_foreignkey_default_ordering(self): """RelatedFieldListFilter ordering respects Model.ordering.""" class BookAdmin(ModelAdmin): list_filter = ('employee',) self.addCleanup(setattr, Employee._meta, 'ordering', Employee._meta.ordering) Employee._meta.ordering = ('name',) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.jack.pk, 'Jack Red'), (self.john.pk, 'John Blue')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_manytomany(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that all users are present in the contrib's list filter filterspec = changelist.get_filters(request)[0][2] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) request = self.request_factory.get('/', {'contributors__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][2] self.assertEqual(filterspec.title, 'Verbose Contributors') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True') request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][2] 
self.assertEqual(filterspec.title, 'Verbose Contributors') choice = select_by(filterspec.choices(changelist), "display", "bob") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk) def test_relatedfieldlistfilter_reverse_relationships(self): modeladmin = CustomUserAdmin(User, site) # FK relationship ----- request = self.request_factory.get('/', {'books_authored__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.lisa]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'book') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True') request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'book') choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title) self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk) # M2M relationship ----- request = self.request_factory.get('/', {'books_contributed__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.alfred]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'book') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True') request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'book') choice = select_by(filterspec.choices(changelist), "display", self.django_book.title) self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk) # With one book, the list filter should appear because there is also a # (None) option. Book.objects.exclude(pk=self.djangonaut_book.pk).delete() filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 2) # With no books remaining, no list filters should appear. 
Book.objects.all().delete() filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 0) def test_relatedfieldlistfilter_reverse_relationships_default_ordering(self): self.addCleanup(setattr, Book._meta, 'ordering', Book._meta.ordering) Book._meta.ordering = ('title',) modeladmin = CustomUserAdmin(User, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [ (self.bio_book.pk, 'Django: a biography'), (self.djangonaut_book.pk, 'Djangonaut: an art of living'), (self.guitar_book.pk, 'Guitar for dummies'), (self.django_book.pk, 'The Django Book') ] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_foreignkey(self): modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that only actual authors are present in author's list filter filterspec = changelist.get_filters(request)[0][4] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_relatedonlyfieldlistfilter_foreignkey_reverse_relationships(self): class EmployeeAdminReverseRelationship(ModelAdmin): list_filter = ( ('book', RelatedOnlyFieldListFilter), ) self.djangonaut_book.employee = self.john self.djangonaut_book.save() self.django_book.employee = self.jack self.django_book.save() modeladmin = EmployeeAdminReverseRelationship(Employee, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.lookup_choices, [ (self.djangonaut_book.pk, 'Djangonaut: an art of living'), (self.django_book.pk, 'The Django Book'), ]) def test_relatedonlyfieldlistfilter_manytomany_reverse_relationships(self): class UserAdminReverseRelationship(ModelAdmin): list_filter = ( ('books_contributed', RelatedOnlyFieldListFilter), ) modeladmin = UserAdminReverseRelationship(User, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual( filterspec.lookup_choices, [(self.guitar_book.pk, 'Guitar for dummies')], ) def test_relatedonlyfieldlistfilter_foreignkey_ordering(self): """RelatedOnlyFieldListFilter ordering respects ModelAdmin.ordering.""" class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('name',) class BookAdmin(ModelAdmin): list_filter = ( ('employee', RelatedOnlyFieldListFilter), ) albert = Employee.objects.create(name='Albert Green', department=self.dev) self.djangonaut_book.employee = albert self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(albert.pk, 'Albert Green'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_foreignkey_default_ordering(self): """RelatedOnlyFieldListFilter ordering respects Meta.ordering.""" class 
BookAdmin(ModelAdmin): list_filter = ( ('employee', RelatedOnlyFieldListFilter), ) albert = Employee.objects.create(name='Albert Green', department=self.dev) self.djangonaut_book.employee = albert self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() self.addCleanup(setattr, Employee._meta, 'ordering', Employee._meta.ordering) Employee._meta.ordering = ('name',) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(albert.pk, 'Albert Green'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_underscorelookup_foreignkey(self): Department.objects.create(code='TEST', description='Testing') self.djangonaut_book.employee = self.john self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Only actual departments should be present in employee__department's # list filter. filterspec = changelist.get_filters(request)[0][6] expected = [ (self.dev.code, str(self.dev)), (self.design.code, str(self.design)), ] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_relatedonlyfieldlistfilter_manytomany(self): modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that only actual contributors are present in contrib's list filter filterspec = changelist.get_filters(request)[0][5] expected = [(self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_listfilter_genericrelation(self): django_bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/') python_bookmark = Bookmark.objects.create(url='https://www.python.org/') kernel_bookmark = Bookmark.objects.create(url='https://www.kernel.org/') TaggedItem.objects.create(content_object=django_bookmark, tag='python') TaggedItem.objects.create(content_object=python_bookmark, tag='python') TaggedItem.objects.create(content_object=kernel_bookmark, tag='linux') modeladmin = BookmarkAdminGenericRelation(Bookmark, site) request = self.request_factory.get('/', {'tags__tag': 'python'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) expected = [python_bookmark, django_bookmark] self.assertEqual(list(queryset), expected) def test_booleanfieldlistfilter(self): modeladmin = BookAdmin(Book, site) self.verify_booleanfieldlistfilter(modeladmin) def test_booleanfieldlistfilter_tuple(self): modeladmin = BookAdminWithTupleBooleanFilter(Book, site) self.verify_booleanfieldlistfilter(modeladmin) def verify_booleanfieldlistfilter(self, modeladmin): request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) request = self.request_factory.get('/', {'is_best_seller__exact': 0}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected 
filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "No") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__exact=0') request = self.request_factory.get('/', {'is_best_seller__exact': 1}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "Yes") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__exact=1') request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "Unknown") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True') def test_booleanfieldlistfilter_choices(self): modeladmin = BookAdmin(Book, site) self.verify_booleanfieldlistfilter_choices(modeladmin) def test_booleanfieldlistfilter_tuple_choices(self): modeladmin = BookAdminWithTupleBooleanFilter(Book, site) self.verify_booleanfieldlistfilter_choices(modeladmin) def verify_booleanfieldlistfilter_choices(self, modeladmin): # False. request = self.request_factory.get('/', {'availability__exact': 0}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) filterspec = changelist.get_filters(request)[0][6] self.assertEqual(filterspec.title, 'availability') choice = select_by(filterspec.choices(changelist), 'display', 'Paid') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?availability__exact=0') # True. request = self.request_factory.get('/', {'availability__exact': 1}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) filterspec = changelist.get_filters(request)[0][6] self.assertEqual(filterspec.title, 'availability') choice = select_by(filterspec.choices(changelist), 'display', 'Free') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?availability__exact=1') # None. 
request = self.request_factory.get('/', {'availability__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book]) filterspec = changelist.get_filters(request)[0][6] self.assertEqual(filterspec.title, 'availability') choice = select_by(filterspec.choices(changelist), 'display', 'Obscure') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?availability__isnull=True') # All. request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual( list(queryset), [self.guitar_book, self.django_book, self.bio_book, self.djangonaut_book], ) filterspec = changelist.get_filters(request)[0][6] self.assertEqual(filterspec.title, 'availability') choice = select_by(filterspec.choices(changelist), 'display', 'All') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?') def test_fieldlistfilter_underscorelookup_tuple(self): """ Ensure ('fieldpath', ClassName ) lookups pass lookup_allowed checks when fieldpath contains double underscore in value (#19182). """ modeladmin = BookAdminWithUnderscoreLookupAndTuple(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) request = self.request_factory.get('/', {'author__email': '[email protected]'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book, self.djangonaut_book]) def test_fieldlistfilter_invalid_lookup_parameters(self): """Filtering by an invalid value.""" modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'author__id__exact': 'StringNotInteger!'}) request.user = self.alfred with self.assertRaises(IncorrectLookupParameters): modeladmin.get_changelist_instance(request) def test_simplelistfilter(self): modeladmin = DecadeFilterBookAdmin(Book, site) # Make sure that the first option is 'All' --------------------------- request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id'))) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') # Look for books in the 1980s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 80s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), []) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'the 1980\'s') self.assertIs(choices[1]['selected'], 
True) self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s') # Look for books in the 1990s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s') # Look for books in the 2000s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 00s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[3]['display'], 'the 2000\'s') self.assertIs(choices[3]['selected'], True) self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s') # Combine multiple filters ------------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.djangonaut_book]) # Make sure the correct choices are selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[3]['display'], 'the 2000\'s') self.assertIs(choices[3]['selected'], True) self.assertEqual( choices[3]['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk ) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'Verbose Author') choice = select_by(filterspec.choices(changelist), "display", "alfred") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk) def test_listfilter_without_title(self): """ Any filter must define a title. """ modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred msg = "The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'." with self.assertRaisesMessage(ImproperlyConfigured, msg): modeladmin.get_changelist_instance(request) def test_simplelistfilter_without_parameter(self): """ Any SimpleListFilter must define a parameter_name. """ modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred msg = "The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'." 
with self.assertRaisesMessage(ImproperlyConfigured, msg): modeladmin.get_changelist_instance(request) def test_simplelistfilter_with_none_returning_lookups(self): """ A SimpleListFilter lookups method can return None but disables the filter completely. """ modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 0) def test_filter_with_failing_queryset(self): """ When a filter's queryset method fails, it fails loudly and the corresponding exception doesn't get swallowed (#17828). """ modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred with self.assertRaises(ZeroDivisionError): modeladmin.get_changelist_instance(request) def test_simplelistfilter_with_queryset_based_lookups(self): modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(len(choices), 3) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'the 1990\'s') self.assertIs(choices[1]['selected'], False) self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s') self.assertEqual(choices[2]['display'], 'the 2000\'s') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s') def test_two_characters_long_field(self): """ list_filter works with two-characters long field names (#16080). """ modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'no': '207'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) filterspec = changelist.get_filters(request)[0][5] self.assertEqual(filterspec.title, 'number') choices = list(filterspec.choices(changelist)) self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?no=207') def test_parameter_ends_with__in__or__isnull(self): """ A SimpleListFilter's parameter name is not mistaken for a model field if it ends with '__isnull' or '__in' (#17091). 
""" # When it ends with '__in' ----------------------------------------- modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site) request = self.request_factory.get('/', {'decade__in': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s') # When it ends with '__isnull' --------------------------------------- modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site) request = self.request_factory.get('/', {'decade__isnull': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s') def test_lookup_with_non_string_value(self): """ Ensure choices are set the selected class when using non-string values for lookups in SimpleListFilters (#19318). """ modeladmin = DepartmentFilterEmployeeAdmin(Employee, site) request = self.request_factory.get('/', {'department': self.john.department.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'DEV') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.department.pk) def test_lookup_with_non_string_value_underscored(self): """ Ensure SimpleListFilter lookups pass lookup_allowed checks when parameter_name attribute contains double-underscore value (#19182). """ modeladmin = DepartmentFilterUnderscoredEmployeeAdmin(Employee, site) request = self.request_factory.get('/', {'department__whatever': self.john.department.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'DEV') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department__whatever=%s' % self.john.department.pk) def test_fk_with_to_field(self): """ A filter on a FK respects the FK's to_field attribute (#17972). 
""" modeladmin = EmployeeAdmin(Employee, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.jack, self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'Development') self.assertIs(choices[1]['selected'], False) self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV') self.assertEqual(choices[2]['display'], 'Design') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN') # Filter by Department=='Development' -------------------------------- request = self.request_factory.get('/', {'department__code__exact': 'DEV'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], False) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'Development') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV') self.assertEqual(choices[2]['display'], 'Design') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN') def test_lookup_with_dynamic_value(self): """ Ensure SimpleListFilter can access self.value() inside the lookup. """ modeladmin = DepartmentFilterDynamicValueBookAdmin(Book, site) def _test_choices(request, expected_displays): request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = tuple(c['display'] for c in filterspec.choices(changelist)) self.assertEqual(choices, expected_displays) _test_choices(self.request_factory.get('/', {}), ("All", "the 1980's", "the 1990's")) _test_choices(self.request_factory.get('/', {'publication-decade': 'the 80s'}), ("All", "the 1990's")) _test_choices(self.request_factory.get('/', {'publication-decade': 'the 90s'}), ("All", "the 1980's")) def test_list_filter_queryset_filtered_by_default(self): """ A list filter that filters the queryset by default gives the correct full_result_count. 
""" modeladmin = NotNinetiesListFilterAdmin(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) changelist.get_results(request) self.assertEqual(changelist.full_result_count, 4) def test_emptylistfieldfilter(self): empty_description = Department.objects.create(code='EMPT', description='') none_description = Department.objects.create(code='NONE', description=None) empty_title = Book.objects.create(title='', author=self.alfred) department_admin = DepartmentAdminWithEmptyFieldListFilter(Department, site) book_admin = BookAdminWithEmptyFieldListFilter(Book, site) tests = [ # Allows nulls and empty strings. ( department_admin, {'description__isempty': '1'}, [empty_description, none_description], ), ( department_admin, {'description__isempty': '0'}, [self.dev, self.design], ), # Allows nulls. (book_admin, {'author__isempty': '1'}, [self.guitar_book]), ( book_admin, {'author__isempty': '0'}, [self.django_book, self.bio_book, self.djangonaut_book, empty_title], ), # Allows empty strings. (book_admin, {'title__isempty': '1'}, [empty_title]), ( book_admin, {'title__isempty': '0'}, [self.django_book, self.bio_book, self.djangonaut_book, self.guitar_book], ), ] for modeladmin, query_string, expected_result in tests: with self.subTest( modeladmin=modeladmin.__class__.__name__, query_string=query_string, ): request = self.request_factory.get('/', query_string) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertCountEqual(queryset, expected_result) def test_emptylistfieldfilter_reverse_relationships(self): class UserAdminReverseRelationship(UserAdmin): list_filter = ( ('books_contributed', EmptyFieldListFilter), ) ImprovedBook.objects.create(book=self.guitar_book) no_employees = Department.objects.create(code='NONE', description=None) book_admin = BookAdminWithEmptyFieldListFilter(Book, site) department_admin = DepartmentAdminWithEmptyFieldListFilter(Department, site) user_admin = UserAdminReverseRelationship(User, site) tests = [ # Reverse one-to-one relationship. ( book_admin, {'improvedbook__isempty': '1'}, [self.django_book, self.bio_book, self.djangonaut_book], ), (book_admin, {'improvedbook__isempty': '0'}, [self.guitar_book]), # Reverse foreign key relationship. (department_admin, {'employee__isempty': '1'}, [no_employees]), (department_admin, {'employee__isempty': '0'}, [self.dev, self.design]), # Reverse many-to-many relationship. 
(user_admin, {'books_contributed__isempty': '1'}, [self.alfred]), (user_admin, {'books_contributed__isempty': '0'}, [self.bob, self.lisa]), ] for modeladmin, query_string, expected_result in tests: with self.subTest( modeladmin=modeladmin.__class__.__name__, query_string=query_string, ): request = self.request_factory.get('/', query_string) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertCountEqual(queryset, expected_result) def test_emptylistfieldfilter_genericrelation(self): class BookmarkGenericRelation(ModelAdmin): list_filter = ( ('tags', EmptyFieldListFilter), ) modeladmin = BookmarkGenericRelation(Bookmark, site) django_bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/') python_bookmark = Bookmark.objects.create(url='https://www.python.org/') none_tags = Bookmark.objects.create(url='https://www.kernel.org/') TaggedItem.objects.create(content_object=django_bookmark, tag='python') TaggedItem.objects.create(content_object=python_bookmark, tag='python') tests = [ ({'tags__isempty': '1'}, [none_tags]), ({'tags__isempty': '0'}, [django_bookmark, python_bookmark]), ] for query_string, expected_result in tests: with self.subTest(query_string=query_string): request = self.request_factory.get('/', query_string) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertCountEqual(queryset, expected_result) def test_emptylistfieldfilter_choices(self): modeladmin = BookAdminWithEmptyFieldListFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'Verbose Author') choices = list(filterspec.choices(changelist)) self.assertEqual(len(choices), 3) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'Empty') self.assertIs(choices[1]['selected'], False) self.assertEqual(choices[1]['query_string'], '?author__isempty=1') self.assertEqual(choices[2]['display'], 'Not empty') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?author__isempty=0') def test_emptylistfieldfilter_non_empty_field(self): class EmployeeAdminWithEmptyFieldListFilter(ModelAdmin): list_filter = [('department', EmptyFieldListFilter)] modeladmin = EmployeeAdminWithEmptyFieldListFilter(Employee, site) request = self.request_factory.get('/') request.user = self.alfred msg = ( "The list filter 'EmptyFieldListFilter' cannot be used with field " "'department' which doesn't allow empty strings and nulls." ) with self.assertRaisesMessage(ImproperlyConfigured, msg): modeladmin.get_changelist_instance(request) def test_emptylistfieldfilter_invalid_lookup_parameters(self): modeladmin = BookAdminWithEmptyFieldListFilter(Book, site) request = self.request_factory.get('/', {'author__isempty': 42}) request.user = self.alfred with self.assertRaises(IncorrectLookupParameters): modeladmin.get_changelist_instance(request) def test_lookup_using_custom_divider(self): """ Filter __in lookups with a custom divider. 
""" jane = Employee.objects.create(name='Jane,Green', department=self.design) modeladmin = EmployeeCustomDividerFilterAdmin(Employee, site) employees = [jane, self.jack] request = self.request_factory.get( '/', {'name__in': "|".join(e.name for e in employees)} ) # test for lookup with custom divider request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), employees) # test for lookup with comma in the lookup string request = self.request_factory.get('/', {'name': jane.name}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [jane])
81174282115b8a8906e4445874b57fb77b358dce48c3fbc05ce5e85afda19df5
import logging from contextlib import contextmanager from io import StringIO from admin_scripts.tests import AdminScriptTestCase from django.conf import settings from django.core import mail from django.core.exceptions import ( DisallowedHost, PermissionDenied, SuspiciousOperation, ) from django.core.files.temp import NamedTemporaryFile from django.core.management import color from django.http.multipartparser import MultiPartParserError from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.utils import LoggingCaptureMixin from django.utils.log import ( DEFAULT_LOGGING, AdminEmailHandler, CallbackFilter, RequireDebugFalse, RequireDebugTrue, ServerFormatter, ) from django.views.debug import ExceptionReporter from . import views from .logconfig import MyEmailBackend class LoggingFiltersTest(SimpleTestCase): def test_require_debug_false_filter(self): """ Test the RequireDebugFalse filter class. """ filter_ = RequireDebugFalse() with self.settings(DEBUG=True): self.assertIs(filter_.filter("record is not used"), False) with self.settings(DEBUG=False): self.assertIs(filter_.filter("record is not used"), True) def test_require_debug_true_filter(self): """ Test the RequireDebugTrue filter class. """ filter_ = RequireDebugTrue() with self.settings(DEBUG=True): self.assertIs(filter_.filter("record is not used"), True) with self.settings(DEBUG=False): self.assertIs(filter_.filter("record is not used"), False) class SetupDefaultLoggingMixin: @classmethod def setUpClass(cls): super().setUpClass() logging.config.dictConfig(DEFAULT_LOGGING) cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING) class DefaultLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase): def test_django_logger(self): """ The 'django' base logger only output anything when DEBUG=True. """ self.logger.error("Hey, this is an error.") self.assertEqual(self.logger_output.getvalue(), '') with self.settings(DEBUG=True): self.logger.error("Hey, this is an error.") self.assertEqual(self.logger_output.getvalue(), 'Hey, this is an error.\n') @override_settings(DEBUG=True) def test_django_logger_warning(self): self.logger.warning('warning') self.assertEqual(self.logger_output.getvalue(), 'warning\n') @override_settings(DEBUG=True) def test_django_logger_info(self): self.logger.info('info') self.assertEqual(self.logger_output.getvalue(), 'info\n') @override_settings(DEBUG=True) def test_django_logger_debug(self): self.logger.debug('debug') self.assertEqual(self.logger_output.getvalue(), '') class LoggingAssertionMixin: def assertLogsRequest(self, url, level, msg, status_code, logger='django.request', exc_class=None): with self.assertLogs(logger, level) as cm: try: self.client.get(url) except views.UncaughtException: pass self.assertEqual( len(cm.records), 1, "Wrong number of calls for logger %r in %r level." 
% (logger, level) ) record = cm.records[0] self.assertEqual(record.getMessage(), msg) self.assertEqual(record.status_code, status_code) if exc_class: self.assertIsNotNone(record.exc_info) self.assertEqual(record.exc_info[0], exc_class) @override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls') class HandlerLoggingTests(SetupDefaultLoggingMixin, LoggingAssertionMixin, LoggingCaptureMixin, SimpleTestCase): def test_page_found_no_warning(self): self.client.get('/innocent/') self.assertEqual(self.logger_output.getvalue(), '') def test_redirect_no_warning(self): self.client.get('/redirect/') self.assertEqual(self.logger_output.getvalue(), '') def test_page_not_found_warning(self): self.assertLogsRequest( url='/does_not_exist/', level='WARNING', status_code=404, msg='Not Found: /does_not_exist/', ) def test_page_not_found_raised(self): self.assertLogsRequest( url='/does_not_exist_raised/', level='WARNING', status_code=404, msg='Not Found: /does_not_exist_raised/', ) def test_uncaught_exception(self): self.assertLogsRequest( url='/uncaught_exception/', level='ERROR', status_code=500, msg='Internal Server Error: /uncaught_exception/', exc_class=views.UncaughtException, ) def test_internal_server_error(self): self.assertLogsRequest( url='/internal_server_error/', level='ERROR', status_code=500, msg='Internal Server Error: /internal_server_error/', ) def test_internal_server_error_599(self): self.assertLogsRequest( url='/internal_server_error/?status=599', level='ERROR', status_code=599, msg='Unknown Status Code: /internal_server_error/', ) def test_permission_denied(self): self.assertLogsRequest( url='/permission_denied/', level='WARNING', status_code=403, msg='Forbidden (Permission denied): /permission_denied/', exc_class=PermissionDenied, ) def test_multi_part_parser_error(self): self.assertLogsRequest( url='/multi_part_parser_error/', level='WARNING', status_code=400, msg='Bad request (Unable to parse request body): /multi_part_parser_error/', exc_class=MultiPartParserError, ) @override_settings( DEBUG=True, USE_I18N=True, LANGUAGES=[('en', 'English')], MIDDLEWARE=[ 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', ], ROOT_URLCONF='logging_tests.urls_i18n', ) class I18nLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase): def test_i18n_page_found_no_warning(self): self.client.get('/exists/') self.client.get('/en/exists/') self.assertEqual(self.logger_output.getvalue(), '') def test_i18n_page_not_found_warning(self): self.client.get('/this_does_not/') self.client.get('/en/nor_this/') self.assertEqual(self.logger_output.getvalue(), 'Not Found: /this_does_not/\nNot Found: /en/nor_this/\n') class CallbackFilterTest(SimpleTestCase): def test_sense(self): f_false = CallbackFilter(lambda r: False) f_true = CallbackFilter(lambda r: True) self.assertFalse(f_false.filter('record')) self.assertTrue(f_true.filter('record')) def test_passes_on_record(self): collector = [] def _callback(record): collector.append(record) return True f = CallbackFilter(_callback) f.filter("a record") self.assertEqual(collector, ["a record"]) class AdminEmailHandlerTest(SimpleTestCase): logger = logging.getLogger('django') request_factory = RequestFactory() def get_admin_email_handler(self, logger): # AdminEmailHandler does not get filtered out # even with DEBUG=True. 
return [ h for h in logger.handlers if h.__class__.__name__ == "AdminEmailHandler" ][0] def test_fail_silently(self): admin_email_handler = self.get_admin_email_handler(self.logger) self.assertTrue(admin_email_handler.connection().fail_silently) @override_settings( ADMINS=[('whatever admin', '[email protected]')], EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-' ) def test_accepts_args(self): """ User-supplied arguments and the EMAIL_SUBJECT_PREFIX setting are used to compose the email subject (#16736). """ message = "Custom message that says '%s' and '%s'" token1 = 'ping' token2 = 'pong' admin_email_handler = self.get_admin_email_handler(self.logger) # Backup then override original filters orig_filters = admin_email_handler.filters try: admin_email_handler.filters = [] self.logger.error(message, token1, token2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['[email protected]']) self.assertEqual(mail.outbox[0].subject, "-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'") finally: # Restore original filters admin_email_handler.filters = orig_filters @override_settings( ADMINS=[('whatever admin', '[email protected]')], EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-', INTERNAL_IPS=['127.0.0.1'], ) def test_accepts_args_and_request(self): """ The subject is also handled if being passed a request object. """ message = "Custom message that says '%s' and '%s'" token1 = 'ping' token2 = 'pong' admin_email_handler = self.get_admin_email_handler(self.logger) # Backup then override original filters orig_filters = admin_email_handler.filters try: admin_email_handler.filters = [] request = self.request_factory.get('/') self.logger.error( message, token1, token2, extra={ 'status_code': 403, 'request': request, } ) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['[email protected]']) self.assertEqual(mail.outbox[0].subject, "-SuperAwesomeSubject-ERROR (internal IP): Custom message that says 'ping' and 'pong'") finally: # Restore original filters admin_email_handler.filters = orig_filters @override_settings( ADMINS=[('admin', '[email protected]')], EMAIL_SUBJECT_PREFIX='', DEBUG=False, ) def test_subject_accepts_newlines(self): """ Newlines in email reports' subjects are escaped to prevent AdminErrorHandler from failing (#17281). 
""" message = 'Message \r\n with newlines' expected_subject = 'ERROR: Message \\r\\n with newlines' self.assertEqual(len(mail.outbox), 0) self.logger.error(message) self.assertEqual(len(mail.outbox), 1) self.assertNotIn('\n', mail.outbox[0].subject) self.assertNotIn('\r', mail.outbox[0].subject) self.assertEqual(mail.outbox[0].subject, expected_subject) @override_settings( ADMINS=[('admin', '[email protected]')], DEBUG=False, ) def test_uses_custom_email_backend(self): """ Refs #19325 """ message = 'All work and no play makes Jack a dull boy' admin_email_handler = self.get_admin_email_handler(self.logger) mail_admins_called = {'called': False} def my_mail_admins(*args, **kwargs): connection = kwargs['connection'] self.assertIsInstance(connection, MyEmailBackend) mail_admins_called['called'] = True # Monkeypatches orig_mail_admins = mail.mail_admins orig_email_backend = admin_email_handler.email_backend mail.mail_admins = my_mail_admins admin_email_handler.email_backend = ( 'logging_tests.logconfig.MyEmailBackend') try: self.logger.error(message) self.assertTrue(mail_admins_called['called']) finally: # Revert Monkeypatches mail.mail_admins = orig_mail_admins admin_email_handler.email_backend = orig_email_backend @override_settings( ADMINS=[('whatever admin', '[email protected]')], ) def test_emit_non_ascii(self): """ #23593 - AdminEmailHandler should allow Unicode characters in the request. """ handler = self.get_admin_email_handler(self.logger) record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None) url_path = '/º' record.request = self.request_factory.get(url_path) handler.emit(record) self.assertEqual(len(mail.outbox), 1) msg = mail.outbox[0] self.assertEqual(msg.to, ['[email protected]']) self.assertEqual(msg.subject, "[Django] ERROR (EXTERNAL IP): message") self.assertIn("Report at %s" % url_path, msg.body) @override_settings( MANAGERS=[('manager', '[email protected]')], DEBUG=False, ) def test_customize_send_mail_method(self): class ManagerEmailHandler(AdminEmailHandler): def send_mail(self, subject, message, *args, **kwargs): mail.mail_managers(subject, message, *args, connection=self.connection(), **kwargs) handler = ManagerEmailHandler() record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None) self.assertEqual(len(mail.outbox), 0) handler.emit(record) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['[email protected]']) @override_settings(ALLOWED_HOSTS='example.com') def test_disallowed_host_doesnt_crash(self): admin_email_handler = self.get_admin_email_handler(self.logger) old_include_html = admin_email_handler.include_html # Text email admin_email_handler.include_html = False try: self.client.get('/', HTTP_HOST='evil.com') finally: admin_email_handler.include_html = old_include_html # HTML email admin_email_handler.include_html = True try: self.client.get('/', HTTP_HOST='evil.com') finally: admin_email_handler.include_html = old_include_html def test_default_exception_reporter_class(self): admin_email_handler = self.get_admin_email_handler(self.logger) self.assertEqual(admin_email_handler.reporter_class, ExceptionReporter) @override_settings(ADMINS=[('A.N.Admin', '[email protected]')]) def test_custom_exception_reporter_is_used(self): record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None) record.request = self.request_factory.get('/') handler = AdminEmailHandler(reporter_class='logging_tests.logconfig.CustomExceptionReporter') 
handler.emit(record) self.assertEqual(len(mail.outbox), 1) msg = mail.outbox[0] self.assertEqual(msg.body, 'message\n\ncustom traceback text') @override_settings(ADMINS=[('admin', '[email protected]')]) def test_emit_no_form_tag(self): """HTML email doesn't contain forms.""" handler = AdminEmailHandler(include_html=True) record = self.logger.makeRecord( 'name', logging.ERROR, 'function', 'lno', 'message', None, None, ) handler.emit(record) self.assertEqual(len(mail.outbox), 1) msg = mail.outbox[0] self.assertEqual(msg.subject, '[Django] ERROR: message') self.assertEqual(len(msg.alternatives), 1) body_html = str(msg.alternatives[0][0]) self.assertIn('<div id="traceback">', body_html) self.assertNotIn('<form', body_html) class SettingsConfigTest(AdminScriptTestCase): """ Accessing settings in a custom logging handler does not trigger a circular import error. """ def setUp(self): super().setUp() log_config = """{ 'version': 1, 'handlers': { 'custom_handler': { 'level': 'INFO', 'class': 'logging_tests.logconfig.MyHandler', } } }""" self.write_settings('settings.py', sdict={'LOGGING': log_config}) def test_circular_dependency(self): # validate is just an example command to trigger settings configuration out, err = self.run_manage(['check']) self.assertNoOutput(err) self.assertOutput(out, "System check identified no issues (0 silenced).") def dictConfig(config): dictConfig.called = True dictConfig.called = False class SetupConfigureLogging(SimpleTestCase): """ Calling django.setup() initializes the logging configuration. """ def test_configure_initializes_logging(self): from django import setup try: with override_settings( LOGGING_CONFIG='logging_tests.tests.dictConfig', ): setup() finally: # Restore logging from settings. setup() self.assertTrue(dictConfig.called) @override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls') class SecurityLoggerTest(LoggingAssertionMixin, SimpleTestCase): def test_suspicious_operation_creates_log_message(self): self.assertLogsRequest( url='/suspicious/', level='ERROR', msg='dubious', status_code=400, logger='django.security.SuspiciousOperation', exc_class=SuspiciousOperation, ) def test_suspicious_operation_uses_sublogger(self): self.assertLogsRequest( url='/suspicious_spec/', level='ERROR', msg='dubious', status_code=400, logger='django.security.DisallowedHost', exc_class=DisallowedHost, ) @override_settings( ADMINS=[('admin', '[email protected]')], DEBUG=False, ) def test_suspicious_email_admins(self): self.client.get('/suspicious/') self.assertEqual(len(mail.outbox), 1) self.assertIn('SuspiciousOperation at /suspicious/', mail.outbox[0].body) class SettingsCustomLoggingTest(AdminScriptTestCase): """ Using a logging defaults are still applied when using a custom callable in LOGGING_CONFIG (i.e., logging.config.fileConfig). 
""" def setUp(self): super().setUp() logging_conf = """ [loggers] keys=root [handlers] keys=stream [formatters] keys=simple [logger_root] handlers=stream [handler_stream] class=StreamHandler formatter=simple args=(sys.stdout,) [formatter_simple] format=%(message)s """ self.temp_file = NamedTemporaryFile() self.temp_file.write(logging_conf.encode()) self.temp_file.flush() self.write_settings('settings.py', sdict={ 'LOGGING_CONFIG': '"logging.config.fileConfig"', 'LOGGING': 'r"%s"' % self.temp_file.name, }) def tearDown(self): self.temp_file.close() def test_custom_logging(self): out, err = self.run_manage(['check']) self.assertNoOutput(err) self.assertOutput(out, "System check identified no issues (0 silenced).") class LogFormattersTests(SimpleTestCase): def test_server_formatter_styles(self): color_style = color.make_style('') formatter = ServerFormatter() formatter.style = color_style log_msg = 'log message' status_code_styles = [ (200, 'HTTP_SUCCESS'), (100, 'HTTP_INFO'), (304, 'HTTP_NOT_MODIFIED'), (300, 'HTTP_REDIRECT'), (404, 'HTTP_NOT_FOUND'), (400, 'HTTP_BAD_REQUEST'), (500, 'HTTP_SERVER_ERROR'), ] for status_code, style in status_code_styles: record = logging.makeLogRecord({'msg': log_msg, 'status_code': status_code}) self.assertEqual(formatter.format(record), getattr(color_style, style)(log_msg)) record = logging.makeLogRecord({'msg': log_msg}) self.assertEqual(formatter.format(record), log_msg) def test_server_formatter_default_format(self): server_time = '2016-09-25 10:20:30' log_msg = 'log message' logger = logging.getLogger('django.server') @contextmanager def patch_django_server_logger(): old_stream = logger.handlers[0].stream new_stream = StringIO() logger.handlers[0].stream = new_stream yield new_stream logger.handlers[0].stream = old_stream with patch_django_server_logger() as logger_output: logger.info(log_msg, extra={'server_time': server_time}) self.assertEqual('[%s] %s\n' % (server_time, log_msg), logger_output.getvalue()) with patch_django_server_logger() as logger_output: logger.info(log_msg) self.assertRegex(logger_output.getvalue(), r'^\[[/:,\w\s\d]+\] %s\n' % log_msg)
f9b19d9e5bc643a535ba9711b5aaab4d97a11035bd0783ebb0d12ab2082cd79e
import os from io import StringIO from unittest import mock from admin_scripts.tests import AdminScriptTestCase from django.apps import apps from django.core import management from django.core.checks import Tags from django.core.management import BaseCommand, CommandError, find_commands from django.core.management.utils import ( find_command, get_random_secret_key, is_ignored_path, normalize_path_patterns, popen_wrapper, ) from django.db import connection from django.test import SimpleTestCase, override_settings from django.test.utils import captured_stderr, extend_sys_path from django.utils import translation from .management.commands import dance # A minimal set of apps to avoid system checks running on all apps. @override_settings( INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'user_commands', ], ) class CommandTests(SimpleTestCase): def test_command(self): out = StringIO() management.call_command('dance', stdout=out) self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue()) def test_command_style(self): out = StringIO() management.call_command('dance', style='Jive', stdout=out) self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) # Passing options as arguments also works (thanks argparse) management.call_command('dance', '--style', 'Jive', stdout=out) self.assertIn("I don't feel like dancing Jive.\n", out.getvalue()) def test_language_preserved(self): with translation.override('fr'): management.call_command('dance', verbosity=0) self.assertEqual(translation.get_language(), 'fr') def test_explode(self): """ An unknown command raises CommandError """ with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"): management.call_command(('explode',)) def test_system_exit(self): """ Exception raised in a command should raise CommandError with call_command, but SystemExit when run from command line """ with self.assertRaises(CommandError) as cm: management.call_command('dance', example="raise") self.assertEqual(cm.exception.returncode, 3) dance.Command.requires_system_checks = [] try: with captured_stderr() as stderr, self.assertRaises(SystemExit) as cm: management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute() self.assertEqual(cm.exception.code, 3) finally: dance.Command.requires_system_checks = '__all__' self.assertIn("CommandError", stderr.getvalue()) def test_no_translations_deactivate_translations(self): """ When the Command handle method is decorated with @no_translations, translations are deactivated inside the command. """ current_locale = translation.get_language() with translation.override('pl'): result = management.call_command('no_translations') self.assertIsNone(result) self.assertEqual(translation.get_language(), current_locale) def test_find_command_without_PATH(self): """ find_command should still work when the PATH environment variable doesn't exist (#22256). """ current_path = os.environ.pop('PATH', None) try: self.assertIsNone(find_command('_missing_')) finally: if current_path is not None: os.environ['PATH'] = current_path def test_discover_commands_in_eggs(self): """ Management commands can also be loaded from Python eggs. 
""" egg_dir = '%s/eggs' % os.path.dirname(__file__) egg_name = '%s/basic.egg' % egg_dir with extend_sys_path(egg_name): with self.settings(INSTALLED_APPS=['commandegg']): cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management')) self.assertEqual(cmds, ['eggcommand']) def test_call_command_option_parsing(self): """ When passing the long option name to call_command, the available option key is the option dest name (#22985). """ out = StringIO() management.call_command('dance', stdout=out, opt_3=True) self.assertIn("option3", out.getvalue()) self.assertNotIn("opt_3", out.getvalue()) self.assertNotIn("opt-3", out.getvalue()) def test_call_command_option_parsing_non_string_arg(self): """ It should be possible to pass non-string arguments to call_command. """ out = StringIO() management.call_command('dance', 1, verbosity=0, stdout=out) self.assertIn("You passed 1 as a positional argument.", out.getvalue()) def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self): out = StringIO() management.call_command('hal', "--empty", stdout=out) self.assertEqual(out.getvalue(), "\nDave, I can't do that.\n") def test_calling_command_with_app_labels_and_parameters_should_be_ok(self): out = StringIO() management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out) self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue()) def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self): out = StringIO() management.call_command('hal', "--verbosity", "3", "myapp", stdout=out) self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue()) def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self): with self.assertRaises(CommandError): management.call_command('hal') def test_output_transaction(self): output = management.call_command('transaction', stdout=StringIO(), no_color=True) self.assertTrue(output.strip().startswith(connection.ops.start_transaction_sql())) self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql())) def test_call_command_no_checks(self): """ By default, call_command should not trigger the check framework, unless specifically asked. """ self.counter = 0 def patched_check(self_, **kwargs): self.counter += 1 self.kwargs = kwargs saved_check = BaseCommand.check BaseCommand.check = patched_check try: management.call_command("dance", verbosity=0) self.assertEqual(self.counter, 0) management.call_command("dance", verbosity=0, skip_checks=False) self.assertEqual(self.counter, 1) self.assertEqual(self.kwargs, {}) finally: BaseCommand.check = saved_check def test_requires_system_checks_empty(self): with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check: management.call_command('no_system_checks') self.assertIs(mocked_check.called, False) def test_requires_system_checks_specific(self): with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check: management.call_command('specific_system_checks') mocked_check.called_once_with(tags=[Tags.staticfiles, Tags.models]) def test_requires_system_checks_invalid(self): class Command(BaseCommand): requires_system_checks = 'x' msg = 'requires_system_checks must be a list or tuple.' 
with self.assertRaisesMessage(TypeError, msg): Command() def test_check_migrations(self): requires_migrations_checks = dance.Command.requires_migrations_checks self.assertIs(requires_migrations_checks, False) try: with mock.patch.object(BaseCommand, 'check_migrations') as check_migrations: management.call_command('dance', verbosity=0) self.assertFalse(check_migrations.called) dance.Command.requires_migrations_checks = True management.call_command('dance', verbosity=0) self.assertTrue(check_migrations.called) finally: dance.Command.requires_migrations_checks = requires_migrations_checks def test_call_command_unrecognized_option(self): msg = ( 'Unknown option(s) for dance command: unrecognized. Valid options ' 'are: example, force_color, help, integer, no_color, opt_3, ' 'option3, pythonpath, settings, skip_checks, stderr, stdout, ' 'style, traceback, verbosity, version.' ) with self.assertRaisesMessage(TypeError, msg): management.call_command('dance', unrecognized=1) msg = ( 'Unknown option(s) for dance command: unrecognized, unrecognized2. ' 'Valid options are: example, force_color, help, integer, no_color, ' 'opt_3, option3, pythonpath, settings, skip_checks, stderr, ' 'stdout, style, traceback, verbosity, version.' ) with self.assertRaisesMessage(TypeError, msg): management.call_command('dance', unrecognized=1, unrecognized2=1) def test_call_command_with_required_parameters_in_options(self): out = StringIO() management.call_command('required_option', need_me='foo', needme2='bar', stdout=out) self.assertIn('need_me', out.getvalue()) self.assertIn('needme2', out.getvalue()) def test_call_command_with_required_parameters_in_mixed_options(self): out = StringIO() management.call_command('required_option', '--need-me=foo', needme2='bar', stdout=out) self.assertIn('need_me', out.getvalue()) self.assertIn('needme2', out.getvalue()) def test_command_add_arguments_after_common_arguments(self): out = StringIO() management.call_command('common_args', stdout=out) self.assertIn('Detected that --version already exists', out.getvalue()) def test_mutually_exclusive_group_required_options(self): out = StringIO() management.call_command('mutually_exclusive_required', foo_id=1, stdout=out) self.assertIn('foo_id', out.getvalue()) management.call_command('mutually_exclusive_required', foo_name='foo', stdout=out) self.assertIn('foo_name', out.getvalue()) msg = ( 'Error: one of the arguments --foo-id --foo-name --foo-list ' '--append_const --const --count --flag_false --flag_true is ' 'required' ) with self.assertRaisesMessage(CommandError, msg): management.call_command('mutually_exclusive_required', stdout=out) def test_mutually_exclusive_group_required_const_options(self): tests = [ ('append_const', [42]), ('const', 31), ('count', 1), ('flag_false', False), ('flag_true', True), ] for arg, value in tests: out = StringIO() expected_output = '%s=%s' % (arg, value) with self.subTest(arg=arg): management.call_command( 'mutually_exclusive_required', '--%s' % arg, stdout=out, ) self.assertIn(expected_output, out.getvalue()) out.truncate(0) management.call_command( 'mutually_exclusive_required', **{arg: value, 'stdout': out}, ) self.assertIn(expected_output, out.getvalue()) def test_mutually_exclusive_group_required_with_same_dest_options(self): tests = [ {'until': '2'}, {'for': '1', 'until': '2'}, ] msg = ( "Cannot pass the dest 'until' that matches multiple arguments via " "**options." 
) for options in tests: with self.subTest(options=options): with self.assertRaisesMessage(TypeError, msg): management.call_command( 'mutually_exclusive_required_with_same_dest', **options, ) def test_mutually_exclusive_group_required_with_same_dest_args(self): tests = [ ('--until=1',), ('--until', 1), ('--for=1',), ('--for', 1), ] for args in tests: out = StringIO() with self.subTest(options=args): management.call_command( 'mutually_exclusive_required_with_same_dest', *args, stdout=out, ) output = out.getvalue() self.assertIn('until=1', output) def test_required_list_option(self): tests = [ (('--foo-list', [1, 2]), {}), ((), {'foo_list': [1, 2]}), ] for command in ['mutually_exclusive_required', 'required_list_option']: for args, kwargs in tests: with self.subTest(command=command, args=args, kwargs=kwargs): out = StringIO() management.call_command( command, *args, **{**kwargs, 'stdout': out}, ) self.assertIn('foo_list=[1, 2]', out.getvalue()) def test_required_const_options(self): args = { 'append_const': [42], 'const': 31, 'count': 1, 'flag_false': False, 'flag_true': True, } expected_output = '\n'.join( '%s=%s' % (arg, value) for arg, value in args.items() ) out = StringIO() management.call_command( 'required_constant_option', '--append_const', '--const', '--count', '--flag_false', '--flag_true', stdout=out, ) self.assertIn(expected_output, out.getvalue()) out.truncate(0) management.call_command('required_constant_option', **{**args, 'stdout': out}) self.assertIn(expected_output, out.getvalue()) def test_subparser(self): out = StringIO() management.call_command('subparser', 'foo', 12, stdout=out) self.assertIn('bar', out.getvalue()) def test_subparser_dest_args(self): out = StringIO() management.call_command('subparser_dest', 'foo', bar=12, stdout=out) self.assertIn('bar', out.getvalue()) def test_subparser_dest_required_args(self): out = StringIO() management.call_command('subparser_required', 'foo_1', 'foo_2', bar=12, stdout=out) self.assertIn('bar', out.getvalue()) def test_subparser_invalid_option(self): msg = "invalid choice: 'test' (choose from 'foo')" with self.assertRaisesMessage(CommandError, msg): management.call_command('subparser', 'test', 12) msg = 'Error: the following arguments are required: subcommand' with self.assertRaisesMessage(CommandError, msg): management.call_command('subparser_dest', subcommand='foo', bar=12) def test_create_parser_kwargs(self): """BaseCommand.create_parser() passes kwargs to CommandParser.""" epilog = 'some epilog text' parser = BaseCommand().create_parser('prog_name', 'subcommand', epilog=epilog) self.assertEqual(parser.epilog, epilog) def test_outputwrapper_flush(self): out = StringIO() with mock.patch.object(out, 'flush') as mocked_flush: management.call_command('outputwrapper', stdout=out) self.assertIn('Working...', out.getvalue()) self.assertIs(mocked_flush.called, True) class CommandRunTests(AdminScriptTestCase): """ Tests that need to run by simulating the command line, not by call_command. """ def test_script_prefix_set_in_commands(self): self.write_settings('settings.py', apps=['user_commands'], sdict={ 'ROOT_URLCONF': '"user_commands.urls"', 'FORCE_SCRIPT_NAME': '"/PREFIX/"', }) out, err = self.run_manage(['reverse_url']) self.assertNoOutput(err) self.assertEqual(out.strip(), '/PREFIX/some/url/') def test_disallowed_abbreviated_options(self): """ To avoid conflicts with custom options, commands don't allow abbreviated forms of the --setting and --pythonpath options. 
""" self.write_settings('settings.py', apps=['user_commands']) out, err = self.run_manage(['set_option', '--set', 'foo']) self.assertNoOutput(err) self.assertEqual(out.strip(), 'Set foo') def test_skip_checks(self): self.write_settings('settings.py', apps=['django.contrib.staticfiles', 'user_commands'], sdict={ # (staticfiles.E001) The STATICFILES_DIRS setting is not a tuple or # list. 'STATICFILES_DIRS': '"foo"', }) out, err = self.run_manage(['set_option', '--skip-checks', '--set', 'foo']) self.assertNoOutput(err) self.assertEqual(out.strip(), 'Set foo') class UtilsTests(SimpleTestCase): def test_no_existent_external_program(self): msg = 'Error executing a_42_command_that_doesnt_exist_42' with self.assertRaisesMessage(CommandError, msg): popen_wrapper(['a_42_command_that_doesnt_exist_42']) def test_get_random_secret_key(self): key = get_random_secret_key() self.assertEqual(len(key), 50) for char in key: self.assertIn(char, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') def test_is_ignored_path_true(self): patterns = ( ['foo/bar/baz'], ['baz'], ['foo/bar/baz'], ['*/baz'], ['*'], ['b?z'], ['[abc]az'], ['*/ba[!z]/baz'], ) for ignore_patterns in patterns: with self.subTest(ignore_patterns=ignore_patterns): self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=ignore_patterns), True) def test_is_ignored_path_false(self): self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=['foo/bar/bat', 'bar', 'flub/blub']), False) def test_normalize_path_patterns_truncates_wildcard_base(self): expected = [os.path.normcase(p) for p in ['foo/bar', 'bar/*/']] self.assertEqual(normalize_path_patterns(['foo/bar/*', 'bar/*/']), expected)
5cee12f91bcefd5941c88e6c5842b7270070f044104bf827a2f27643d0611909
import os from django.apps import AppConfig, apps from django.apps.registry import Apps from django.contrib.admin.models import LogEntry from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured from django.db import models from django.test import SimpleTestCase, override_settings from django.test.utils import extend_sys_path, isolate_apps from .models import SoAlternative, TotallyNormal, new_apps from .one_config_app.apps import OneConfig from .two_configs_one_default_app.apps import TwoConfig # Small list with a variety of cases for tests that iterate on installed apps. # Intentionally not in alphabetical order to check if the order is preserved. SOME_INSTALLED_APPS = [ 'apps.apps.MyAdmin', 'apps.apps.MyAuth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] SOME_INSTALLED_APPS_NAMES = [ 'django.contrib.admin', 'django.contrib.auth', ] + SOME_INSTALLED_APPS[2:] HERE = os.path.dirname(__file__) class AppsTests(SimpleTestCase): def test_singleton_master(self): """ Only one master registry can exist. """ with self.assertRaises(RuntimeError): Apps(installed_apps=None) def test_ready(self): """ Tests the ready property of the master registry. """ # The master app registry is always ready when the tests run. self.assertIs(apps.ready, True) # Non-master app registries are populated in __init__. self.assertIs(Apps().ready, True) # The condition is set when apps are ready self.assertIs(apps.ready_event.is_set(), True) self.assertIs(Apps().ready_event.is_set(), True) def test_bad_app_config(self): """ Tests when INSTALLED_APPS contains an incorrect app config. """ msg = "'apps.apps.BadConfig' must supply a name attribute." with self.assertRaisesMessage(ImproperlyConfigured, msg): with self.settings(INSTALLED_APPS=['apps.apps.BadConfig']): pass def test_not_an_app_config(self): """ Tests when INSTALLED_APPS contains a class that isn't an app config. """ msg = "'apps.apps.NotAConfig' isn't a subclass of AppConfig." with self.assertRaisesMessage(ImproperlyConfigured, msg): with self.settings(INSTALLED_APPS=['apps.apps.NotAConfig']): pass def test_no_such_app(self): """ Tests when INSTALLED_APPS contains an app that doesn't exist, either directly or via an app config. """ with self.assertRaises(ImportError): with self.settings(INSTALLED_APPS=['there is no such app']): pass msg = "Cannot import 'there is no such app'. Check that 'apps.apps.NoSuchApp.name' is correct." with self.assertRaisesMessage(ImproperlyConfigured, msg): with self.settings(INSTALLED_APPS=['apps.apps.NoSuchApp']): pass def test_no_such_app_config(self): msg = "Module 'apps' does not contain a 'NoSuchConfig' class." with self.assertRaisesMessage(ImportError, msg): with self.settings(INSTALLED_APPS=['apps.NoSuchConfig']): pass def test_no_such_app_config_with_choices(self): msg = ( "Module 'apps.apps' does not contain a 'NoSuchConfig' class. " "Choices are: 'BadConfig', 'ModelPKAppsConfig', 'MyAdmin', " "'MyAuth', 'NoSuchApp', 'PlainAppsConfig', 'RelabeledAppsConfig'." 
) with self.assertRaisesMessage(ImportError, msg): with self.settings(INSTALLED_APPS=['apps.apps.NoSuchConfig']): pass def test_no_config_app(self): """Load an app that doesn't provide an AppConfig class.""" with self.settings(INSTALLED_APPS=['apps.no_config_app']): config = apps.get_app_config('no_config_app') self.assertIsInstance(config, AppConfig) def test_one_config_app(self): """Load an app that provides an AppConfig class.""" with self.settings(INSTALLED_APPS=['apps.one_config_app']): config = apps.get_app_config('one_config_app') self.assertIsInstance(config, OneConfig) def test_two_configs_app(self): """Load an app that provides two AppConfig classes.""" with self.settings(INSTALLED_APPS=['apps.two_configs_app']): config = apps.get_app_config('two_configs_app') self.assertIsInstance(config, AppConfig) def test_two_default_configs_app(self): """Load an app that provides two default AppConfig classes.""" msg = ( "'apps.two_default_configs_app.apps' declares more than one " "default AppConfig: 'TwoConfig', 'TwoConfigBis'." ) with self.assertRaisesMessage(RuntimeError, msg): with self.settings(INSTALLED_APPS=['apps.two_default_configs_app']): pass def test_two_configs_one_default_app(self): """ Load an app that provides two AppConfig classes, one being the default. """ with self.settings(INSTALLED_APPS=['apps.two_configs_one_default_app']): config = apps.get_app_config('two_configs_one_default_app') self.assertIsInstance(config, TwoConfig) @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS) def test_get_app_configs(self): """ Tests apps.get_app_configs(). """ app_configs = apps.get_app_configs() self.assertEqual([app_config.name for app_config in app_configs], SOME_INSTALLED_APPS_NAMES) @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS) def test_get_app_config(self): """ Tests apps.get_app_config(). """ app_config = apps.get_app_config('admin') self.assertEqual(app_config.name, 'django.contrib.admin') app_config = apps.get_app_config('staticfiles') self.assertEqual(app_config.name, 'django.contrib.staticfiles') with self.assertRaises(LookupError): apps.get_app_config('admindocs') msg = "No installed app with label 'django.contrib.auth'. Did you mean 'myauth'" with self.assertRaisesMessage(LookupError, msg): apps.get_app_config('django.contrib.auth') @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS) def test_is_installed(self): """ Tests apps.is_installed(). """ self.assertIs(apps.is_installed('django.contrib.admin'), True) self.assertIs(apps.is_installed('django.contrib.auth'), True) self.assertIs(apps.is_installed('django.contrib.staticfiles'), True) self.assertIs(apps.is_installed('django.contrib.admindocs'), False) @override_settings(INSTALLED_APPS=SOME_INSTALLED_APPS) def test_get_model(self): """ Tests apps.get_model(). """ self.assertEqual(apps.get_model('admin', 'LogEntry'), LogEntry) with self.assertRaises(LookupError): apps.get_model('admin', 'LogExit') # App label is case-sensitive, Model name is case-insensitive. self.assertEqual(apps.get_model('admin', 'loGentrY'), LogEntry) with self.assertRaises(LookupError): apps.get_model('Admin', 'LogEntry') # A single argument is accepted. 
self.assertEqual(apps.get_model('admin.LogEntry'), LogEntry) with self.assertRaises(LookupError): apps.get_model('admin.LogExit') with self.assertRaises(ValueError): apps.get_model('admin_LogEntry') @override_settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig']) def test_relabeling(self): self.assertEqual(apps.get_app_config('relabeled').name, 'apps') def test_duplicate_labels(self): with self.assertRaisesMessage(ImproperlyConfigured, "Application labels aren't unique"): with self.settings(INSTALLED_APPS=['apps.apps.PlainAppsConfig', 'apps']): pass def test_duplicate_names(self): with self.assertRaisesMessage(ImproperlyConfigured, "Application names aren't unique"): with self.settings(INSTALLED_APPS=['apps.apps.RelabeledAppsConfig', 'apps']): pass def test_import_exception_is_not_masked(self): """ App discovery should preserve stack traces. Regression test for #22920. """ with self.assertRaisesMessage(ImportError, "Oops"): with self.settings(INSTALLED_APPS=['import_error_package']): pass def test_models_py(self): """ The models in the models.py file were loaded correctly. """ self.assertEqual(apps.get_model("apps", "TotallyNormal"), TotallyNormal) with self.assertRaises(LookupError): apps.get_model("apps", "SoAlternative") with self.assertRaises(LookupError): new_apps.get_model("apps", "TotallyNormal") self.assertEqual(new_apps.get_model("apps", "SoAlternative"), SoAlternative) def test_models_not_loaded(self): """ apps.get_models() raises an exception if apps.models_ready isn't True. """ apps.models_ready = False try: # The cache must be cleared to trigger the exception. apps.get_models.cache_clear() with self.assertRaisesMessage(AppRegistryNotReady, "Models aren't loaded yet."): apps.get_models() finally: apps.models_ready = True def test_dynamic_load(self): """ Makes a new model at runtime and ensures it goes into the right place. """ old_models = list(apps.get_app_config("apps").get_models()) # Construct a new model in a new app registry body = {} new_apps = Apps(["apps"]) meta_contents = { 'app_label': "apps", 'apps': new_apps, } meta = type("Meta", (), meta_contents) body['Meta'] = meta body['__module__'] = TotallyNormal.__module__ temp_model = type("SouthPonies", (models.Model,), body) # Make sure it appeared in the right place! self.assertEqual(list(apps.get_app_config("apps").get_models()), old_models) with self.assertRaises(LookupError): apps.get_model("apps", "SouthPonies") self.assertEqual(new_apps.get_model("apps", "SouthPonies"), temp_model) def test_model_clash(self): """ Test for behavior when two models clash in the app registry. """ new_apps = Apps(["apps"]) meta_contents = { 'app_label': "apps", 'apps': new_apps, } body = {} body['Meta'] = type("Meta", (), meta_contents) body['__module__'] = TotallyNormal.__module__ type("SouthPonies", (models.Model,), body) # When __name__ and __module__ match we assume the module # was reloaded and issue a warning. This use-case is # useful for REPL. Refs #23621. body = {} body['Meta'] = type("Meta", (), meta_contents) body['__module__'] = TotallyNormal.__module__ msg = ( "Model 'apps.southponies' was already registered. " "Reloading models is not advised as it can lead to inconsistencies, " "most notably with related models." ) with self.assertRaisesMessage(RuntimeWarning, msg): type("SouthPonies", (models.Model,), body) # If it doesn't appear to be a reloaded module then we expect # a RuntimeError. 
body = {} body['Meta'] = type("Meta", (), meta_contents) body['__module__'] = TotallyNormal.__module__ + '.whatever' with self.assertRaisesMessage(RuntimeError, "Conflicting 'southponies' models in application 'apps':"): type("SouthPonies", (models.Model,), body) def test_get_containing_app_config_apps_not_ready(self): """ apps.get_containing_app_config() should raise an exception if apps.apps_ready isn't True. """ apps.apps_ready = False try: with self.assertRaisesMessage(AppRegistryNotReady, "Apps aren't loaded yet"): apps.get_containing_app_config('foo') finally: apps.apps_ready = True @isolate_apps('apps', kwarg_name='apps') def test_lazy_model_operation(self, apps): """ Tests apps.lazy_model_operation(). """ model_classes = [] initial_pending = set(apps._pending_operations) def test_func(*models): model_classes[:] = models class LazyA(models.Model): pass # Test models appearing twice, and models appearing consecutively model_keys = [('apps', model_name) for model_name in ['lazya', 'lazyb', 'lazyb', 'lazyc', 'lazya']] apps.lazy_model_operation(test_func, *model_keys) # LazyModelA shouldn't be waited on since it's already registered, # and LazyModelC shouldn't be waited on until LazyModelB exists. self.assertEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyb')}) # Multiple operations can wait on the same model apps.lazy_model_operation(test_func, ('apps', 'lazyb')) class LazyB(models.Model): pass self.assertEqual(model_classes, [LazyB]) # Now we are just waiting on LazyModelC. self.assertEqual(set(apps._pending_operations) - initial_pending, {('apps', 'lazyc')}) class LazyC(models.Model): pass # Everything should be loaded - make sure the callback was executed properly. self.assertEqual(model_classes, [LazyA, LazyB, LazyB, LazyC, LazyA]) class Stub: def __init__(self, **kwargs): self.__dict__.update(kwargs) class AppConfigTests(SimpleTestCase): """Unit tests for AppConfig class.""" def test_path_set_explicitly(self): """If subclass sets path as class attr, no module attributes needed.""" class MyAppConfig(AppConfig): path = 'foo' ac = MyAppConfig('label', Stub()) self.assertEqual(ac.path, 'foo') def test_explicit_path_overrides(self): """If path set as class attr, overrides __path__ and __file__.""" class MyAppConfig(AppConfig): path = 'foo' ac = MyAppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py')) self.assertEqual(ac.path, 'foo') def test_dunder_path(self): """If single element in __path__, use it (in preference to __file__).""" ac = AppConfig('label', Stub(__path__=['a'], __file__='b/__init__.py')) self.assertEqual(ac.path, 'a') def test_no_dunder_path_fallback_to_dunder_file(self): """If there is no __path__ attr, use __file__.""" ac = AppConfig('label', Stub(__file__='b/__init__.py')) self.assertEqual(ac.path, 'b') def test_empty_dunder_path_fallback_to_dunder_file(self): """If the __path__ attr is empty, use __file__ if set.""" ac = AppConfig('label', Stub(__path__=[], __file__='b/__init__.py')) self.assertEqual(ac.path, 'b') def test_multiple_dunder_path_fallback_to_dunder_file(self): """If the __path__ attr is length>1, use __file__ if set.""" ac = AppConfig('label', Stub(__path__=['a', 'b'], __file__='c/__init__.py')) self.assertEqual(ac.path, 'c') def test_no_dunder_path_or_dunder_file(self): """If there is no __path__ or __file__, raise ImproperlyConfigured.""" with self.assertRaises(ImproperlyConfigured): AppConfig('label', Stub()) def test_empty_dunder_path_no_dunder_file(self): """If the __path__ attr is empty and there is no __file__, 
raise.""" with self.assertRaises(ImproperlyConfigured): AppConfig('label', Stub(__path__=[])) def test_multiple_dunder_path_no_dunder_file(self): """If the __path__ attr is length>1 and there is no __file__, raise.""" with self.assertRaises(ImproperlyConfigured): AppConfig('label', Stub(__path__=['a', 'b'])) def test_duplicate_dunder_path_no_dunder_file(self): """ If the __path__ attr contains duplicate paths and there is no __file__, they duplicates should be deduplicated (#25246). """ ac = AppConfig('label', Stub(__path__=['a', 'a'])) self.assertEqual(ac.path, 'a') def test_repr(self): ac = AppConfig('label', Stub(__path__=['a'])) self.assertEqual(repr(ac), '<AppConfig: label>') def test_invalid_label(self): class MyAppConfig(AppConfig): label = 'invalid.label' msg = "The app label 'invalid.label' is not a valid Python identifier." with self.assertRaisesMessage(ImproperlyConfigured, msg): MyAppConfig('test_app', Stub()) @override_settings( INSTALLED_APPS=['apps.apps.ModelPKAppsConfig'], DEFAULT_AUTO_FIELD='django.db.models.SmallAutoField', ) def test_app_default_auto_field(self): apps_config = apps.get_app_config('apps') self.assertEqual( apps_config.default_auto_field, 'django.db.models.BigAutoField', ) self.assertIs(apps_config._is_default_auto_field_overridden, True) @override_settings( INSTALLED_APPS=['apps.apps.PlainAppsConfig'], DEFAULT_AUTO_FIELD='django.db.models.SmallAutoField', ) def test_default_auto_field_setting(self): apps_config = apps.get_app_config('apps') self.assertEqual( apps_config.default_auto_field, 'django.db.models.SmallAutoField', ) self.assertIs(apps_config._is_default_auto_field_overridden, False) class NamespacePackageAppTests(SimpleTestCase): # We need nsapp to be top-level so our multiple-paths tests can add another # location for it (if its inside a normal package with an __init__.py that # isn't possible). In order to avoid cluttering the already-full tests/ dir # (which is on sys.path), we add these new entries to sys.path temporarily. base_location = os.path.join(HERE, 'namespace_package_base') other_location = os.path.join(HERE, 'namespace_package_other_base') app_path = os.path.join(base_location, 'nsapp') def test_single_path(self): """ A Py3.3+ namespace package can be an app if it has only one path. """ with extend_sys_path(self.base_location): with self.settings(INSTALLED_APPS=['nsapp']): app_config = apps.get_app_config('nsapp') self.assertEqual(app_config.path, self.app_path) def test_multiple_paths(self): """ A Py3.3+ namespace package with multiple locations cannot be an app. (Because then we wouldn't know where to load its templates, static assets, etc. from.) """ # Temporarily add two directories to sys.path that both contain # components of the "nsapp" package. with extend_sys_path(self.base_location, self.other_location): with self.assertRaises(ImproperlyConfigured): with self.settings(INSTALLED_APPS=['nsapp']): pass def test_multiple_paths_explicit_path(self): """ Multiple locations are ok only if app-config has explicit path. """ # Temporarily add two directories to sys.path that both contain # components of the "nsapp" package. with extend_sys_path(self.base_location, self.other_location): with self.settings(INSTALLED_APPS=['nsapp.apps.NSAppConfig']): app_config = apps.get_app_config('nsapp') self.assertEqual(app_config.path, self.app_path)
707dfd9fc16aab43286d8bdd766758b46dd1b88bf920020296dc8a5a6a1a61db
import sys import unittest from django.conf import settings from django.contrib import admin from django.contrib.admindocs import utils, views from django.contrib.admindocs.views import get_return_data_type, simplify_regex from django.contrib.sites.models import Site from django.db import models from django.db.models import fields from django.test import SimpleTestCase, modify_settings, override_settings from django.test.utils import captured_stderr from django.urls import include, path, reverse from django.utils.functional import SimpleLazyObject from .models import Company, Person from .tests import AdminDocsTestCase, TestDataMixin @unittest.skipUnless(utils.docutils_is_available, "no docutils installed.") class AdminDocViewTests(TestDataMixin, AdminDocsTestCase): def setUp(self): self.client.force_login(self.superuser) def test_index(self): response = self.client.get(reverse('django-admindocs-docroot')) self.assertContains(response, '<h1>Documentation</h1>', html=True) self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>') self.client.logout() response = self.client.get(reverse('django-admindocs-docroot'), follow=True) # Should display the login screen self.assertContains(response, '<input type="hidden" name="next" value="/admindocs/">', html=True) def test_bookmarklets(self): response = self.client.get(reverse('django-admindocs-bookmarklets')) self.assertContains(response, '/admindocs/views/') def test_templatetag_index(self): response = self.client.get(reverse('django-admindocs-tags')) self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True) def test_templatefilter_index(self): response = self.client.get(reverse('django-admindocs-filters')) self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True) def test_view_index(self): response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains( response, '<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>', html=True ) self.assertContains(response, 'Views by namespace test') self.assertContains(response, 'Name: <code>test:func</code>.') self.assertContains( response, '<h3><a href="/admindocs/views/admin_docs.views.XViewCallableObject/">' '/xview/callable_object_without_xview/</a></h3>', html=True, ) def test_view_index_with_method(self): """ Views that are methods are listed correctly. 
""" response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains( response, '<h3><a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">/admin/</a></h3>', html=True ) def test_view_detail(self): url = reverse('django-admindocs-views-detail', args=['django.contrib.admindocs.views.BaseAdminDocsView']) response = self.client.get(url) # View docstring self.assertContains(response, 'Base view for admindocs views.') @override_settings(ROOT_URLCONF='admin_docs.namespace_urls') def test_namespaced_view_detail(self): url = reverse('django-admindocs-views-detail', args=['admin_docs.views.XViewClass']) response = self.client.get(url) self.assertContains(response, '<h1>admin_docs.views.XViewClass</h1>') def test_view_detail_illegal_import(self): url = reverse('django-admindocs-views-detail', args=['urlpatterns_reverse.nonimported_module.view']) response = self.client.get(url) self.assertEqual(response.status_code, 404) self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules) def test_view_detail_as_method(self): """ Views that are methods can be displayed. """ url = reverse('django-admindocs-views-detail', args=['django.contrib.admin.sites.AdminSite.index']) response = self.client.get(url) self.assertEqual(response.status_code, 200) def test_model_index(self): response = self.client.get(reverse('django-admindocs-models-index')) self.assertContains( response, '<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>', html=True ) def test_template_detail(self): response = self.client.get(reverse('django-admindocs-templates', args=['admin_doc/template_detail.html'])) self.assertContains(response, '<h1>Template: <q>admin_doc/template_detail.html</q></h1>', html=True) def test_missing_docutils(self): utils.docutils_is_available = False try: response = self.client.get(reverse('django-admindocs-docroot')) self.assertContains( response, '<h3>The admin documentation system requires Python’s ' '<a href="https://docutils.sourceforge.io/">docutils</a> ' 'library.</h3>' '<p>Please ask your administrators to install ' '<a href="https://docutils.sourceforge.io/">docutils</a>.</p>', html=True ) self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>') finally: utils.docutils_is_available = True @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) @override_settings(SITE_ID=None) # will restore SITE_ID after the test def test_no_sites_framework(self): """ Without the sites framework, should not access SITE_ID or Site objects. Deleting settings is fine here as UserSettingsHolder is used. """ Site.objects.all().delete() del settings.SITE_ID response = self.client.get(reverse('django-admindocs-views-index')) self.assertContains(response, 'View documentation') def test_callable_urlconf(self): """ Index view should correctly resolve view patterns when ROOT_URLCONF is not a string. 
""" def urlpatterns(): return ( path('admin/doc/', include('django.contrib.admindocs.urls')), path('admin/', admin.site.urls), ) with self.settings(ROOT_URLCONF=SimpleLazyObject(urlpatterns)): response = self.client.get(reverse('django-admindocs-views-index')) self.assertEqual(response.status_code, 200) @unittest.skipUnless(utils.docutils_is_available, 'no docutils installed.') class AdminDocViewDefaultEngineOnly(TestDataMixin, AdminDocsTestCase): def setUp(self): self.client.force_login(self.superuser) def test_template_detail_path_traversal(self): cases = ['/etc/passwd', '../passwd'] for fpath in cases: with self.subTest(path=fpath): response = self.client.get( reverse('django-admindocs-templates', args=[fpath]), ) self.assertEqual(response.status_code, 400) @override_settings(TEMPLATES=[{ 'NAME': 'ONE', 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, }, { 'NAME': 'TWO', 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, }]) @unittest.skipUnless(utils.docutils_is_available, "no docutils installed.") class AdminDocViewWithMultipleEngines(AdminDocViewTests): def test_templatefilter_index(self): # Overridden because non-trivial TEMPLATES settings aren't supported # but the page shouldn't crash (#24125). response = self.client.get(reverse('django-admindocs-filters')) self.assertContains(response, '<title>Template filters</title>', html=True) def test_templatetag_index(self): # Overridden because non-trivial TEMPLATES settings aren't supported # but the page shouldn't crash (#24125). response = self.client.get(reverse('django-admindocs-tags')) self.assertContains(response, '<title>Template tags</title>', html=True) @unittest.skipUnless(utils.docutils_is_available, "no docutils installed.") class TestModelDetailView(TestDataMixin, AdminDocsTestCase): def setUp(self): self.client.force_login(self.superuser) with captured_stderr() as self.docutils_stderr: self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person'])) def test_method_excludes(self): """ Methods that begin with strings defined in ``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE`` shouldn't be displayed in the admin docs. """ self.assertContains(self.response, "<td>get_full_name</td>") self.assertNotContains(self.response, "<td>_get_full_name</td>") self.assertNotContains(self.response, "<td>add_image</td>") self.assertNotContains(self.response, "<td>delete_image</td>") self.assertNotContains(self.response, "<td>set_status</td>") self.assertNotContains(self.response, "<td>save_changes</td>") def test_methods_with_arguments(self): """ Methods that take arguments should also displayed. """ self.assertContains(self.response, "<h3>Methods with arguments</h3>") self.assertContains(self.response, "<td>rename_company</td>") self.assertContains(self.response, "<td>dummy_function</td>") self.assertContains(self.response, "<td>suffix_company_name</td>") def test_methods_with_arguments_display_arguments(self): """ Methods with arguments should have their arguments displayed. """ self.assertContains(self.response, "<td>new_name</td>") def test_methods_with_arguments_display_arguments_default_value(self): """ Methods with keyword arguments should have their arguments displayed. """ self.assertContains(self.response, '<td>suffix=&#x27;ltd&#x27;</td>') def test_methods_with_multiple_arguments_display_arguments(self): """ Methods with multiple arguments should have all their arguments displayed, but omitting 'self'. 
""" self.assertContains(self.response, "<td>baz, rox, *some_args, **some_kwargs</td>") def test_instance_of_property_methods_are_displayed(self): """Model properties are displayed as fields.""" self.assertContains(self.response, '<td>a_property</td>') def test_instance_of_cached_property_methods_are_displayed(self): """Model cached properties are displayed as fields.""" self.assertContains(self.response, '<td>a_cached_property</td>') def test_method_data_types(self): company = Company.objects.create(name="Django") person = Person.objects.create(first_name="Human", last_name="User", company=company) self.assertEqual(get_return_data_type(person.get_status_count.__name__), 'Integer') self.assertEqual(get_return_data_type(person.get_groups_list.__name__), 'List') def test_descriptions_render_correctly(self): """ The ``description`` field should render correctly for each field type. """ # help text in fields self.assertContains(self.response, "<td>first name - The person's first name</td>") self.assertContains(self.response, "<td>last name - The person's last name</td>") # method docstrings self.assertContains(self.response, "<p>Get the full name of the person</p>") link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>' markup = '<p>the related %s object</p>' company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company")) # foreign keys self.assertContains(self.response, company_markup) # foreign keys with help text self.assertContains(self.response, "%s\n - place of work" % company_markup) # many to many fields self.assertContains( self.response, "number of related %s objects" % (link % ("admin_docs.group", "admin_docs.Group")) ) self.assertContains( self.response, "all related %s objects" % (link % ("admin_docs.group", "admin_docs.Group")) ) # "raw" and "include" directives are disabled self.assertContains(self.response, '<p>&quot;raw&quot; directive disabled.</p>',) self.assertContains(self.response, '.. raw:: html\n :file: admin_docs/evilfile.txt') self.assertContains(self.response, '<p>&quot;include&quot; directive disabled.</p>',) self.assertContains(self.response, '.. include:: admin_docs/evilfile.txt') out = self.docutils_stderr.getvalue() self.assertIn('"raw" directive disabled', out) self.assertIn('"include" directive disabled', out) def test_model_with_many_to_one(self): link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>' response = self.client.get( reverse('django-admindocs-models-detail', args=['admin_docs', 'company']) ) self.assertContains( response, "number of related %s objects" % (link % ("admin_docs.person", "admin_docs.Person")) ) self.assertContains( response, "all related %s objects" % (link % ("admin_docs.person", "admin_docs.Person")) ) def test_model_with_no_backward_relations_render_only_relevant_fields(self): """ A model with ``related_name`` of `+` shouldn't show backward relationship links. 
""" response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'family'])) fields = response.context_data.get('fields') self.assertEqual(len(fields), 2) def test_model_docstring_renders_correctly(self): summary = ( '<h2 class="subhead"><p>Stores information about a person, related to <a class="reference external" ' 'href="/admindocs/models/myapp.company/">myapp.Company</a>.</p></h2>' ) subheading = '<p><strong>Notes</strong></p>' body = '<p>Use <tt class="docutils literal">save_changes()</tt> when saving this object.</p>' model_body = ( '<dl class="docutils"><dt><tt class="' 'docutils literal">company</tt></dt><dd>Field storing <a class="' 'reference external" href="/admindocs/models/myapp.company/">' 'myapp.Company</a> where the person works.</dd></dl>' ) self.assertContains(self.response, 'DESCRIPTION') self.assertContains(self.response, summary, html=True) self.assertContains(self.response, subheading, html=True) self.assertContains(self.response, body, html=True) self.assertContains(self.response, model_body, html=True) def test_model_detail_title(self): self.assertContains(self.response, '<h1>admin_docs.Person</h1>', html=True) def test_app_not_found(self): response = self.client.get(reverse('django-admindocs-models-detail', args=['doesnotexist', 'Person'])) self.assertEqual(response.context['exception'], "App 'doesnotexist' not found") self.assertEqual(response.status_code, 404) def test_model_not_found(self): response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'doesnotexist'])) self.assertEqual(response.context['exception'], "Model 'doesnotexist' not found in app 'admin_docs'") self.assertEqual(response.status_code, 404) class CustomField(models.Field): description = "A custom field type" class DescriptionLackingField(models.Field): pass class TestFieldType(unittest.TestCase): def test_field_name(self): with self.assertRaises(AttributeError): views.get_readable_field_data_type("NotAField") def test_builtin_fields(self): self.assertEqual( views.get_readable_field_data_type(fields.BooleanField()), 'Boolean (Either True or False)' ) def test_custom_fields(self): self.assertEqual(views.get_readable_field_data_type(CustomField()), 'A custom field type') self.assertEqual( views.get_readable_field_data_type(DescriptionLackingField()), 'Field of type: DescriptionLackingField' ) class AdminDocViewFunctionsTests(SimpleTestCase): def test_simplify_regex(self): tests = ( # Named and unnamed groups. (r'^(?P<a>\w+)/b/(?P<c>\w+)/$', '/<a>/b/<c>/'), (r'^(?P<a>\w+)/b/(?P<c>\w+)$', '/<a>/b/<c>'), (r'^(?P<a>\w+)/b/(?P<c>\w+)', '/<a>/b/<c>'), (r'^(?P<a>\w+)/b/(\w+)$', '/<a>/b/<var>'), (r'^(?P<a>\w+)/b/(\w+)', '/<a>/b/<var>'), (r'^(?P<a>\w+)/b/((x|y)\w+)$', '/<a>/b/<var>'), (r'^(?P<a>\w+)/b/((x|y)\w+)', '/<a>/b/<var>'), (r'^(?P<a>(x|y))/b/(?P<c>\w+)$', '/<a>/b/<c>'), (r'^(?P<a>(x|y))/b/(?P<c>\w+)', '/<a>/b/<c>'), (r'^(?P<a>(x|y))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'), (r'^(?P<a>(x|y)(\(|\)))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'), # Non-capturing groups. (r'^a(?:\w+)b', '/ab'), (r'^a(?:(x|y))', '/a'), (r'^(?:\w+(?:\w+))a', '/a'), (r'^a(?:\w+)/b(?:\w+)', '/a/b'), (r'(?P<a>\w+)/b/(?:\w+)c(?:\w+)', '/<a>/b/c'), (r'(?P<a>\w+)/b/(\w+)/(?:\w+)c(?:\w+)', '/<a>/b/<var>/c'), # Single and repeated metacharacters. 
(r'^a', '/a'), (r'^^a', '/a'), (r'^^^a', '/a'), (r'a$', '/a'), (r'a$$', '/a'), (r'a$$$', '/a'), (r'a?', '/a'), (r'a??', '/a'), (r'a???', '/a'), (r'a*', '/a'), (r'a**', '/a'), (r'a***', '/a'), (r'a+', '/a'), (r'a++', '/a'), (r'a+++', '/a'), (r'\Aa', '/a'), (r'\A\Aa', '/a'), (r'\A\A\Aa', '/a'), (r'a\Z', '/a'), (r'a\Z\Z', '/a'), (r'a\Z\Z\Z', '/a'), (r'\ba', '/a'), (r'\b\ba', '/a'), (r'\b\b\ba', '/a'), (r'a\B', '/a'), (r'a\B\B', '/a'), (r'a\B\B\B', '/a'), # Multiple mixed metacharacters. (r'^a/?$', '/a/'), (r'\Aa\Z', '/a'), (r'\ba\B', '/a'), # Escaped single metacharacters. (r'\^a', r'/^a'), (r'\\^a', r'/\\a'), (r'\\\^a', r'/\\^a'), (r'\\\\^a', r'/\\\\a'), (r'\\\\\^a', r'/\\\\^a'), (r'a\$', r'/a$'), (r'a\\$', r'/a\\'), (r'a\\\$', r'/a\\$'), (r'a\\\\$', r'/a\\\\'), (r'a\\\\\$', r'/a\\\\$'), (r'a\?', r'/a?'), (r'a\\?', r'/a\\'), (r'a\\\?', r'/a\\?'), (r'a\\\\?', r'/a\\\\'), (r'a\\\\\?', r'/a\\\\?'), (r'a\*', r'/a*'), (r'a\\*', r'/a\\'), (r'a\\\*', r'/a\\*'), (r'a\\\\*', r'/a\\\\'), (r'a\\\\\*', r'/a\\\\*'), (r'a\+', r'/a+'), (r'a\\+', r'/a\\'), (r'a\\\+', r'/a\\+'), (r'a\\\\+', r'/a\\\\'), (r'a\\\\\+', r'/a\\\\+'), (r'\\Aa', r'/\Aa'), (r'\\\Aa', r'/\\a'), (r'\\\\Aa', r'/\\\Aa'), (r'\\\\\Aa', r'/\\\\a'), (r'\\\\\\Aa', r'/\\\\\Aa'), (r'a\\Z', r'/a\Z'), (r'a\\\Z', r'/a\\'), (r'a\\\\Z', r'/a\\\Z'), (r'a\\\\\Z', r'/a\\\\'), (r'a\\\\\\Z', r'/a\\\\\Z'), # Escaped mixed metacharacters. (r'^a\?$', r'/a?'), (r'^a\\?$', r'/a\\'), (r'^a\\\?$', r'/a\\?'), (r'^a\\\\?$', r'/a\\\\'), (r'^a\\\\\?$', r'/a\\\\?'), # Adjacent escaped metacharacters. (r'^a\?\$', r'/a?$'), (r'^a\\?\\$', r'/a\\\\'), (r'^a\\\?\\\$', r'/a\\?\\$'), (r'^a\\\\?\\\\$', r'/a\\\\\\\\'), (r'^a\\\\\?\\\\\$', r'/a\\\\?\\\\$'), # Complex examples with metacharacters and (un)named groups. (r'^\b(?P<slug>\w+)\B/(\w+)?', '/<slug>/<var>'), (r'^\A(?P<slug>\w+)\Z', '/<slug>'), ) for pattern, output in tests: with self.subTest(pattern=pattern): self.assertEqual(simplify_regex(pattern), output)
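

# A minimal usage sketch for simplify_regex(), whose behavioural contract the
# table above encodes: named groups render as <name>, unnamed groups as <var>,
# and anchors/metacharacters are stripped. The import path below is an
# assumption (simplify_regex is expected to be importable from admindocs
# views, as used in this test module); the helper name is illustrative only.
def _simplify_regex_usage_sketch():
    from django.contrib.admindocs.views import simplify_regex  # assumed location
    assert simplify_regex(r'^(?P<slug>\w+)/edit/$') == '/<slug>/edit/'
    assert simplify_regex(r'^(?P<a>\w+)/b/(\w+)$') == '/<a>/b/<var>'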
05ba9ea76d435d60d7448c46ae0f2369169e65bfb345455455ef4db67903c72a
import errno import gzip import os import struct import tempfile import unittest from io import BytesIO, StringIO, TextIOWrapper from pathlib import Path from unittest import mock from django.core.files import File, locks from django.core.files.base import ContentFile from django.core.files.move import file_move_safe from django.core.files.temp import NamedTemporaryFile from django.core.files.uploadedfile import ( InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile, UploadedFile, ) from django.test import override_settings try: from PIL import Image, features HAS_WEBP = features.check('webp') except ImportError: Image = None HAS_WEBP = False else: from django.core.files import images class FileTests(unittest.TestCase): def test_unicode_uploadedfile_name(self): uf = UploadedFile(name='¿Cómo?', content_type='text') self.assertIs(type(repr(uf)), str) def test_unicode_file_name(self): f = File(None, 'djángö') self.assertIs(type(repr(f)), str) def test_context_manager(self): orig_file = tempfile.TemporaryFile() base_file = File(orig_file) with base_file as f: self.assertIs(base_file, f) self.assertFalse(f.closed) self.assertTrue(f.closed) self.assertTrue(orig_file.closed) def test_open_resets_opened_file_to_start_and_returns_context_manager(self): file = File(BytesIO(b'content')) file.read() with file.open() as f: self.assertEqual(f.read(), b'content') def test_open_reopens_closed_file_and_returns_context_manager(self): temporary_file = tempfile.NamedTemporaryFile(delete=False) file = File(temporary_file) try: file.close() with file.open() as f: self.assertFalse(f.closed) finally: # remove temporary file os.unlink(file.name) def test_namedtemporaryfile_closes(self): """ The symbol django.core.files.NamedTemporaryFile is assigned as a different class on different operating systems. In any case, the result should minimally mock some of the API of tempfile.NamedTemporaryFile from the Python standard library. """ tempfile = NamedTemporaryFile() self.assertTrue(hasattr(tempfile, "closed")) self.assertFalse(tempfile.closed) tempfile.close() self.assertTrue(tempfile.closed) def test_file_mode(self): # Should not set mode to None if it is not present. # See #14681, stdlib gzip module crashes if mode is set to None file = SimpleUploadedFile("mode_test.txt", b"content") self.assertFalse(hasattr(file, 'mode')) gzip.GzipFile(fileobj=file) def test_file_iteration(self): """ File objects should yield lines when iterated over. Refs #22107. """ file = File(BytesIO(b'one\ntwo\nthree')) self.assertEqual(list(file), [b'one\n', b'two\n', b'three']) def test_file_iteration_windows_newlines(self): """ #8149 - File objects with \r\n line endings should yield lines when iterated over. """ f = File(BytesIO(b'one\r\ntwo\r\nthree')) self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three']) def test_file_iteration_mac_newlines(self): """ #8149 - File objects with \r line endings should yield lines when iterated over. """ f = File(BytesIO(b'one\rtwo\rthree')) self.assertEqual(list(f), [b'one\r', b'two\r', b'three']) def test_file_iteration_mixed_newlines(self): f = File(BytesIO(b'one\rtwo\nthree\r\nfour')) self.assertEqual(list(f), [b'one\r', b'two\n', b'three\r\n', b'four']) def test_file_iteration_with_unix_newline_at_chunk_boundary(self): f = File(BytesIO(b'one\ntwo\nthree')) # Set chunk size to create a boundary after \n: # b'one\n... 
# ^ f.DEFAULT_CHUNK_SIZE = 4 self.assertEqual(list(f), [b'one\n', b'two\n', b'three']) def test_file_iteration_with_windows_newline_at_chunk_boundary(self): f = File(BytesIO(b'one\r\ntwo\r\nthree')) # Set chunk size to create a boundary between \r and \n: # b'one\r\n... # ^ f.DEFAULT_CHUNK_SIZE = 4 self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three']) def test_file_iteration_with_mac_newline_at_chunk_boundary(self): f = File(BytesIO(b'one\rtwo\rthree')) # Set chunk size to create a boundary after \r: # b'one\r... # ^ f.DEFAULT_CHUNK_SIZE = 4 self.assertEqual(list(f), [b'one\r', b'two\r', b'three']) def test_file_iteration_with_text(self): f = File(StringIO('one\ntwo\nthree')) self.assertEqual(list(f), ['one\n', 'two\n', 'three']) def test_readable(self): with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file: self.assertTrue(test_file.readable()) self.assertFalse(test_file.readable()) def test_writable(self): with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file: self.assertTrue(test_file.writable()) self.assertFalse(test_file.writable()) with tempfile.TemporaryFile('rb') as temp, File(temp, name='something.txt') as test_file: self.assertFalse(test_file.writable()) def test_seekable(self): with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file: self.assertTrue(test_file.seekable()) self.assertFalse(test_file.seekable()) def test_io_wrapper(self): content = "vive l'été\n" with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file: test_file.write(content.encode()) test_file.seek(0) wrapper = TextIOWrapper(test_file, 'utf-8', newline='\n') self.assertEqual(wrapper.read(), content) wrapper.write(content) wrapper.seek(0) self.assertEqual(wrapper.read(), content * 2) test_file = wrapper.detach() test_file.seek(0) self.assertEqual(test_file.read(), (content * 2).encode()) def test_exclusive_lock(self): file_path = Path(__file__).parent / 'test.png' with open(file_path) as f1, open(file_path) as f2: self.assertIs(locks.lock(f1, locks.LOCK_EX), True) self.assertIs(locks.lock(f2, locks.LOCK_EX | locks.LOCK_NB), False) self.assertIs(locks.lock(f2, locks.LOCK_SH | locks.LOCK_NB), False) self.assertIs(locks.unlock(f1), True) def test_shared_lock(self): file_path = Path(__file__).parent / 'test.png' with open(file_path) as f1, open(file_path) as f2: self.assertIs(locks.lock(f1, locks.LOCK_SH), True) self.assertIs(locks.lock(f2, locks.LOCK_SH | locks.LOCK_NB), True) self.assertIs(locks.unlock(f1), True) self.assertIs(locks.unlock(f2), True) class NoNameFileTestCase(unittest.TestCase): """ Other examples of unnamed files may be tempfile.SpooledTemporaryFile or urllib.urlopen() """ def test_noname_file_default_name(self): self.assertIsNone(File(BytesIO(b'A file with no name')).name) def test_noname_file_get_size(self): self.assertEqual(File(BytesIO(b'A file with no name')).size, 19) class ContentFileTestCase(unittest.TestCase): def test_content_file_default_name(self): self.assertIsNone(ContentFile(b"content").name) def test_content_file_custom_name(self): """ The constructor of ContentFile accepts 'name' (#16590). """ name = "I can have a name too!" self.assertEqual(ContentFile(b"content", name=name).name, name) def test_content_file_input_type(self): """ ContentFile can accept both bytes and strings and the retrieved content is of the same type. 
""" self.assertIsInstance(ContentFile(b"content").read(), bytes) self.assertIsInstance(ContentFile("español").read(), str) def test_open_resets_file_to_start_and_returns_context_manager(self): file = ContentFile(b'content') with file.open() as f: self.assertEqual(f.read(), b'content') with file.open() as f: self.assertEqual(f.read(), b'content') def test_size_changing_after_writing(self): """ContentFile.size changes after a write().""" f = ContentFile('') self.assertEqual(f.size, 0) f.write('Test ') f.write('string') self.assertEqual(f.size, 11) with f.open() as fh: self.assertEqual(fh.read(), 'Test string') class InMemoryUploadedFileTests(unittest.TestCase): def test_open_resets_file_to_start_and_returns_context_manager(self): uf = InMemoryUploadedFile(StringIO('1'), '', 'test', 'text/plain', 1, 'utf8') uf.read() with uf.open() as f: self.assertEqual(f.read(), '1') class TemporaryUploadedFileTests(unittest.TestCase): def test_extension_kept(self): """The temporary file name has the same suffix as the original file.""" with TemporaryUploadedFile('test.txt', 'text/plain', 1, 'utf8') as temp_file: self.assertTrue(temp_file.file.name.endswith('.upload.txt')) def test_file_upload_temp_dir_pathlib(self): with tempfile.TemporaryDirectory() as tmp_dir: with override_settings(FILE_UPLOAD_TEMP_DIR=Path(tmp_dir)): with TemporaryUploadedFile('test.txt', 'text/plain', 1, 'utf-8') as temp_file: self.assertTrue(os.path.exists(temp_file.file.name)) class DimensionClosingBug(unittest.TestCase): """ get_image_dimensions() properly closes files (#8817) """ @unittest.skipUnless(Image, "Pillow not installed") def test_not_closing_of_files(self): """ Open files passed into get_image_dimensions() should stay opened. """ empty_io = BytesIO() try: images.get_image_dimensions(empty_io) finally: self.assertTrue(not empty_io.closed) @unittest.skipUnless(Image, "Pillow not installed") def test_closing_of_filenames(self): """ get_image_dimensions() called with a filename should closed the file. """ # We need to inject a modified open() builtin into the images module # that checks if the file was closed properly if the function is # called with a filename instead of a file object. # get_image_dimensions will call our catching_open instead of the # regular builtin one. class FileWrapper: _closed = [] def __init__(self, f): self.f = f def __getattr__(self, name): return getattr(self.f, name) def close(self): self._closed.append(True) self.f.close() def catching_open(*args): return FileWrapper(open(*args)) images.open = catching_open try: images.get_image_dimensions(os.path.join(os.path.dirname(__file__), "test1.png")) finally: del images.open self.assertTrue(FileWrapper._closed) class InconsistentGetImageDimensionsBug(unittest.TestCase): """ get_image_dimensions() works properly after various calls using a file handler (#11158) """ @unittest.skipUnless(Image, "Pillow not installed") def test_multiple_calls(self): """ Multiple calls of get_image_dimensions() should return the same size. 
""" img_path = os.path.join(os.path.dirname(__file__), "test.png") with open(img_path, 'rb') as fh: image = images.ImageFile(fh) image_pil = Image.open(fh) size_1 = images.get_image_dimensions(image) size_2 = images.get_image_dimensions(image) self.assertEqual(image_pil.size, size_1) self.assertEqual(size_1, size_2) @unittest.skipUnless(Image, "Pillow not installed") def test_bug_19457(self): """ Regression test for #19457 get_image_dimensions fails on some pngs, while Image.size is working good on them """ img_path = os.path.join(os.path.dirname(__file__), "magic.png") size = images.get_image_dimensions(img_path) with open(img_path, 'rb') as fh: self.assertEqual(size, Image.open(fh).size) @unittest.skipUnless(Image, "Pillow not installed") class GetImageDimensionsTests(unittest.TestCase): def test_invalid_image(self): """ get_image_dimensions() should return (None, None) for the dimensions of invalid images (#24441). brokenimg.png is not a valid image and it has been generated by: $ echo "123" > brokenimg.png """ img_path = os.path.join(os.path.dirname(__file__), "brokenimg.png") with open(img_path, 'rb') as fh: size = images.get_image_dimensions(fh) self.assertEqual(size, (None, None)) def test_valid_image(self): """ get_image_dimensions() should catch struct.error while feeding the PIL Image parser (#24544). Emulates the Parser feed error. Since the error is raised on every feed attempt, the resulting image size should be invalid: (None, None). """ img_path = os.path.join(os.path.dirname(__file__), "test.png") with mock.patch('PIL.ImageFile.Parser.feed', side_effect=struct.error): with open(img_path, 'rb') as fh: size = images.get_image_dimensions(fh) self.assertEqual(size, (None, None)) def test_missing_file(self): size = images.get_image_dimensions('missing.png') self.assertEqual(size, (None, None)) @unittest.skipUnless(HAS_WEBP, 'WEBP not installed') def test_webp(self): img_path = os.path.join(os.path.dirname(__file__), 'test.webp') with open(img_path, 'rb') as fh: size = images.get_image_dimensions(fh) self.assertEqual(size, (540, 405)) class FileMoveSafeTests(unittest.TestCase): def test_file_move_overwrite(self): handle_a, self.file_a = tempfile.mkstemp() handle_b, self.file_b = tempfile.mkstemp() # file_move_safe() raises OSError if the destination file exists and # allow_overwrite is False. with self.assertRaises(FileExistsError): file_move_safe(self.file_a, self.file_b, allow_overwrite=False) # should allow it and continue on if allow_overwrite is True self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True)) os.close(handle_a) os.close(handle_b) def test_file_move_copystat_cifs(self): """ file_move_safe() ignores a copystat() EPERM PermissionError. This happens when the destination filesystem is CIFS, for example. """ copystat_EACCES_error = PermissionError(errno.EACCES, 'msg') copystat_EPERM_error = PermissionError(errno.EPERM, 'msg') handle_a, self.file_a = tempfile.mkstemp() handle_b, self.file_b = tempfile.mkstemp() try: # This exception is required to reach the copystat() call in # file_safe_move(). with mock.patch('django.core.files.move.os.rename', side_effect=OSError()): # An error besides EPERM isn't ignored. with mock.patch('django.core.files.move.copystat', side_effect=copystat_EACCES_error): with self.assertRaises(PermissionError): file_move_safe(self.file_a, self.file_b, allow_overwrite=True) # EPERM is ignored. 
with mock.patch('django.core.files.move.copystat', side_effect=copystat_EPERM_error): self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True)) finally: os.close(handle_a) os.close(handle_b) class SpooledTempTests(unittest.TestCase): def test_in_memory_spooled_temp(self): with tempfile.SpooledTemporaryFile() as temp: temp.write(b"foo bar baz quux\n") django_file = File(temp, name="something.txt") self.assertEqual(django_file.size, 17) def test_written_spooled_temp(self): with tempfile.SpooledTemporaryFile(max_size=4) as temp: temp.write(b"foo bar baz quux\n") django_file = File(temp, name="something.txt") self.assertEqual(django_file.size, 17)
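

# A minimal usage sketch for file_move_safe(), whose contract the tests above
# pin down: it refuses to overwrite an existing destination unless
# allow_overwrite=True. It relies only on names already imported at the top of
# this module plus the stdlib; the helper name itself is illustrative.
def _file_move_safe_usage_sketch():
    src_fd, src = tempfile.mkstemp()
    dst_fd, dst = tempfile.mkstemp()
    os.close(src_fd)
    os.close(dst_fd)
    try:
        # The destination already exists, so overwriting must be requested
        # explicitly; afterwards `src` is gone and `dst` holds its content.
        file_move_safe(src, dst, allow_overwrite=True)
    finally:
        if os.path.exists(src):
            os.remove(src)
        os.remove(dst)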
24a7024305b3bb63eb1515b4a77f876b05ad92a8861819362d535597709dec29
import decimal import enum import json import unittest import uuid from django import forms from django.core import checks, exceptions, serializers, validators from django.core.exceptions import FieldError from django.core.management import call_command from django.db import IntegrityError, connection, models from django.db.models.expressions import Exists, OuterRef, RawSQL, Value from django.db.models.functions import Cast, JSONObject, Upper from django.test import TransactionTestCase, modify_settings, override_settings from django.test.utils import isolate_apps from django.utils import timezone from . import ( PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase, ) from .models import ( ArrayEnumModel, ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel, NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel, PostgreSQLModel, Tag, ) try: from psycopg2.extras import NumericRange from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.expressions import ArraySubquery from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields.array import ( IndexTransform, SliceTransform, ) from django.contrib.postgres.forms import ( SimpleArrayField, SplitArrayField, SplitArrayWidget, ) except ImportError: pass @isolate_apps('postgres_tests') class BasicTests(PostgreSQLSimpleTestCase): def test_get_field_display(self): class MyModel(PostgreSQLModel): field = ArrayField( models.CharField(max_length=16), choices=[ ['Media', [(['vinyl', 'cd'], 'Audio')]], (('mp3', 'mp4'), 'Digital'), ], ) tests = ( (['vinyl', 'cd'], 'Audio'), (('mp3', 'mp4'), 'Digital'), (('a', 'b'), "('a', 'b')"), (['c', 'd'], "['c', 'd']"), ) for value, display in tests: with self.subTest(value=value, display=display): instance = MyModel(field=value) self.assertEqual(instance.get_field_display(), display) def test_get_field_display_nested_array(self): class MyModel(PostgreSQLModel): field = ArrayField( ArrayField(models.CharField(max_length=16)), choices=[ [ 'Media', [([['vinyl', 'cd'], ('x',)], 'Audio')], ], ((['mp3'], ('mp4',)), 'Digital'), ], ) tests = ( ([['vinyl', 'cd'], ('x',)], 'Audio'), ((['mp3'], ('mp4',)), 'Digital'), ((('a', 'b'), ('c',)), "(('a', 'b'), ('c',))"), ([['a', 'b'], ['c']], "[['a', 'b'], ['c']]"), ) for value, display in tests: with self.subTest(value=value, display=display): instance = MyModel(field=value) self.assertEqual(instance.get_field_display(), display) class TestSaveLoad(PostgreSQLTestCase): def test_integer(self): instance = IntegerArrayModel(field=[1, 2, 3]) instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_char(self): instance = CharArrayModel(field=['hello', 'goodbye']) instance.save() loaded = CharArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_dates(self): instance = DateTimeArrayModel( datetimes=[timezone.now()], dates=[timezone.now().date()], times=[timezone.now().time()], ) instance.save() loaded = DateTimeArrayModel.objects.get() self.assertEqual(instance.datetimes, loaded.datetimes) self.assertEqual(instance.dates, loaded.dates) self.assertEqual(instance.times, loaded.times) def test_tuples(self): instance = IntegerArrayModel(field=(1,)) instance.save() loaded = IntegerArrayModel.objects.get() self.assertSequenceEqual(instance.field, loaded.field) def test_integers_passed_as_strings(self): # This checks that get_prep_value is deferred properly instance = IntegerArrayModel(field=['1']) 
instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(loaded.field, [1]) def test_default_null(self): instance = NullableIntegerArrayModel() instance.save() loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk) self.assertIsNone(loaded.field) self.assertEqual(instance.field, loaded.field) def test_null_handling(self): instance = NullableIntegerArrayModel(field=None) instance.save() loaded = NullableIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) instance = IntegerArrayModel(field=None) with self.assertRaises(IntegrityError): instance.save() def test_nested(self): instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]]) instance.save() loaded = NestedIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_other_array_types(self): instance = OtherTypesArrayModel( ips=['192.168.0.1', '::1'], uuids=[uuid.uuid4()], decimals=[decimal.Decimal(1.25), 1.75], tags=[Tag(1), Tag(2), Tag(3)], json=[{'a': 1}, {'b': 2}], int_ranges=[NumericRange(10, 20), NumericRange(30, 40)], bigint_ranges=[ NumericRange(7000000000, 10000000000), NumericRange(50000000000, 70000000000), ] ) instance.save() loaded = OtherTypesArrayModel.objects.get() self.assertEqual(instance.ips, loaded.ips) self.assertEqual(instance.uuids, loaded.uuids) self.assertEqual(instance.decimals, loaded.decimals) self.assertEqual(instance.tags, loaded.tags) self.assertEqual(instance.json, loaded.json) self.assertEqual(instance.int_ranges, loaded.int_ranges) self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges) def test_null_from_db_value_handling(self): instance = OtherTypesArrayModel.objects.create( ips=['192.168.0.1', '::1'], uuids=[uuid.uuid4()], decimals=[decimal.Decimal(1.25), 1.75], tags=None, ) instance.refresh_from_db() self.assertIsNone(instance.tags) self.assertEqual(instance.json, []) self.assertIsNone(instance.int_ranges) self.assertIsNone(instance.bigint_ranges) def test_model_set_on_base_field(self): instance = IntegerArrayModel() field = instance._meta.get_field('field') self.assertEqual(field.model, IntegerArrayModel) self.assertEqual(field.base_field.model, IntegerArrayModel) def test_nested_nullable_base_field(self): instance = NullableIntegerArrayModel.objects.create( field_nested=[[None, None], [None, None]], ) self.assertEqual(instance.field_nested, [[None, None], [None, None]]) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = NullableIntegerArrayModel.objects.bulk_create([ NullableIntegerArrayModel(order=1, field=[1]), NullableIntegerArrayModel(order=2, field=[2]), NullableIntegerArrayModel(order=3, field=[2, 3]), NullableIntegerArrayModel(order=4, field=[20, 30, 40]), NullableIntegerArrayModel(order=5, field=None), ]) def test_empty_list(self): NullableIntegerArrayModel.objects.create(field=[]) obj = NullableIntegerArrayModel.objects.annotate( empty_array=models.Value([], output_field=ArrayField(models.IntegerField())), ).filter(field=models.F('empty_array')).get() self.assertEqual(obj.field, []) self.assertEqual(obj.empty_array, []) def test_exact(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__exact=[1]), self.objs[:1] ) def test_exact_with_expression(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__exact=[Value(1)]), self.objs[:1], ) def test_exact_charfield(self): instance = CharArrayModel.objects.create(field=['text']) self.assertSequenceEqual( CharArrayModel.objects.filter(field=['text']), [instance] ) def 
test_exact_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]), [instance] ) def test_isnull(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__isnull=True), self.objs[-1:] ) def test_gt(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__gt=[0]), self.objs[:4] ) def test_lt(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__lt=[2]), self.objs[:1] ) def test_in(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]), self.objs[:2] ) def test_in_subquery(self): IntegerArrayModel.objects.create(field=[2, 3]) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( field__in=IntegerArrayModel.objects.all().values_list('field', flat=True) ), self.objs[2:3] ) @unittest.expectedFailure def test_in_including_F_object(self): # This test asserts that Array objects passed to filters can be # constructed to contain F objects. This currently doesn't work as the # psycopg2 mogrify method that generates the ARRAY() syntax is # expecting literals, not column references (#27095). self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]), self.objs[:2] ) def test_in_as_F_object(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]), self.objs[:4] ) def test_contained_by(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]), self.objs[:2] ) def test_contained_by_including_F_object(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('order'), 2]), self.objs[:3], ) def test_contains(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contains=[2]), self.objs[1:3] ) def test_contains_subquery(self): IntegerArrayModel.objects.create(field=[2, 3]) inner_qs = IntegerArrayModel.objects.values_list('field', flat=True) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contains=inner_qs[:1]), self.objs[2:3], ) inner_qs = IntegerArrayModel.objects.filter(field__contains=OuterRef('field')) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(Exists(inner_qs)), self.objs[1:3], ) def test_contains_including_expression(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( field__contains=[2, Value(6) / Value(2)], ), self.objs[2:3], ) def test_icontains(self): # Using the __icontains lookup with ArrayField is inefficient. 
instance = CharArrayModel.objects.create(field=['FoO']) self.assertSequenceEqual( CharArrayModel.objects.filter(field__icontains='foo'), [instance] ) def test_contains_charfield(self): # Regression for #22907 self.assertSequenceEqual( CharArrayModel.objects.filter(field__contains=['text']), [] ) def test_contained_by_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__contained_by=['text']), [] ) def test_overlap_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__overlap=['text']), [] ) def test_overlap_charfield_including_expression(self): obj_1 = CharArrayModel.objects.create(field=['TEXT', 'lower text']) obj_2 = CharArrayModel.objects.create(field=['lower text', 'TEXT']) CharArrayModel.objects.create(field=['lower text', 'text']) self.assertSequenceEqual( CharArrayModel.objects.filter(field__overlap=[ Upper(Value('text')), 'other', ]), [obj_1, obj_2], ) def test_lookups_autofield_array(self): qs = NullableIntegerArrayModel.objects.filter( field__0__isnull=False, ).values('field__0').annotate( arrayagg=ArrayAgg('id'), ).order_by('field__0') tests = ( ('contained_by', [self.objs[1].pk, self.objs[2].pk, 0], [2]), ('contains', [self.objs[2].pk], [2]), ('exact', [self.objs[3].pk], [20]), ('overlap', [self.objs[1].pk, self.objs[3].pk], [2, 20]), ) for lookup, value, expected in tests: with self.subTest(lookup=lookup): self.assertSequenceEqual( qs.filter( **{'arrayagg__' + lookup: value}, ).values_list('field__0', flat=True), expected, ) def test_index(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0=2), self.objs[1:3] ) def test_index_chained(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0__lt=3), self.objs[0:3] ) def test_index_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0=1), [instance] ) @unittest.expectedFailure def test_index_used_on_nested_data(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), [instance] ) def test_index_transform_expression(self): expr = RawSQL("string_to_array(%s, ';')", ['1;2']) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( field__0=Cast( IndexTransform(1, models.IntegerField, expr), output_field=models.IntegerField(), ), ), self.objs[:1], ) def test_index_annotation(self): qs = NullableIntegerArrayModel.objects.annotate(second=models.F('field__1')) self.assertCountEqual( qs.values_list('second', flat=True), [None, None, None, 3, 30], ) def test_overlap(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]), self.objs[0:3] ) def test_len(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__len__lte=2), self.objs[0:3] ) def test_len_empty_array(self): obj = NullableIntegerArrayModel.objects.create(field=[]) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__len=0), [obj] ) def test_slice(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_1=[2]), self.objs[1:3] ) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), self.objs[2:3] ) def test_order_by_slice(self): more_objs = ( NullableIntegerArrayModel.objects.create(field=[1, 637]), NullableIntegerArrayModel.objects.create(field=[2, 1]), NullableIntegerArrayModel.objects.create(field=[3, 
-98123]), NullableIntegerArrayModel.objects.create(field=[4, 2]), ) self.assertSequenceEqual( NullableIntegerArrayModel.objects.order_by('field__1'), [ more_objs[2], more_objs[1], more_objs[3], self.objs[2], self.objs[3], more_objs[0], self.objs[4], self.objs[1], self.objs[0], ] ) @unittest.expectedFailure def test_slice_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), [instance] ) def test_slice_transform_expression(self): expr = RawSQL("string_to_array(%s, ';')", ['9;2;3']) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_2=SliceTransform(2, 3, expr)), self.objs[2:3], ) def test_slice_annotation(self): qs = NullableIntegerArrayModel.objects.annotate( first_two=models.F('field__0_2'), ) self.assertCountEqual( qs.values_list('first_two', flat=True), [None, [1], [2], [2, 3], [20, 30]], ) def test_usage_in_subquery(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( id__in=NullableIntegerArrayModel.objects.filter(field__len=3) ), [self.objs[3]] ) def test_enum_lookup(self): class TestEnum(enum.Enum): VALUE_1 = 'value_1' instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1]) self.assertSequenceEqual( ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]), [instance] ) def test_unsupported_lookup(self): msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted." with self.assertRaisesMessage(FieldError, msg): list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2])) msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted." with self.assertRaisesMessage(FieldError, msg): list(NullableIntegerArrayModel.objects.filter(field__0bar=[2])) def test_grouping_by_annotations_with_array_field_param(self): value = models.Value([1], output_field=ArrayField(models.IntegerField())) self.assertEqual( NullableIntegerArrayModel.objects.annotate( array_length=models.Func( value, 1, function='ARRAY_LENGTH', output_field=models.IntegerField(), ), ).values('array_length').annotate( count=models.Count('pk'), ).get()['array_length'], 1, ) def test_filter_by_array_subquery(self): inner_qs = NullableIntegerArrayModel.objects.filter( field__len=models.OuterRef('field__len'), ).values('field') self.assertSequenceEqual( NullableIntegerArrayModel.objects.alias( same_sized_fields=ArraySubquery(inner_qs), ).filter(same_sized_fields__len__gt=1), self.objs[0:2], ) def test_annotated_array_subquery(self): inner_qs = NullableIntegerArrayModel.objects.exclude( pk=models.OuterRef('pk') ).values('order') self.assertSequenceEqual( NullableIntegerArrayModel.objects.annotate( sibling_ids=ArraySubquery(inner_qs), ).get(order=1).sibling_ids, [2, 3, 4, 5], ) def test_group_by_with_annotated_array_subquery(self): inner_qs = NullableIntegerArrayModel.objects.exclude( pk=models.OuterRef('pk') ).values('order') self.assertSequenceEqual( NullableIntegerArrayModel.objects.annotate( sibling_ids=ArraySubquery(inner_qs), sibling_count=models.Max('sibling_ids__len'), ).values_list('sibling_count', flat=True), [len(self.objs) - 1] * len(self.objs), ) def test_annotated_ordered_array_subquery(self): inner_qs = NullableIntegerArrayModel.objects.order_by('-order').values('order') self.assertSequenceEqual( NullableIntegerArrayModel.objects.annotate( ids=ArraySubquery(inner_qs), ).first().ids, [5, 4, 3, 2, 1], ) def test_annotated_array_subquery_with_json_objects(self): inner_qs = 
NullableIntegerArrayModel.objects.exclude( pk=models.OuterRef('pk') ).values(json=JSONObject(order='order', field='field')) siblings_json = NullableIntegerArrayModel.objects.annotate( siblings_json=ArraySubquery(inner_qs), ).values_list('siblings_json', flat=True).get(order=1) self.assertSequenceEqual( siblings_json, [ {'field': [2], 'order': 2}, {'field': [2, 3], 'order': 3}, {'field': [20, 30, 40], 'order': 4}, {'field': None, 'order': 5}, ], ) class TestDateTimeExactQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): now = timezone.now() cls.datetimes = [now] cls.dates = [now.date()] cls.times = [now.time()] cls.objs = [ DateTimeArrayModel.objects.create(datetimes=cls.datetimes, dates=cls.dates, times=cls.times), ] def test_exact_datetimes(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(datetimes=self.datetimes), self.objs ) def test_exact_dates(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(dates=self.dates), self.objs ) def test_exact_times(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(times=self.times), self.objs ) class TestOtherTypesExactQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.ips = ['192.168.0.1', '::1'] cls.uuids = [uuid.uuid4()] cls.decimals = [decimal.Decimal(1.25), 1.75] cls.tags = [Tag(1), Tag(2), Tag(3)] cls.objs = [ OtherTypesArrayModel.objects.create( ips=cls.ips, uuids=cls.uuids, decimals=cls.decimals, tags=cls.tags, ) ] def test_exact_ip_addresses(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(ips=self.ips), self.objs ) def test_exact_uuids(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(uuids=self.uuids), self.objs ) def test_exact_decimals(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(decimals=self.decimals), self.objs ) def test_exact_tags(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(tags=self.tags), self.objs ) @isolate_apps('postgres_tests') class TestChecks(PostgreSQLSimpleTestCase): def test_field_checks(self): class MyModel(PostgreSQLModel): field = ArrayField(models.CharField()) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) # The inner CharField is missing a max_length. self.assertEqual(errors[0].id, 'postgres.E001') self.assertIn('max_length', errors[0].msg) def test_invalid_base_fields(self): class MyModel(PostgreSQLModel): field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel')) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) self.assertEqual(errors[0].id, 'postgres.E002') def test_invalid_default(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=[]) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "ArrayField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." ), hint='Use a callable instead, e.g., use `list` instead of `[]`.', obj=MyModel._meta.get_field('field'), id='fields.E010', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=list) model = MyModel() self.assertEqual(model.check(), []) def test_valid_default_none(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=None) model = MyModel() self.assertEqual(model.check(), []) def test_nested_field_checks(self): """ Nested ArrayFields are permitted. 
""" class MyModel(PostgreSQLModel): field = ArrayField(ArrayField(models.CharField())) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) # The inner CharField is missing a max_length. self.assertEqual(errors[0].id, 'postgres.E001') self.assertIn('max_length', errors[0].msg) def test_choices_tuple_list(self): class MyModel(PostgreSQLModel): field = ArrayField( models.CharField(max_length=16), choices=[ [ 'Media', [(['vinyl', 'cd'], 'Audio'), (('vhs', 'dvd'), 'Video')], ], (['mp3', 'mp4'], 'Digital'), ], ) self.assertEqual(MyModel._meta.get_field('field').check(), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests") class TestMigrations(TransactionTestCase): available_apps = ['postgres_tests'] def test_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(type(new.base_field), type(field.base_field)) self.assertIsNot(new.base_field, field.base_field) def test_deconstruct_with_size(self): field = ArrayField(models.IntegerField(), size=3) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.size, field.size) def test_deconstruct_args(self): field = ArrayField(models.CharField(max_length=20)) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.base_field.max_length, field.base_field.max_length) def test_subclass_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField') field = ArrayFieldSubclass() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass') @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_default_migrations", }) def test_adding_field_with_default(self): # See #22962 table_name = 'postgres_tests_integerarraydefaultmodel' with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: self.assertIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_index_migrations", }) def test_adding_arrayfield_with_index(self): """ ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes. """ table_name = 'postgres_tests_chartextarrayindexmodel' call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: like_constraint_columns_list = [ v['columns'] for k, v in list(connection.introspection.get_constraints(cursor, table_name).items()) if k.endswith('_like') ] # Only the CharField should have a LIKE index. self.assertEqual(like_constraint_columns_list, [['char2']]) # All fields should have regular indexes. 
with connection.cursor() as cursor: indexes = [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table_name).values() if c['index'] and len(c['columns']) == 1 ] self.assertIn('char', indexes) self.assertIn('char2', indexes) self.assertIn('text', indexes) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) class TestSerialization(PostgreSQLSimpleTestCase): test_data = ( '[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]' ) def test_dumping(self): instance = IntegerArrayModel(field=[1, 2, None]) data = serializers.serialize('json', [instance]) self.assertEqual(json.loads(data), json.loads(self.test_data)) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.field, [1, 2, None]) class TestValidation(PostgreSQLSimpleTestCase): def test_unbounded(self): field = ArrayField(models.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, None], None) self.assertEqual(cm.exception.code, 'item_invalid') self.assertEqual( cm.exception.message % cm.exception.params, 'Item 2 in the array did not validate: This field cannot be null.' ) def test_blank_true(self): field = ArrayField(models.IntegerField(blank=True, null=True)) # This should not raise a validation error field.clean([1, None], None) def test_with_size(self): field = ArrayField(models.IntegerField(), size=3) field.clean([1, 2, 3], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, 2, 3, 4], None) self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.') def test_nested_array_mismatch(self): field = ArrayField(ArrayField(models.IntegerField())) field.clean([[1, 2], [3, 4]], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([[1, 2], [3, 4, 5]], None) self.assertEqual(cm.exception.code, 'nested_array_mismatch') self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.') def test_with_base_field_error_params(self): field = ArrayField(models.CharField(max_length=2)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['abc'], None) self.assertEqual(len(cm.exception.error_list), 1) exception = cm.exception.error_list[0] self.assertEqual( exception.message, 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).' ) self.assertEqual(exception.code, 'item_invalid') self.assertEqual(exception.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3}) def test_with_validators(self): field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)])) field.clean([1, 2], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([0], None) self.assertEqual(len(cm.exception.error_list), 1) exception = cm.exception.error_list[0] self.assertEqual( exception.message, 'Item 1 in the array did not validate: Ensure this value is greater than or equal to 1.' 
) self.assertEqual(exception.code, 'item_invalid') self.assertEqual(exception.params, {'nth': 1, 'value': 0, 'limit_value': 1, 'show_value': 0}) class TestSimpleFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = SimpleArrayField(forms.CharField()) value = field.clean('a,b,c') self.assertEqual(value, ['a', 'b', 'c']) def test_to_python_fail(self): field = SimpleArrayField(forms.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,9') self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a whole number.') def test_validate_fail(self): field = SimpleArrayField(forms.CharField(required=True)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,') self.assertEqual(cm.exception.messages[0], 'Item 3 in the array did not validate: This field is required.') def test_validate_fail_base_field_error_params(self): field = SimpleArrayField(forms.CharField(max_length=2)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('abc,c,defg') errors = cm.exception.error_list self.assertEqual(len(errors), 2) first_error = errors[0] self.assertEqual( first_error.message, 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).' ) self.assertEqual(first_error.code, 'item_invalid') self.assertEqual(first_error.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3}) second_error = errors[1] self.assertEqual( second_error.message, 'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).' ) self.assertEqual(second_error.code, 'item_invalid') self.assertEqual(second_error.params, {'nth': 3, 'value': 'defg', 'limit_value': 2, 'show_value': 4}) def test_validators_fail(self): field = SimpleArrayField(forms.RegexField('[a-e]{2}')) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,bc,de') self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a valid value.') def test_delimiter(self): field = SimpleArrayField(forms.CharField(), delimiter='|') value = field.clean('a|b|c') self.assertEqual(value, ['a', 'b', 'c']) def test_delimiter_with_nesting(self): field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|') value = field.clean('a,b|c,d') self.assertEqual(value, [['a', 'b'], ['c', 'd']]) def test_prepare_value(self): field = SimpleArrayField(forms.CharField()) value = field.prepare_value(['a', 'b', 'c']) self.assertEqual(value, 'a,b,c') def test_max_length(self): field = SimpleArrayField(forms.CharField(), max_length=2) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.') def test_min_length(self): field = SimpleArrayField(forms.CharField(), min_length=4) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.') def test_required(self): field = SimpleArrayField(forms.CharField(), required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('') self.assertEqual(cm.exception.messages[0], 'This field is required.') def test_model_field_formfield(self): model_field = ArrayField(models.CharField(max_length=27)) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertIsInstance(form_field.base_field, forms.CharField) 
self.assertEqual(form_field.base_field.max_length, 27) def test_model_field_formfield_size(self): model_field = ArrayField(models.CharField(max_length=27), size=4) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertEqual(form_field.max_length, 4) def test_model_field_choices(self): model_field = ArrayField(models.IntegerField(choices=((1, 'A'), (2, 'B')))) form_field = model_field.formfield() self.assertEqual(form_field.clean('1,2'), [1, 2]) def test_already_converted_value(self): field = SimpleArrayField(forms.CharField()) vals = ['a', 'b', 'c'] self.assertEqual(field.clean(vals), vals) def test_has_changed(self): field = SimpleArrayField(forms.IntegerField()) self.assertIs(field.has_changed([1, 2], [1, 2]), False) self.assertIs(field.has_changed([1, 2], '1,2'), False) self.assertIs(field.has_changed([1, 2], '1,2,3'), True) self.assertIs(field.has_changed([1, 2], 'a,b'), True) def test_has_changed_empty(self): field = SimpleArrayField(forms.CharField()) self.assertIs(field.has_changed(None, None), False) self.assertIs(field.has_changed(None, ''), False) self.assertIs(field.has_changed(None, []), False) self.assertIs(field.has_changed([], None), False) self.assertIs(field.has_changed([], ''), False) class TestSplitFormField(PostgreSQLSimpleTestCase): def test_valid(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'} form = SplitForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']}) def test_required(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), required=True, size=3) data = {'array_0': '', 'array_1': '', 'array_2': ''} form = SplitForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['This field is required.']}) def test_remove_trailing_nulls(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True) data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''} form = SplitForm(data) self.assertTrue(form.is_valid(), form.errors) self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']}) def test_remove_trailing_nulls_not_required(self): class SplitForm(forms.Form): array = SplitArrayField( forms.CharField(required=False), size=2, remove_trailing_nulls=True, required=False, ) data = {'array_0': '', 'array_1': ''} form = SplitForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {'array': []}) def test_required_field(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''} form = SplitForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['Item 3 in the array did not validate: This field is required.']}) def test_invalid_integer(self): msg = 'Item 2 in the array did not validate: Ensure this value is less than or equal to 100.' with self.assertRaisesMessage(exceptions.ValidationError, msg): SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101]) # To locate the widget's template. 
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) def test_rendering(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) self.assertHTMLEqual(str(SplitForm()), ''' <tr> <th><label for="id_array_0">Array:</label></th> <td> <input id="id_array_0" name="array_0" type="text" required> <input id="id_array_1" name="array_1" type="text" required> <input id="id_array_2" name="array_2" type="text" required> </td> </tr> ''') def test_invalid_char_length(self): field = SplitArrayField(forms.CharField(max_length=2), size=3) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['abc', 'c', 'defg']) self.assertEqual(cm.exception.messages, [ 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).', 'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).', ]) def test_splitarraywidget_value_omitted_from_data(self): class Form(forms.ModelForm): field = SplitArrayField(forms.IntegerField(), required=False, size=2) class Meta: model = IntegerArrayModel fields = ('field',) form = Form({'field_0': '1', 'field_1': '2'}) self.assertEqual(form.errors, {}) obj = form.save(commit=False) self.assertEqual(obj.field, [1, 2]) def test_splitarrayfield_has_changed(self): class Form(forms.ModelForm): field = SplitArrayField(forms.IntegerField(), required=False, size=2) class Meta: model = IntegerArrayModel fields = ('field',) tests = [ ({}, {'field_0': '', 'field_1': ''}, True), ({'field': None}, {'field_0': '', 'field_1': ''}, True), ({'field': [1]}, {'field_0': '', 'field_1': ''}, True), ({'field': [1]}, {'field_0': '1', 'field_1': '0'}, True), ({'field': [1, 2]}, {'field_0': '1', 'field_1': '2'}, False), ({'field': [1, 2]}, {'field_0': 'a', 'field_1': 'b'}, True), ] for initial, data, expected_result in tests: with self.subTest(initial=initial, data=data): obj = IntegerArrayModel(**initial) form = Form(data, instance=obj) self.assertIs(form.has_changed(), expected_result) def test_splitarrayfield_remove_trailing_nulls_has_changed(self): class Form(forms.ModelForm): field = SplitArrayField(forms.IntegerField(), required=False, size=2, remove_trailing_nulls=True) class Meta: model = IntegerArrayModel fields = ('field',) tests = [ ({}, {'field_0': '', 'field_1': ''}, False), ({'field': None}, {'field_0': '', 'field_1': ''}, False), ({'field': []}, {'field_0': '', 'field_1': ''}, False), ({'field': [1]}, {'field_0': '1', 'field_1': ''}, False), ] for initial, data, expected_result in tests: with self.subTest(initial=initial, data=data): obj = IntegerArrayModel(**initial) form = Form(data, instance=obj) self.assertIs(form.has_changed(), expected_result) class TestSplitFormWidget(PostgreSQLWidgetTestCase): def test_get_context(self): self.assertEqual( SplitArrayWidget(forms.TextInput(), size=2).get_context('name', ['val1', 'val2']), { 'widget': { 'name': 'name', 'is_hidden': False, 'required': False, 'value': "['val1', 'val2']", 'attrs': {}, 'template_name': 'postgres/widgets/split_array.html', 'subwidgets': [ { 'name': 'name_0', 'is_hidden': False, 'required': False, 'value': 'val1', 'attrs': {}, 'template_name': 'django/forms/widgets/text.html', 'type': 'text', }, { 'name': 'name_1', 'is_hidden': False, 'required': False, 'value': 'val2', 'attrs': {}, 'template_name': 'django/forms/widgets/text.html', 'type': 'text', }, ] } } ) def test_checkbox_get_context_attrs(self): context = SplitArrayWidget( forms.CheckboxInput(), size=2, ).get_context('name', [True, False]) 
self.assertEqual(context['widget']['value'], '[True, False]') self.assertEqual( [subwidget['attrs'] for subwidget in context['widget']['subwidgets']], [{'checked': True}, {}] ) def test_render(self): self.check_html( SplitArrayWidget(forms.TextInput(), size=2), 'array', None, """ <input name="array_0" type="text"> <input name="array_1" type="text"> """ ) def test_render_attrs(self): self.check_html( SplitArrayWidget(forms.TextInput(), size=2), 'array', ['val1', 'val2'], attrs={'id': 'foo'}, html=( """ <input id="foo_0" name="array_0" type="text" value="val1"> <input id="foo_1" name="array_1" type="text" value="val2"> """ ) ) def test_value_omitted_from_data(self): widget = SplitArrayWidget(forms.TextInput(), size=2) self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True) self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False) self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False) self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
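# Illustrative-only usage sketch (not part of the original suite): it shows
# how SplitArrayField gathers the per-item values back into one list on
# clean(), and how remove_trailing_nulls drops empty trailing items. The
# helper name below is made up for this sketch.
def _split_array_field_clean_sketch():
    from django import forms
    from django.contrib.postgres.forms import SplitArrayField

    field = SplitArrayField(
        forms.IntegerField(required=False), size=3, remove_trailing_nulls=True,
    )
    # Each item is cleaned by the base IntegerField; the trailing empty value
    # is then stripped because remove_trailing_nulls=True.
    return field.clean(['1', '2', ''])  # -> [1, 2]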
from django.test import modify_settings from . import PostgreSQLTestCase from .models import CharFieldModel, TextFieldModel try: from django.contrib.postgres.search import ( TrigramDistance, TrigramSimilarity, TrigramWordDistance, TrigramWordSimilarity, ) except ImportError: pass @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) class TrigramTest(PostgreSQLTestCase): Model = CharFieldModel @classmethod def setUpTestData(cls): cls.Model.objects.bulk_create([ cls.Model(field='Matthew'), cls.Model(field='Cat sat on mat.'), cls.Model(field='Dog sat on rug.'), ]) def test_trigram_search(self): self.assertQuerysetEqual( self.Model.objects.filter(field__trigram_similar='Mathew'), ['Matthew'], transform=lambda instance: instance.field, ) def test_trigram_word_search(self): obj = self.Model.objects.create( field='Gumby rides on the path of Middlesbrough', ) self.assertSequenceEqual( self.Model.objects.filter(field__trigram_word_similar='Middlesborough'), [obj], ) def test_trigram_similarity(self): search = 'Bat sat on cat.' # Round result of similarity because PostgreSQL 12+ uses greater # precision. self.assertQuerysetEqual( self.Model.objects.filter( field__trigram_similar=search, ).annotate(similarity=TrigramSimilarity('field', search)).order_by('-similarity'), [('Cat sat on mat.', 0.625), ('Dog sat on rug.', 0.333333)], transform=lambda instance: (instance.field, round(instance.similarity, 6)), ordered=True, ) def test_trigram_word_similarity(self): search = 'mat' self.assertSequenceEqual( self.Model.objects.filter( field__trigram_word_similar=search, ).annotate( word_similarity=TrigramWordSimilarity(search, 'field'), ).values('field', 'word_similarity').order_by('-word_similarity'), [ {'field': 'Cat sat on mat.', 'word_similarity': 1.0}, {'field': 'Matthew', 'word_similarity': 0.75}, ], ) def test_trigram_similarity_alternate(self): # Round result of distance because PostgreSQL 12+ uses greater # precision. self.assertQuerysetEqual( self.Model.objects.annotate( distance=TrigramDistance('field', 'Bat sat on cat.'), ).filter(distance__lte=0.7).order_by('distance'), [('Cat sat on mat.', 0.375), ('Dog sat on rug.', 0.666667)], transform=lambda instance: (instance.field, round(instance.distance, 6)), ordered=True, ) def test_trigram_word_similarity_alternate(self): self.assertSequenceEqual( self.Model.objects.annotate( word_distance=TrigramWordDistance('mat', 'field'), ).filter( word_distance__lte=0.7, ).values('field', 'word_distance').order_by('word_distance'), [ {'field': 'Cat sat on mat.', 'word_distance': 0}, {'field': 'Matthew', 'word_distance': 0.25}, ], ) class TrigramTextFieldTest(TrigramTest): """ TextField has the same behavior as CharField regarding trigram lookups. """ Model = TextFieldModel
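# Illustrative-only sketch (not part of the original suite): the
# trigram_similar lookup used above maps to PostgreSQL's `%` operator, which
# compares trigram similarity against the server's
# pg_trgm.similarity_threshold setting (0.3 by default; that default is an
# assumption about the server configuration). An explicit annotation along
# these lines is roughly equivalent. The helper name below is made up for
# this sketch.
def _trigram_threshold_sketch():
    return (
        CharFieldModel.objects
        .annotate(similarity=TrigramSimilarity('field', 'Mathew'))
        .filter(similarity__gt=0.3)
        .order_by('-similarity')
    )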
from django.db import connection from django.db.models import ( CharField, F, Func, IntegerField, OuterRef, Q, Subquery, Value, ) from django.db.models.fields.json import KeyTextTransform, KeyTransform from django.db.models.functions import Cast, Concat, Substr from django.test import skipUnlessDBFeature from django.test.utils import Approximate, ignore_warnings from django.utils import timezone from django.utils.deprecation import RemovedInDjango50Warning from . import PostgreSQLTestCase from .models import AggregateTestModel, HotelReservation, Room, StatTestModel try: from django.contrib.postgres.aggregates import ( ArrayAgg, BitAnd, BitOr, BitXor, BoolAnd, BoolOr, Corr, CovarPop, JSONBAgg, RegrAvgX, RegrAvgY, RegrCount, RegrIntercept, RegrR2, RegrSlope, RegrSXX, RegrSXY, RegrSYY, StatAggregate, StringAgg, ) from django.contrib.postgres.fields import ArrayField except ImportError: pass # psycopg2 is not installed class TestGeneralAggregate(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.aggs = AggregateTestModel.objects.bulk_create([ AggregateTestModel( boolean_field=True, char_field='Foo1', text_field='Text1', integer_field=0, ), AggregateTestModel( boolean_field=False, char_field='Foo2', text_field='Text2', integer_field=1, json_field={'lang': 'pl'}, ), AggregateTestModel( boolean_field=False, char_field='Foo4', text_field='Text4', integer_field=2, json_field={'lang': 'en'}, ), AggregateTestModel( boolean_field=True, char_field='Foo3', text_field='Text3', integer_field=0, json_field={'breed': 'collie'}, ), ]) @ignore_warnings(category=RemovedInDjango50Warning) def test_empty_result_set(self): AggregateTestModel.objects.all().delete() tests = [ (ArrayAgg('char_field'), []), (ArrayAgg('integer_field'), []), (ArrayAgg('boolean_field'), []), (BitAnd('integer_field'), None), (BitOr('integer_field'), None), (BoolAnd('boolean_field'), None), (BoolOr('boolean_field'), None), (JSONBAgg('integer_field'), []), (StringAgg('char_field', delimiter=';'), ''), ] if connection.features.has_bit_xor: tests.append((BitXor('integer_field'), None)) for aggregation, expected_result in tests: with self.subTest(aggregation=aggregation): # Empty result with non-execution optimization. with self.assertNumQueries(0): values = AggregateTestModel.objects.none().aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) # Empty result when query must be executed. with self.assertNumQueries(1): values = AggregateTestModel.objects.aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) def test_default_argument(self): AggregateTestModel.objects.all().delete() tests = [ (ArrayAgg('char_field', default=['<empty>']), ['<empty>']), (ArrayAgg('integer_field', default=[0]), [0]), (ArrayAgg('boolean_field', default=[False]), [False]), (BitAnd('integer_field', default=0), 0), (BitOr('integer_field', default=0), 0), (BoolAnd('boolean_field', default=False), False), (BoolOr('boolean_field', default=False), False), (JSONBAgg('integer_field', default=Value('["<empty>"]')), ['<empty>']), (StringAgg('char_field', delimiter=';', default=Value('<empty>')), '<empty>'), ] if connection.features.has_bit_xor: tests.append((BitXor('integer_field', default=0), 0)) for aggregation, expected_result in tests: with self.subTest(aggregation=aggregation): # Empty result with non-execution optimization. 
with self.assertNumQueries(0): values = AggregateTestModel.objects.none().aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) # Empty result when query must be executed. with self.assertNumQueries(1): values = AggregateTestModel.objects.aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) def test_convert_value_deprecation(self): AggregateTestModel.objects.all().delete() queryset = AggregateTestModel.objects.all() with self.assertWarnsMessage(RemovedInDjango50Warning, ArrayAgg.deprecation_msg): queryset.aggregate(aggregation=ArrayAgg('boolean_field')) with self.assertWarnsMessage(RemovedInDjango50Warning, JSONBAgg.deprecation_msg): queryset.aggregate(aggregation=JSONBAgg('integer_field')) with self.assertWarnsMessage(RemovedInDjango50Warning, StringAgg.deprecation_msg): queryset.aggregate(aggregation=StringAgg('char_field', delimiter=';')) # No warnings raised if default argument provided. self.assertEqual( queryset.aggregate(aggregation=ArrayAgg('boolean_field', default=None)), {'aggregation': None}, ) self.assertEqual( queryset.aggregate(aggregation=JSONBAgg('integer_field', default=None)), {'aggregation': None}, ) self.assertEqual( queryset.aggregate( aggregation=StringAgg('char_field', delimiter=';', default=None), ), {'aggregation': None}, ) self.assertEqual( queryset.aggregate(aggregation=ArrayAgg('boolean_field', default=Value([]))), {'aggregation': []}, ) self.assertEqual( queryset.aggregate(aggregation=JSONBAgg('integer_field', default=Value('[]'))), {'aggregation': []}, ) self.assertEqual( queryset.aggregate( aggregation=StringAgg('char_field', delimiter=';', default=Value('')), ), {'aggregation': ''}, ) def test_array_agg_charfield(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field')) self.assertEqual(values, {'arrayagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']}) def test_array_agg_charfield_ordering(self): ordering_test_cases = ( (F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']), (F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']), (F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']), ([F('boolean_field'), F('char_field').desc()], ['Foo4', 'Foo2', 'Foo3', 'Foo1']), ((F('boolean_field'), F('char_field').desc()), ['Foo4', 'Foo2', 'Foo3', 'Foo1']), ('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']), ('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']), (Concat('char_field', Value('@')), ['Foo1', 'Foo2', 'Foo3', 'Foo4']), (Concat('char_field', Value('@')).desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']), ( (Substr('char_field', 1, 1), F('integer_field'), Substr('char_field', 4, 1).desc()), ['Foo3', 'Foo1', 'Foo2', 'Foo4'], ), ) for ordering, expected_output in ordering_test_cases: with self.subTest(ordering=ordering, expected_output=expected_output): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg('char_field', ordering=ordering) ) self.assertEqual(values, {'arrayagg': expected_output}) def test_array_agg_integerfield(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('integer_field')) self.assertEqual(values, {'arrayagg': [0, 1, 2, 0]}) def test_array_agg_integerfield_ordering(self): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg('integer_field', ordering=F('integer_field').desc()) ) self.assertEqual(values, {'arrayagg': [2, 1, 0, 0]}) def test_array_agg_booleanfield(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('boolean_field')) self.assertEqual(values, {'arrayagg': [True, False, False, 
True]}) def test_array_agg_booleanfield_ordering(self): ordering_test_cases = ( (F('boolean_field').asc(), [False, False, True, True]), (F('boolean_field').desc(), [True, True, False, False]), (F('boolean_field'), [False, False, True, True]), ) for ordering, expected_output in ordering_test_cases: with self.subTest(ordering=ordering, expected_output=expected_output): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg('boolean_field', ordering=ordering) ) self.assertEqual(values, {'arrayagg': expected_output}) def test_array_agg_jsonfield(self): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg( KeyTransform('lang', 'json_field'), filter=Q(json_field__lang__isnull=False), ), ) self.assertEqual(values, {'arrayagg': ['pl', 'en']}) def test_array_agg_jsonfield_ordering(self): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg( KeyTransform('lang', 'json_field'), filter=Q(json_field__lang__isnull=False), ordering=KeyTransform('lang', 'json_field'), ), ) self.assertEqual(values, {'arrayagg': ['en', 'pl']}) def test_array_agg_filter(self): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg('integer_field', filter=Q(integer_field__gt=0)), ) self.assertEqual(values, {'arrayagg': [1, 2]}) def test_array_agg_lookups(self): aggr1 = AggregateTestModel.objects.create() aggr2 = AggregateTestModel.objects.create() StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0) StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0) StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0) StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0) qs = StatTestModel.objects.values('related_field').annotate( array=ArrayAgg('int1') ).filter(array__overlap=[2]).values_list('array', flat=True) self.assertCountEqual(qs.get(), [1, 2]) def test_bit_and_general(self): values = AggregateTestModel.objects.filter( integer_field__in=[0, 1]).aggregate(bitand=BitAnd('integer_field')) self.assertEqual(values, {'bitand': 0}) def test_bit_and_on_only_true_values(self): values = AggregateTestModel.objects.filter( integer_field=1).aggregate(bitand=BitAnd('integer_field')) self.assertEqual(values, {'bitand': 1}) def test_bit_and_on_only_false_values(self): values = AggregateTestModel.objects.filter( integer_field=0).aggregate(bitand=BitAnd('integer_field')) self.assertEqual(values, {'bitand': 0}) def test_bit_or_general(self): values = AggregateTestModel.objects.filter( integer_field__in=[0, 1]).aggregate(bitor=BitOr('integer_field')) self.assertEqual(values, {'bitor': 1}) def test_bit_or_on_only_true_values(self): values = AggregateTestModel.objects.filter( integer_field=1).aggregate(bitor=BitOr('integer_field')) self.assertEqual(values, {'bitor': 1}) def test_bit_or_on_only_false_values(self): values = AggregateTestModel.objects.filter( integer_field=0).aggregate(bitor=BitOr('integer_field')) self.assertEqual(values, {'bitor': 0}) @skipUnlessDBFeature('has_bit_xor') def test_bit_xor_general(self): AggregateTestModel.objects.create(integer_field=3) values = AggregateTestModel.objects.filter( integer_field__in=[1, 3], ).aggregate(bitxor=BitXor('integer_field')) self.assertEqual(values, {'bitxor': 2}) @skipUnlessDBFeature('has_bit_xor') def test_bit_xor_on_only_true_values(self): values = AggregateTestModel.objects.filter( integer_field=1, ).aggregate(bitxor=BitXor('integer_field')) self.assertEqual(values, {'bitxor': 1}) @skipUnlessDBFeature('has_bit_xor') def test_bit_xor_on_only_false_values(self): values = AggregateTestModel.objects.filter( 
integer_field=0, ).aggregate(bitxor=BitXor('integer_field')) self.assertEqual(values, {'bitxor': 0}) def test_bool_and_general(self): values = AggregateTestModel.objects.aggregate(booland=BoolAnd('boolean_field')) self.assertEqual(values, {'booland': False}) def test_bool_and_q_object(self): values = AggregateTestModel.objects.aggregate( booland=BoolAnd(Q(integer_field__gt=2)), ) self.assertEqual(values, {'booland': False}) def test_bool_or_general(self): values = AggregateTestModel.objects.aggregate(boolor=BoolOr('boolean_field')) self.assertEqual(values, {'boolor': True}) def test_bool_or_q_object(self): values = AggregateTestModel.objects.aggregate( boolor=BoolOr(Q(integer_field__gt=2)), ) self.assertEqual(values, {'boolor': False}) def test_string_agg_requires_delimiter(self): with self.assertRaises(TypeError): AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field')) def test_string_agg_delimiter_escaping(self): values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter="'")) self.assertEqual(values, {'stringagg': "Foo1'Foo2'Foo4'Foo3"}) def test_string_agg_charfield(self): values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=';')) self.assertEqual(values, {'stringagg': 'Foo1;Foo2;Foo4;Foo3'}) def test_string_agg_default_output_field(self): values = AggregateTestModel.objects.aggregate( stringagg=StringAgg('text_field', delimiter=';'), ) self.assertEqual(values, {'stringagg': 'Text1;Text2;Text4;Text3'}) def test_string_agg_charfield_ordering(self): ordering_test_cases = ( (F('char_field').desc(), 'Foo4;Foo3;Foo2;Foo1'), (F('char_field').asc(), 'Foo1;Foo2;Foo3;Foo4'), (F('char_field'), 'Foo1;Foo2;Foo3;Foo4'), ('char_field', 'Foo1;Foo2;Foo3;Foo4'), ('-char_field', 'Foo4;Foo3;Foo2;Foo1'), (Concat('char_field', Value('@')), 'Foo1;Foo2;Foo3;Foo4'), (Concat('char_field', Value('@')).desc(), 'Foo4;Foo3;Foo2;Foo1'), ) for ordering, expected_output in ordering_test_cases: with self.subTest(ordering=ordering, expected_output=expected_output): values = AggregateTestModel.objects.aggregate( stringagg=StringAgg('char_field', delimiter=';', ordering=ordering) ) self.assertEqual(values, {'stringagg': expected_output}) def test_string_agg_jsonfield_ordering(self): values = AggregateTestModel.objects.aggregate( stringagg=StringAgg( KeyTextTransform('lang', 'json_field'), delimiter=';', ordering=KeyTextTransform('lang', 'json_field'), output_field=CharField(), ), ) self.assertEqual(values, {'stringagg': 'en;pl'}) def test_string_agg_filter(self): values = AggregateTestModel.objects.aggregate( stringagg=StringAgg( 'char_field', delimiter=';', filter=Q(char_field__endswith='3') | Q(char_field__endswith='1'), ) ) self.assertEqual(values, {'stringagg': 'Foo1;Foo3'}) def test_orderable_agg_alternative_fields(self): values = AggregateTestModel.objects.aggregate( arrayagg=ArrayAgg('integer_field', ordering=F('char_field').asc()) ) self.assertEqual(values, {'arrayagg': [0, 1, 0, 2]}) def test_jsonb_agg(self): values = AggregateTestModel.objects.aggregate(jsonbagg=JSONBAgg('char_field')) self.assertEqual(values, {'jsonbagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']}) def test_jsonb_agg_charfield_ordering(self): ordering_test_cases = ( (F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']), (F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']), (F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']), ('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']), ('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']), (Concat('char_field', Value('@')), 
['Foo1', 'Foo2', 'Foo3', 'Foo4']), (Concat('char_field', Value('@')).desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']), ) for ordering, expected_output in ordering_test_cases: with self.subTest(ordering=ordering, expected_output=expected_output): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg('char_field', ordering=ordering), ) self.assertEqual(values, {'jsonbagg': expected_output}) def test_jsonb_agg_integerfield_ordering(self): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg('integer_field', ordering=F('integer_field').desc()), ) self.assertEqual(values, {'jsonbagg': [2, 1, 0, 0]}) def test_jsonb_agg_booleanfield_ordering(self): ordering_test_cases = ( (F('boolean_field').asc(), [False, False, True, True]), (F('boolean_field').desc(), [True, True, False, False]), (F('boolean_field'), [False, False, True, True]), ) for ordering, expected_output in ordering_test_cases: with self.subTest(ordering=ordering, expected_output=expected_output): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg('boolean_field', ordering=ordering), ) self.assertEqual(values, {'jsonbagg': expected_output}) def test_jsonb_agg_jsonfield_ordering(self): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg( KeyTransform('lang', 'json_field'), filter=Q(json_field__lang__isnull=False), ordering=KeyTransform('lang', 'json_field'), ), ) self.assertEqual(values, {'jsonbagg': ['en', 'pl']}) def test_jsonb_agg_key_index_transforms(self): room101 = Room.objects.create(number=101) room102 = Room.objects.create(number=102) datetimes = [ timezone.datetime(2018, 6, 20), timezone.datetime(2018, 6, 24), timezone.datetime(2018, 6, 28), ] HotelReservation.objects.create( datespan=(datetimes[0].date(), datetimes[1].date()), start=datetimes[0], end=datetimes[1], room=room102, requirements={'double_bed': True, 'parking': True}, ) HotelReservation.objects.create( datespan=(datetimes[1].date(), datetimes[2].date()), start=datetimes[1], end=datetimes[2], room=room102, requirements={'double_bed': False, 'sea_view': True, 'parking': False}, ) HotelReservation.objects.create( datespan=(datetimes[0].date(), datetimes[2].date()), start=datetimes[0], end=datetimes[2], room=room101, requirements={'sea_view': False}, ) values = Room.objects.annotate( requirements=JSONBAgg( 'hotelreservation__requirements', ordering='-hotelreservation__start', ) ).filter(requirements__0__sea_view=True).values('number', 'requirements') self.assertSequenceEqual(values, [ {'number': 102, 'requirements': [ {'double_bed': False, 'sea_view': True, 'parking': False}, {'double_bed': True, 'parking': True}, ]}, ]) def test_string_agg_array_agg_ordering_in_subquery(self): stats = [] for i, agg in enumerate(AggregateTestModel.objects.order_by('char_field')): stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1)) stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i)) StatTestModel.objects.bulk_create(stats) for aggregate, expected_result in ( ( ArrayAgg('stattestmodel__int1', ordering='-stattestmodel__int2'), [('Foo1', [0, 1]), ('Foo2', [1, 2]), ('Foo3', [2, 3]), ('Foo4', [3, 4])], ), ( StringAgg( Cast('stattestmodel__int1', CharField()), delimiter=';', ordering='-stattestmodel__int2', ), [('Foo1', '0;1'), ('Foo2', '1;2'), ('Foo3', '2;3'), ('Foo4', '3;4')], ), ): with self.subTest(aggregate=aggregate.__class__.__name__): subquery = AggregateTestModel.objects.filter( pk=OuterRef('pk'), ).annotate(agg=aggregate).values('agg') values = AggregateTestModel.objects.annotate( agg=Subquery(subquery), 
).order_by('char_field').values_list('char_field', 'agg') self.assertEqual(list(values), expected_result) def test_string_agg_array_agg_filter_in_subquery(self): StatTestModel.objects.bulk_create([ StatTestModel(related_field=self.aggs[0], int1=0, int2=5), StatTestModel(related_field=self.aggs[0], int1=1, int2=4), StatTestModel(related_field=self.aggs[0], int1=2, int2=3), ]) for aggregate, expected_result in ( ( ArrayAgg('stattestmodel__int1', filter=Q(stattestmodel__int2__gt=3)), [('Foo1', [0, 1]), ('Foo2', None)], ), ( StringAgg( Cast('stattestmodel__int2', CharField()), delimiter=';', filter=Q(stattestmodel__int1__lt=2), ), [('Foo1', '5;4'), ('Foo2', None)], ), ): with self.subTest(aggregate=aggregate.__class__.__name__): subquery = AggregateTestModel.objects.filter( pk=OuterRef('pk'), ).annotate(agg=aggregate).values('agg') values = AggregateTestModel.objects.annotate( agg=Subquery(subquery), ).filter( char_field__in=['Foo1', 'Foo2'], ).order_by('char_field').values_list('char_field', 'agg') self.assertEqual(list(values), expected_result) def test_string_agg_filter_in_subquery_with_exclude(self): subquery = AggregateTestModel.objects.annotate( stringagg=StringAgg( 'char_field', delimiter=';', filter=Q(char_field__endswith='1'), ) ).exclude(stringagg='').values('id') self.assertSequenceEqual( AggregateTestModel.objects.filter(id__in=Subquery(subquery)), [self.aggs[0]], ) def test_ordering_isnt_cleared_for_array_subquery(self): inner_qs = AggregateTestModel.objects.order_by('-integer_field') qs = AggregateTestModel.objects.annotate( integers=Func( Subquery(inner_qs.values('integer_field')), function='ARRAY', output_field=ArrayField(base_field=IntegerField()), ), ) self.assertSequenceEqual( qs.first().integers, inner_qs.values_list('integer_field', flat=True), ) class TestAggregateDistinct(PostgreSQLTestCase): @classmethod def setUpTestData(cls): AggregateTestModel.objects.create(char_field='Foo') AggregateTestModel.objects.create(char_field='Foo') AggregateTestModel.objects.create(char_field='Bar') def test_string_agg_distinct_false(self): values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=False)) self.assertEqual(values['stringagg'].count('Foo'), 2) self.assertEqual(values['stringagg'].count('Bar'), 1) def test_string_agg_distinct_true(self): values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=True)) self.assertEqual(values['stringagg'].count('Foo'), 1) self.assertEqual(values['stringagg'].count('Bar'), 1) def test_array_agg_distinct_false(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=False)) self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo', 'Foo']) def test_array_agg_distinct_true(self): values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=True)) self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo']) def test_jsonb_agg_distinct_false(self): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg('char_field', distinct=False), ) self.assertEqual(sorted(values['jsonbagg']), ['Bar', 'Foo', 'Foo']) def test_jsonb_agg_distinct_true(self): values = AggregateTestModel.objects.aggregate( jsonbagg=JSONBAgg('char_field', distinct=True), ) self.assertEqual(sorted(values['jsonbagg']), ['Bar', 'Foo']) class TestStatisticsAggregate(PostgreSQLTestCase): @classmethod def setUpTestData(cls): StatTestModel.objects.create( int1=1, int2=3, 
related_field=AggregateTestModel.objects.create(integer_field=0), ) StatTestModel.objects.create( int1=2, int2=2, related_field=AggregateTestModel.objects.create(integer_field=1), ) StatTestModel.objects.create( int1=3, int2=1, related_field=AggregateTestModel.objects.create(integer_field=2), ) # Tests for base class (StatAggregate) def test_missing_arguments_raises_exception(self): with self.assertRaisesMessage(ValueError, 'Both y and x must be provided.'): StatAggregate(x=None, y=None) def test_correct_source_expressions(self): func = StatAggregate(x='test', y=13) self.assertIsInstance(func.source_expressions[0], Value) self.assertIsInstance(func.source_expressions[1], F) def test_alias_is_required(self): class SomeFunc(StatAggregate): function = 'TEST' with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): StatTestModel.objects.aggregate(SomeFunc(y='int2', x='int1')) # Test aggregates def test_empty_result_set(self): StatTestModel.objects.all().delete() tests = [ (Corr(y='int2', x='int1'), None), (CovarPop(y='int2', x='int1'), None), (CovarPop(y='int2', x='int1', sample=True), None), (RegrAvgX(y='int2', x='int1'), None), (RegrAvgY(y='int2', x='int1'), None), (RegrCount(y='int2', x='int1'), 0), (RegrIntercept(y='int2', x='int1'), None), (RegrR2(y='int2', x='int1'), None), (RegrSlope(y='int2', x='int1'), None), (RegrSXX(y='int2', x='int1'), None), (RegrSXY(y='int2', x='int1'), None), (RegrSYY(y='int2', x='int1'), None), ] for aggregation, expected_result in tests: with self.subTest(aggregation=aggregation): # Empty result with non-execution optimization. with self.assertNumQueries(0): values = StatTestModel.objects.none().aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) # Empty result when query must be executed. with self.assertNumQueries(1): values = StatTestModel.objects.aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) def test_default_argument(self): StatTestModel.objects.all().delete() tests = [ (Corr(y='int2', x='int1', default=0), 0), (CovarPop(y='int2', x='int1', default=0), 0), (CovarPop(y='int2', x='int1', sample=True, default=0), 0), (RegrAvgX(y='int2', x='int1', default=0), 0), (RegrAvgY(y='int2', x='int1', default=0), 0), # RegrCount() doesn't support the default argument. (RegrIntercept(y='int2', x='int1', default=0), 0), (RegrR2(y='int2', x='int1', default=0), 0), (RegrSlope(y='int2', x='int1', default=0), 0), (RegrSXX(y='int2', x='int1', default=0), 0), (RegrSXY(y='int2', x='int1', default=0), 0), (RegrSYY(y='int2', x='int1', default=0), 0), ] for aggregation, expected_result in tests: with self.subTest(aggregation=aggregation): # Empty result with non-execution optimization. with self.assertNumQueries(0): values = StatTestModel.objects.none().aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) # Empty result when query must be executed. 
with self.assertNumQueries(1): values = StatTestModel.objects.aggregate( aggregation=aggregation, ) self.assertEqual(values, {'aggregation': expected_result}) def test_corr_general(self): values = StatTestModel.objects.aggregate(corr=Corr(y='int2', x='int1')) self.assertEqual(values, {'corr': -1.0}) def test_covar_pop_general(self): values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1')) self.assertEqual(values, {'covarpop': Approximate(-0.66, places=1)}) def test_covar_pop_sample(self): values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1', sample=True)) self.assertEqual(values, {'covarpop': -1.0}) def test_regr_avgx_general(self): values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y='int2', x='int1')) self.assertEqual(values, {'regravgx': 2.0}) def test_regr_avgy_general(self): values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y='int2', x='int1')) self.assertEqual(values, {'regravgy': 2.0}) def test_regr_count_general(self): values = StatTestModel.objects.aggregate(regrcount=RegrCount(y='int2', x='int1')) self.assertEqual(values, {'regrcount': 3}) def test_regr_count_default(self): msg = 'RegrCount does not allow default.' with self.assertRaisesMessage(TypeError, msg): RegrCount(y='int2', x='int1', default=0) def test_regr_intercept_general(self): values = StatTestModel.objects.aggregate(regrintercept=RegrIntercept(y='int2', x='int1')) self.assertEqual(values, {'regrintercept': 4}) def test_regr_r2_general(self): values = StatTestModel.objects.aggregate(regrr2=RegrR2(y='int2', x='int1')) self.assertEqual(values, {'regrr2': 1}) def test_regr_slope_general(self): values = StatTestModel.objects.aggregate(regrslope=RegrSlope(y='int2', x='int1')) self.assertEqual(values, {'regrslope': -1}) def test_regr_sxx_general(self): values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y='int2', x='int1')) self.assertEqual(values, {'regrsxx': 2.0}) def test_regr_sxy_general(self): values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y='int2', x='int1')) self.assertEqual(values, {'regrsxy': -2.0}) def test_regr_syy_general(self): values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y='int2', x='int1')) self.assertEqual(values, {'regrsyy': 2.0}) def test_regr_avgx_with_related_obj_and_number_as_argument(self): """ This is more complex test to check if JOIN on field and number as argument works as expected. """ values = StatTestModel.objects.aggregate(complex_regravgx=RegrAvgX(y=5, x='related_field__integer_field')) self.assertEqual(values, {'complex_regravgx': 1.0})
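# Illustrative-only sketch (not part of the original suite): the `default`
# argument exercised in the tests above is implemented by wrapping the
# aggregate in Coalesce, so for an empty table the two queries below should
# return the same result. The helper name below is made up for this sketch.
def _default_argument_sketch():
    from django.db.models.functions import Coalesce

    with_default = AggregateTestModel.objects.aggregate(
        agg=StringAgg('char_field', delimiter=';', default=Value('')),
    )
    with_coalesce = AggregateTestModel.objects.aggregate(
        agg=Coalesce(StringAgg('char_field', delimiter=';'), Value('')),
    )
    return with_default, with_coalesce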
import datetime import json from decimal import Decimal from django import forms from django.core import exceptions, serializers from django.db.models import DateField, DateTimeField, F, Func, Value from django.http import QueryDict from django.test import override_settings from django.test.utils import isolate_apps from django.utils import timezone from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import ( BigAutoFieldModel, PostgreSQLModel, RangeLookupsModel, RangesModel, SmallAutoFieldModel, ) try: from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange from django.contrib.postgres import fields as pg_fields, forms as pg_forms from django.contrib.postgres.validators import ( RangeMaxValueValidator, RangeMinValueValidator, ) except ImportError: pass @isolate_apps('postgres_tests') class BasicTests(PostgreSQLSimpleTestCase): def test_get_field_display(self): class Model(PostgreSQLModel): field = pg_fields.IntegerRangeField( choices=[ ['1-50', [((1, 25), '1-25'), ([26, 50], '26-50')]], ((51, 100), '51-100'), ], ) tests = ( ((1, 25), '1-25'), ([26, 50], '26-50'), ((51, 100), '51-100'), ((1, 2), '(1, 2)'), ([1, 2], '[1, 2]'), ) for value, display in tests: with self.subTest(value=value, display=display): instance = Model(field=value) self.assertEqual(instance.get_field_display(), display) def test_discrete_range_fields_unsupported_default_bounds(self): discrete_range_types = [ pg_fields.BigIntegerRangeField, pg_fields.IntegerRangeField, pg_fields.DateRangeField, ] for field_type in discrete_range_types: msg = f"Cannot use 'default_bounds' with {field_type.__name__}." with self.assertRaisesMessage(TypeError, msg): field_type(choices=[((51, 100), '51-100')], default_bounds='[]') def test_continuous_range_fields_default_bounds(self): continuous_range_types = [ pg_fields.DecimalRangeField, pg_fields.DateTimeRangeField, ] for field_type in continuous_range_types: field = field_type(choices=[((51, 100), '51-100')], default_bounds='[]') self.assertEqual(field.default_bounds, '[]') def test_invalid_default_bounds(self): tests = [')]', ')[', '](', '])', '([', '[(', 'x', '', None] msg = "default_bounds must be one of '[)', '(]', '()', or '[]'." 
for invalid_bounds in tests: with self.assertRaisesMessage(ValueError, msg): pg_fields.DecimalRangeField(default_bounds=invalid_bounds) def test_deconstruct(self): field = pg_fields.DecimalRangeField() *_, kwargs = field.deconstruct() self.assertEqual(kwargs, {}) field = pg_fields.DecimalRangeField(default_bounds='[]') *_, kwargs = field.deconstruct() self.assertEqual(kwargs, {'default_bounds': '[]'}) class TestSaveLoad(PostgreSQLTestCase): def test_all_fields(self): now = timezone.now() instance = RangesModel( ints=NumericRange(0, 10), bigints=NumericRange(10, 20), decimals=NumericRange(20, 30), timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now), dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()), ) instance.save() loaded = RangesModel.objects.get() self.assertEqual(instance.ints, loaded.ints) self.assertEqual(instance.bigints, loaded.bigints) self.assertEqual(instance.decimals, loaded.decimals) self.assertEqual(instance.timestamps, loaded.timestamps) self.assertEqual(instance.dates, loaded.dates) def test_range_object(self): r = NumericRange(0, 10) instance = RangesModel(ints=r) instance.save() loaded = RangesModel.objects.get() self.assertEqual(r, loaded.ints) def test_tuple(self): instance = RangesModel(ints=(0, 10)) instance.save() loaded = RangesModel.objects.get() self.assertEqual(NumericRange(0, 10), loaded.ints) def test_tuple_range_with_default_bounds(self): range_ = (timezone.now(), timezone.now() + datetime.timedelta(hours=1)) RangesModel.objects.create(timestamps_closed_bounds=range_, timestamps=range_) loaded = RangesModel.objects.get() self.assertEqual( loaded.timestamps_closed_bounds, DateTimeTZRange(range_[0], range_[1], '[]'), ) self.assertEqual( loaded.timestamps, DateTimeTZRange(range_[0], range_[1], '[)'), ) def test_range_object_boundaries(self): r = NumericRange(0, 10, '[]') instance = RangesModel(decimals=r) instance.save() loaded = RangesModel.objects.get() self.assertEqual(r, loaded.decimals) self.assertIn(10, loaded.decimals) def test_range_object_boundaries_range_with_default_bounds(self): range_ = DateTimeTZRange( timezone.now(), timezone.now() + datetime.timedelta(hours=1), bounds='()', ) RangesModel.objects.create(timestamps_closed_bounds=range_) loaded = RangesModel.objects.get() self.assertEqual(loaded.timestamps_closed_bounds, range_) def test_unbounded(self): r = NumericRange(None, None, '()') instance = RangesModel(decimals=r) instance.save() loaded = RangesModel.objects.get() self.assertEqual(r, loaded.decimals) def test_empty(self): r = NumericRange(empty=True) instance = RangesModel(ints=r) instance.save() loaded = RangesModel.objects.get() self.assertEqual(r, loaded.ints) def test_null(self): instance = RangesModel(ints=None) instance.save() loaded = RangesModel.objects.get() self.assertIsNone(loaded.ints) def test_model_set_on_base_field(self): instance = RangesModel() field = instance._meta.get_field('ints') self.assertEqual(field.model, RangesModel) self.assertEqual(field.base_field.model, RangesModel) class TestRangeContainsLookup(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.timestamps = [ datetime.datetime(year=2016, month=1, day=1), datetime.datetime(year=2016, month=1, day=2, hour=1), datetime.datetime(year=2016, month=1, day=2, hour=12), datetime.datetime(year=2016, month=1, day=3), datetime.datetime(year=2016, month=1, day=3, hour=1), datetime.datetime(year=2016, month=2, day=2), ] cls.aware_timestamps = [ timezone.make_aware(timestamp) for timestamp in cls.timestamps ] cls.dates = [ 
datetime.date(year=2016, month=1, day=1), datetime.date(year=2016, month=1, day=2), datetime.date(year=2016, month=1, day=3), datetime.date(year=2016, month=1, day=4), datetime.date(year=2016, month=2, day=2), datetime.date(year=2016, month=2, day=3), ] cls.obj = RangesModel.objects.create( dates=(cls.dates[0], cls.dates[3]), dates_inner=(cls.dates[1], cls.dates[2]), timestamps=(cls.timestamps[0], cls.timestamps[3]), timestamps_inner=(cls.timestamps[1], cls.timestamps[2]), ) cls.aware_obj = RangesModel.objects.create( dates=(cls.dates[0], cls.dates[3]), dates_inner=(cls.dates[1], cls.dates[2]), timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]), timestamps_inner=(cls.timestamps[1], cls.timestamps[2]), ) # Objects that don't match any queries. for i in range(3, 4): RangesModel.objects.create( dates=(cls.dates[i], cls.dates[i + 1]), timestamps=(cls.timestamps[i], cls.timestamps[i + 1]), ) RangesModel.objects.create( dates=(cls.dates[i], cls.dates[i + 1]), timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]), ) def test_datetime_range_contains(self): filter_args = ( self.timestamps[1], self.aware_timestamps[1], (self.timestamps[1], self.timestamps[2]), (self.aware_timestamps[1], self.aware_timestamps[2]), Value(self.dates[0]), Func(F('dates'), function='lower', output_field=DateTimeField()), F('timestamps_inner'), ) for filter_arg in filter_args: with self.subTest(filter_arg=filter_arg): self.assertCountEqual( RangesModel.objects.filter(**{'timestamps__contains': filter_arg}), [self.obj, self.aware_obj], ) def test_date_range_contains(self): filter_args = ( self.timestamps[1], (self.dates[1], self.dates[2]), Value(self.dates[0], output_field=DateField()), Func(F('timestamps'), function='lower', output_field=DateField()), F('dates_inner'), ) for filter_arg in filter_args: with self.subTest(filter_arg=filter_arg): self.assertCountEqual( RangesModel.objects.filter(**{'dates__contains': filter_arg}), [self.obj, self.aware_obj], ) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = RangesModel.objects.bulk_create([ RangesModel(ints=NumericRange(0, 10)), RangesModel(ints=NumericRange(5, 15)), RangesModel(ints=NumericRange(None, 0)), RangesModel(ints=NumericRange(empty=True)), RangesModel(ints=None), ]) def test_exact(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__exact=NumericRange(0, 10)), [self.objs[0]], ) def test_isnull(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__isnull=True), [self.objs[4]], ) def test_isempty(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__isempty=True), [self.objs[3]], ) def test_contains(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__contains=8), [self.objs[0], self.objs[1]], ) def test_contains_range(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__contains=NumericRange(3, 8)), [self.objs[0]], ) def test_contained_by(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)), [self.objs[0], self.objs[1], self.objs[3]], ) def test_overlap(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)), [self.objs[0], self.objs[1]], ) def test_fully_lt(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)), [self.objs[2]], ) def test_fully_gt(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)), [], ) def test_not_lt(self): self.assertSequenceEqual( 
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)), [self.objs[1]], ) def test_not_gt(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)), [self.objs[0], self.objs[2]], ) def test_adjacent_to(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)), [self.objs[1], self.objs[2]], ) def test_startswith(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__startswith=0), [self.objs[0]], ) def test_endswith(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__endswith=0), [self.objs[2]], ) def test_startswith_chaining(self): self.assertSequenceEqual( RangesModel.objects.filter(ints__startswith__gte=0), [self.objs[0], self.objs[1]], ) def test_bound_type(self): decimals = RangesModel.objects.bulk_create([ RangesModel(decimals=NumericRange(None, 10)), RangesModel(decimals=NumericRange(10, None)), RangesModel(decimals=NumericRange(5, 15)), RangesModel(decimals=NumericRange(5, 15, '(]')), ]) tests = [ ('lower_inc', True, [decimals[1], decimals[2]]), ('lower_inc', False, [decimals[0], decimals[3]]), ('lower_inf', True, [decimals[0]]), ('lower_inf', False, [decimals[1], decimals[2], decimals[3]]), ('upper_inc', True, [decimals[3]]), ('upper_inc', False, [decimals[0], decimals[1], decimals[2]]), ('upper_inf', True, [decimals[1]]), ('upper_inf', False, [decimals[0], decimals[2], decimals[3]]), ] for lookup, filter_arg, excepted_result in tests: with self.subTest(lookup=lookup, filter_arg=filter_arg): self.assertSequenceEqual( RangesModel.objects.filter(**{'decimals__%s' % lookup: filter_arg}), excepted_result, ) class TestQueryingWithRanges(PostgreSQLTestCase): def test_date_range(self): objs = [ RangeLookupsModel.objects.create(date='2015-01-01'), RangeLookupsModel.objects.create(date='2015-05-05'), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(date__contained_by=DateRange('2015-01-01', '2015-05-04')), [objs[0]], ) def test_date_range_datetime_field(self): objs = [ RangeLookupsModel.objects.create(timestamp='2015-01-01'), RangeLookupsModel.objects.create(timestamp='2015-05-05'), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(timestamp__date__contained_by=DateRange('2015-01-01', '2015-05-04')), [objs[0]], ) def test_datetime_range(self): objs = [ RangeLookupsModel.objects.create(timestamp='2015-01-01T09:00:00'), RangeLookupsModel.objects.create(timestamp='2015-05-05T17:00:00'), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter( timestamp__contained_by=DateTimeTZRange('2015-01-01T09:00', '2015-05-04T23:55') ), [objs[0]], ) def test_small_integer_field_contained_by(self): objs = [ RangeLookupsModel.objects.create(small_integer=8), RangeLookupsModel.objects.create(small_integer=4), RangeLookupsModel.objects.create(small_integer=-1), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(small_integer__contained_by=NumericRange(4, 6)), [objs[1]], ) def test_integer_range(self): objs = [ RangeLookupsModel.objects.create(integer=5), RangeLookupsModel.objects.create(integer=99), RangeLookupsModel.objects.create(integer=-1), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)), [objs[0]] ) def test_biginteger_range(self): objs = [ RangeLookupsModel.objects.create(big_integer=5), RangeLookupsModel.objects.create(big_integer=99), RangeLookupsModel.objects.create(big_integer=-1), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(big_integer__contained_by=NumericRange(1, 98)), [objs[0]] ) 
def test_decimal_field_contained_by(self): objs = [ RangeLookupsModel.objects.create(decimal_field=Decimal('1.33')), RangeLookupsModel.objects.create(decimal_field=Decimal('2.88')), RangeLookupsModel.objects.create(decimal_field=Decimal('99.17')), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter( decimal_field__contained_by=NumericRange(Decimal('1.89'), Decimal('7.91')), ), [objs[1]], ) def test_float_range(self): objs = [ RangeLookupsModel.objects.create(float=5), RangeLookupsModel.objects.create(float=99), RangeLookupsModel.objects.create(float=-1), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)), [objs[0]] ) def test_small_auto_field_contained_by(self): objs = SmallAutoFieldModel.objects.bulk_create([ SmallAutoFieldModel() for i in range(1, 5) ]) self.assertSequenceEqual( SmallAutoFieldModel.objects.filter( id__contained_by=NumericRange(objs[1].pk, objs[3].pk), ), objs[1:3], ) def test_auto_field_contained_by(self): objs = RangeLookupsModel.objects.bulk_create([ RangeLookupsModel() for i in range(1, 5) ]) self.assertSequenceEqual( RangeLookupsModel.objects.filter( id__contained_by=NumericRange(objs[1].pk, objs[3].pk), ), objs[1:3], ) def test_big_auto_field_contained_by(self): objs = BigAutoFieldModel.objects.bulk_create([ BigAutoFieldModel() for i in range(1, 5) ]) self.assertSequenceEqual( BigAutoFieldModel.objects.filter( id__contained_by=NumericRange(objs[1].pk, objs[3].pk), ), objs[1:3], ) def test_f_ranges(self): parent = RangesModel.objects.create(decimals=NumericRange(0, 10)) objs = [ RangeLookupsModel.objects.create(float=5, parent=parent), RangeLookupsModel.objects.create(float=99, parent=parent), ] self.assertSequenceEqual( RangeLookupsModel.objects.filter(float__contained_by=F('parent__decimals')), [objs[0]] ) def test_exclude(self): objs = [ RangeLookupsModel.objects.create(float=5), RangeLookupsModel.objects.create(float=99), RangeLookupsModel.objects.create(float=-1), ] self.assertSequenceEqual( RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)), [objs[2]] ) class TestSerialization(PostgreSQLSimpleTestCase): test_data = ( '[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", ' '\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", ' '"bigints": null, "timestamps": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", ' '\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", ' '"timestamps_inner": null, ' '"timestamps_closed_bounds": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", ' '\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"()\\"}", ' '"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", \\"bounds\\": \\"[)\\"}", ' '"dates_inner": null }, ' '"model": "postgres_tests.rangesmodel", "pk": null}]' ) lower_date = datetime.date(2014, 1, 1) upper_date = datetime.date(2014, 2, 2) lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=timezone.utc) upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=timezone.utc) def test_dumping(self): instance = RangesModel( ints=NumericRange(0, 10), decimals=NumericRange(empty=True), timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt), timestamps_closed_bounds=DateTimeTZRange( self.lower_dt, self.upper_dt, bounds='()', ), dates=DateRange(self.lower_date, self.upper_date), ) data = serializers.serialize('json', [instance]) dumped = json.loads(data) for field in ('ints', 'dates', 'timestamps', 'timestamps_closed_bounds'): dumped[0]['fields'][field] = 
json.loads(dumped[0]['fields'][field]) check = json.loads(self.test_data) for field in ('ints', 'dates', 'timestamps', 'timestamps_closed_bounds'): check[0]['fields'][field] = json.loads(check[0]['fields'][field]) self.assertEqual(dumped, check) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.ints, NumericRange(0, 10)) self.assertEqual(instance.decimals, NumericRange(empty=True)) self.assertIsNone(instance.bigints) self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date)) self.assertEqual(instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt)) self.assertEqual( instance.timestamps_closed_bounds, DateTimeTZRange(self.lower_dt, self.upper_dt, bounds='()'), ) def test_serialize_range_with_null(self): instance = RangesModel(ints=NumericRange(None, 10)) data = serializers.serialize('json', [instance]) new_instance = list(serializers.deserialize('json', data))[0].object self.assertEqual(new_instance.ints, NumericRange(None, 10)) instance = RangesModel(ints=NumericRange(10, None)) data = serializers.serialize('json', [instance]) new_instance = list(serializers.deserialize('json', data))[0].object self.assertEqual(new_instance.ints, NumericRange(10, None)) class TestChecks(PostgreSQLSimpleTestCase): def test_choices_tuple_list(self): class Model(PostgreSQLModel): field = pg_fields.IntegerRangeField( choices=[ ['1-50', [((1, 25), '1-25'), ([26, 50], '26-50')]], ((51, 100), '51-100'), ], ) self.assertEqual(Model._meta.get_field('field').check(), []) class TestValidators(PostgreSQLSimpleTestCase): def test_max(self): validator = RangeMaxValueValidator(5) validator(NumericRange(0, 5)) msg = 'Ensure that this range is completely less than or equal to 5.' with self.assertRaises(exceptions.ValidationError) as cm: validator(NumericRange(0, 10)) self.assertEqual(cm.exception.messages[0], msg) self.assertEqual(cm.exception.code, 'max_value') with self.assertRaisesMessage(exceptions.ValidationError, msg): validator(NumericRange(0, None)) # an unbound range def test_min(self): validator = RangeMinValueValidator(5) validator(NumericRange(10, 15)) msg = 'Ensure that this range is completely greater than or equal to 5.' 
with self.assertRaises(exceptions.ValidationError) as cm: validator(NumericRange(0, 10)) self.assertEqual(cm.exception.messages[0], msg) self.assertEqual(cm.exception.code, 'min_value') with self.assertRaisesMessage(exceptions.ValidationError, msg): validator(NumericRange(None, 10)) # an unbound range class TestFormField(PostgreSQLSimpleTestCase): def test_valid_integer(self): field = pg_forms.IntegerRangeField() value = field.clean(['1', '2']) self.assertEqual(value, NumericRange(1, 2)) def test_valid_decimal(self): field = pg_forms.DecimalRangeField() value = field.clean(['1.12345', '2.001']) self.assertEqual(value, NumericRange(Decimal('1.12345'), Decimal('2.001'))) def test_valid_timestamps(self): field = pg_forms.DateTimeRangeField() value = field.clean(['01/01/2014 00:00:00', '02/02/2014 12:12:12']) lower = datetime.datetime(2014, 1, 1, 0, 0, 0) upper = datetime.datetime(2014, 2, 2, 12, 12, 12) self.assertEqual(value, DateTimeTZRange(lower, upper)) def test_valid_dates(self): field = pg_forms.DateRangeField() value = field.clean(['01/01/2014', '02/02/2014']) lower = datetime.date(2014, 1, 1) upper = datetime.date(2014, 2, 2) self.assertEqual(value, DateRange(lower, upper)) def test_using_split_datetime_widget(self): class SplitDateTimeRangeField(pg_forms.DateTimeRangeField): base_field = forms.SplitDateTimeField class SplitForm(forms.Form): field = SplitDateTimeRangeField() form = SplitForm() self.assertHTMLEqual(str(form), ''' <tr> <th> <label>Field:</label> </th> <td> <input id="id_field_0_0" name="field_0_0" type="text"> <input id="id_field_0_1" name="field_0_1" type="text"> <input id="id_field_1_0" name="field_1_0" type="text"> <input id="id_field_1_1" name="field_1_1" type="text"> </td> </tr> ''') form = SplitForm({ 'field_0_0': '01/01/2014', 'field_0_1': '00:00:00', 'field_1_0': '02/02/2014', 'field_1_1': '12:12:12', }) self.assertTrue(form.is_valid()) lower = datetime.datetime(2014, 1, 1, 0, 0, 0) upper = datetime.datetime(2014, 2, 2, 12, 12, 12) self.assertEqual(form.cleaned_data['field'], DateTimeTZRange(lower, upper)) def test_none(self): field = pg_forms.IntegerRangeField(required=False) value = field.clean(['', '']) self.assertIsNone(value) def test_datetime_form_as_table(self): class DateTimeRangeForm(forms.Form): datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True) form = DateTimeRangeForm() self.assertHTMLEqual( form.as_table(), """ <tr><th> <label>Datetime field:</label> </th><td> <input type="text" name="datetime_field_0" id="id_datetime_field_0"> <input type="text" name="datetime_field_1" id="id_datetime_field_1"> <input type="hidden" name="initial-datetime_field_0" id="initial-id_datetime_field_0"> <input type="hidden" name="initial-datetime_field_1" id="initial-id_datetime_field_1"> </td></tr> """ ) form = DateTimeRangeForm({ 'datetime_field_0': '2010-01-01 11:13:00', 'datetime_field_1': '2020-12-12 16:59:00', }) self.assertHTMLEqual( form.as_table(), """ <tr><th> <label>Datetime field:</label> </th><td> <input type="text" name="datetime_field_0" value="2010-01-01 11:13:00" id="id_datetime_field_0"> <input type="text" name="datetime_field_1" value="2020-12-12 16:59:00" id="id_datetime_field_1"> <input type="hidden" name="initial-datetime_field_0" value="2010-01-01 11:13:00" id="initial-id_datetime_field_0"> <input type="hidden" name="initial-datetime_field_1" value="2020-12-12 16:59:00" id="initial-id_datetime_field_1"></td></tr> """ ) def test_datetime_form_initial_data(self): class DateTimeRangeForm(forms.Form): datetime_field = 
pg_forms.DateTimeRangeField(show_hidden_initial=True) data = QueryDict(mutable=True) data.update({ 'datetime_field_0': '2010-01-01 11:13:00', 'datetime_field_1': '', 'initial-datetime_field_0': '2010-01-01 10:12:00', 'initial-datetime_field_1': '', }) form = DateTimeRangeForm(data=data) self.assertTrue(form.has_changed()) data['initial-datetime_field_0'] = '2010-01-01 11:13:00' form = DateTimeRangeForm(data=data) self.assertFalse(form.has_changed()) def test_rendering(self): class RangeForm(forms.Form): ints = pg_forms.IntegerRangeField() self.assertHTMLEqual(str(RangeForm()), ''' <tr> <th><label>Ints:</label></th> <td> <input id="id_ints_0" name="ints_0" type="number"> <input id="id_ints_1" name="ints_1" type="number"> </td> </tr> ''') def test_integer_lower_bound_higher(self): field = pg_forms.IntegerRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['10', '2']) self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.') self.assertEqual(cm.exception.code, 'bound_ordering') def test_integer_open(self): field = pg_forms.IntegerRangeField() value = field.clean(['', '0']) self.assertEqual(value, NumericRange(None, 0)) def test_integer_incorrect_data_type(self): field = pg_forms.IntegerRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('1') self.assertEqual(cm.exception.messages[0], 'Enter two whole numbers.') self.assertEqual(cm.exception.code, 'invalid') def test_integer_invalid_lower(self): field = pg_forms.IntegerRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['a', '2']) self.assertEqual(cm.exception.messages[0], 'Enter a whole number.') def test_integer_invalid_upper(self): field = pg_forms.IntegerRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['1', 'b']) self.assertEqual(cm.exception.messages[0], 'Enter a whole number.') def test_integer_required(self): field = pg_forms.IntegerRangeField(required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['', '']) self.assertEqual(cm.exception.messages[0], 'This field is required.') value = field.clean([1, '']) self.assertEqual(value, NumericRange(1, None)) def test_decimal_lower_bound_higher(self): field = pg_forms.DecimalRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['1.8', '1.6']) self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.') self.assertEqual(cm.exception.code, 'bound_ordering') def test_decimal_open(self): field = pg_forms.DecimalRangeField() value = field.clean(['', '3.1415926']) self.assertEqual(value, NumericRange(None, Decimal('3.1415926'))) def test_decimal_incorrect_data_type(self): field = pg_forms.DecimalRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('1.6') self.assertEqual(cm.exception.messages[0], 'Enter two numbers.') self.assertEqual(cm.exception.code, 'invalid') def test_decimal_invalid_lower(self): field = pg_forms.DecimalRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['a', '3.1415926']) self.assertEqual(cm.exception.messages[0], 'Enter a number.') def test_decimal_invalid_upper(self): field = pg_forms.DecimalRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['1.61803399', 'b']) self.assertEqual(cm.exception.messages[0], 'Enter a number.') def test_decimal_required(self): field = pg_forms.DecimalRangeField(required=True) 
with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['', '']) self.assertEqual(cm.exception.messages[0], 'This field is required.') value = field.clean(['1.61803399', '']) self.assertEqual(value, NumericRange(Decimal('1.61803399'), None)) def test_date_lower_bound_higher(self): field = pg_forms.DateRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['2013-04-09', '1976-04-16']) self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.') self.assertEqual(cm.exception.code, 'bound_ordering') def test_date_open(self): field = pg_forms.DateRangeField() value = field.clean(['', '2013-04-09']) self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9))) def test_date_incorrect_data_type(self): field = pg_forms.DateRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('1') self.assertEqual(cm.exception.messages[0], 'Enter two valid dates.') self.assertEqual(cm.exception.code, 'invalid') def test_date_invalid_lower(self): field = pg_forms.DateRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['a', '2013-04-09']) self.assertEqual(cm.exception.messages[0], 'Enter a valid date.') def test_date_invalid_upper(self): field = pg_forms.DateRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['2013-04-09', 'b']) self.assertEqual(cm.exception.messages[0], 'Enter a valid date.') def test_date_required(self): field = pg_forms.DateRangeField(required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['', '']) self.assertEqual(cm.exception.messages[0], 'This field is required.') value = field.clean(['1976-04-16', '']) self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None)) def test_date_has_changed_first(self): self.assertTrue(pg_forms.DateRangeField().has_changed( ['2010-01-01', '2020-12-12'], ['2010-01-31', '2020-12-12'], )) def test_date_has_changed_last(self): self.assertTrue(pg_forms.DateRangeField().has_changed( ['2010-01-01', '2020-12-12'], ['2010-01-01', '2020-12-31'], )) def test_datetime_lower_bound_higher(self): field = pg_forms.DateTimeRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['2006-10-25 14:59', '2006-10-25 14:58']) self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.') self.assertEqual(cm.exception.code, 'bound_ordering') def test_datetime_open(self): field = pg_forms.DateTimeRangeField() value = field.clean(['', '2013-04-09 11:45']) self.assertEqual(value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45))) def test_datetime_incorrect_data_type(self): field = pg_forms.DateTimeRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('2013-04-09 11:45') self.assertEqual(cm.exception.messages[0], 'Enter two valid date/times.') self.assertEqual(cm.exception.code, 'invalid') def test_datetime_invalid_lower(self): field = pg_forms.DateTimeRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['45', '2013-04-09 11:45']) self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.') def test_datetime_invalid_upper(self): field = pg_forms.DateTimeRangeField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['2013-04-09 11:45', 'sweet pickles']) self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.') def test_datetime_required(self): field = 
pg_forms.DateTimeRangeField(required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['', '']) self.assertEqual(cm.exception.messages[0], 'This field is required.') value = field.clean(['2013-04-09 11:45', '']) self.assertEqual(value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None)) @override_settings(USE_TZ=True, TIME_ZONE='Africa/Johannesburg') def test_datetime_prepare_value(self): field = pg_forms.DateTimeRangeField() value = field.prepare_value( DateTimeTZRange(datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=timezone.utc), None) ) self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None]) def test_datetime_has_changed_first(self): self.assertTrue(pg_forms.DateTimeRangeField().has_changed( ['2010-01-01 00:00', '2020-12-12 00:00'], ['2010-01-31 23:00', '2020-12-12 00:00'], )) def test_datetime_has_changed_last(self): self.assertTrue(pg_forms.DateTimeRangeField().has_changed( ['2010-01-01 00:00', '2020-12-12 00:00'], ['2010-01-01 00:00', '2020-12-31 23:00'], )) def test_model_field_formfield_integer(self): model_field = pg_fields.IntegerRangeField() form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.IntegerRangeField) self.assertEqual(form_field.range_kwargs, {}) def test_model_field_formfield_biginteger(self): model_field = pg_fields.BigIntegerRangeField() form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.IntegerRangeField) self.assertEqual(form_field.range_kwargs, {}) def test_model_field_formfield_float(self): model_field = pg_fields.DecimalRangeField(default_bounds='()') form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.DecimalRangeField) self.assertEqual(form_field.range_kwargs, {'bounds': '()'}) def test_model_field_formfield_date(self): model_field = pg_fields.DateRangeField() form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.DateRangeField) self.assertEqual(form_field.range_kwargs, {}) def test_model_field_formfield_datetime(self): model_field = pg_fields.DateTimeRangeField() form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.DateTimeRangeField) self.assertEqual( form_field.range_kwargs, {'bounds': pg_fields.ranges.CANONICAL_RANGE_BOUNDS}, ) def test_model_field_formfield_datetime_default_bounds(self): model_field = pg_fields.DateTimeRangeField(default_bounds='[]') form_field = model_field.formfield() self.assertIsInstance(form_field, pg_forms.DateTimeRangeField) self.assertEqual(form_field.range_kwargs, {'bounds': '[]'}) def test_model_field_with_default_bounds(self): field = pg_forms.DateTimeRangeField(default_bounds='[]') value = field.clean(['2014-01-01 00:00:00', '2014-02-03 12:13:14']) lower = datetime.datetime(2014, 1, 1, 0, 0, 0) upper = datetime.datetime(2014, 2, 3, 12, 13, 14) self.assertEqual(value, DateTimeTZRange(lower, upper, '[]')) def test_has_changed(self): for field, value in ( (pg_forms.DateRangeField(), ['2010-01-01', '2020-12-12']), (pg_forms.DateTimeRangeField(), ['2010-01-01 11:13', '2020-12-12 14:52']), (pg_forms.IntegerRangeField(), [1, 2]), (pg_forms.DecimalRangeField(), ['1.12345', '2.001']), ): with self.subTest(field=field.__class__.__name__): self.assertTrue(field.has_changed(None, value)) self.assertTrue(field.has_changed([value[0], ''], value)) self.assertTrue(field.has_changed(['', value[1]], value)) self.assertFalse(field.has_changed(value, value)) class TestWidget(PostgreSQLSimpleTestCase): def test_range_widget(self): f = 
pg_forms.ranges.DateTimeRangeField() self.assertHTMLEqual( f.widget.render('datetimerange', ''), '<input type="text" name="datetimerange_0"><input type="text" name="datetimerange_1">' ) self.assertHTMLEqual( f.widget.render('datetimerange', None), '<input type="text" name="datetimerange_0"><input type="text" name="datetimerange_1">' ) dt_range = DateTimeTZRange( datetime.datetime(2006, 1, 10, 7, 30), datetime.datetime(2006, 2, 12, 9, 50) ) self.assertHTMLEqual( f.widget.render('datetimerange', dt_range), '<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00">' '<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00">' )
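# Illustrative sketch (not part of the tests above): minimal application-style
# usage of the range form fields exercised by this module, reusing the
# module's forms/pg_forms imports. Assumes psycopg2 and
# django.contrib.postgres are available; the form and field names are
# hypothetical.
class PriceFilterForm(forms.Form):
    # Rendered as two inputs named price_0 and price_1 (cf. test_rendering);
    # cleaning returns a psycopg2 NumericRange, or None when both are empty.
    price = pg_forms.IntegerRangeField(required=False)

_example_form = PriceFilterForm({'price_0': '10', 'price_1': '20'})
assert _example_form.is_valid()
_example_range = _example_form.cleaned_data['price']  # NumericRange(10, 20, '[)')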
from django.db import models from .fields import ( ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField, DateRangeField, DateTimeRangeField, DecimalRangeField, EnumField, HStoreField, IntegerRangeField, SearchVectorField, ) class Tag: def __init__(self, tag_id): self.tag_id = tag_id def __eq__(self, other): return isinstance(other, Tag) and self.tag_id == other.tag_id class TagField(models.SmallIntegerField): def from_db_value(self, value, expression, connection): if value is None: return value return Tag(int(value)) def to_python(self, value): if isinstance(value, Tag): return value if value is None: return value return Tag(int(value)) def get_prep_value(self, value): return value.tag_id class PostgreSQLModel(models.Model): class Meta: abstract = True required_db_vendor = 'postgresql' class IntegerArrayModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=list, blank=True) class NullableIntegerArrayModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), blank=True, null=True) field_nested = ArrayField(ArrayField(models.IntegerField(null=True)), null=True) order = models.IntegerField(null=True) class CharArrayModel(PostgreSQLModel): field = ArrayField(models.CharField(max_length=10)) class DateTimeArrayModel(PostgreSQLModel): datetimes = ArrayField(models.DateTimeField()) dates = ArrayField(models.DateField()) times = ArrayField(models.TimeField()) class NestedIntegerArrayModel(PostgreSQLModel): field = ArrayField(ArrayField(models.IntegerField())) class OtherTypesArrayModel(PostgreSQLModel): ips = ArrayField(models.GenericIPAddressField(), default=list) uuids = ArrayField(models.UUIDField(), default=list) decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2), default=list) tags = ArrayField(TagField(), blank=True, null=True) json = ArrayField(models.JSONField(default=dict), default=list) int_ranges = ArrayField(IntegerRangeField(), blank=True, null=True) bigint_ranges = ArrayField(BigIntegerRangeField(), blank=True, null=True) class HStoreModel(PostgreSQLModel): field = HStoreField(blank=True, null=True) array_field = ArrayField(HStoreField(), null=True) class ArrayEnumModel(PostgreSQLModel): array_of_enums = ArrayField(EnumField(max_length=20)) class CharFieldModel(models.Model): field = models.CharField(max_length=64) class TextFieldModel(models.Model): field = models.TextField() class SmallAutoFieldModel(models.Model): id = models.SmallAutoField(primary_key=True) class BigAutoFieldModel(models.Model): id = models.BigAutoField(primary_key=True) # Scene/Character/Line models are used to test full text search. They're # populated with content from Monty Python and the Holy Grail. 
class Scene(models.Model): scene = models.TextField() setting = models.CharField(max_length=255) class Character(models.Model): name = models.CharField(max_length=255) class CITestModel(PostgreSQLModel): name = CICharField(primary_key=True, max_length=255) email = CIEmailField() description = CITextField() array_field = ArrayField(CITextField(), null=True) class Line(PostgreSQLModel): scene = models.ForeignKey('Scene', models.CASCADE) character = models.ForeignKey('Character', models.CASCADE) dialogue = models.TextField(blank=True, null=True) dialogue_search_vector = SearchVectorField(blank=True, null=True) dialogue_config = models.CharField(max_length=100, blank=True, null=True) class LineSavedSearch(PostgreSQLModel): line = models.ForeignKey('Line', models.CASCADE) query = models.CharField(max_length=100) class RangesModel(PostgreSQLModel): ints = IntegerRangeField(blank=True, null=True) bigints = BigIntegerRangeField(blank=True, null=True) decimals = DecimalRangeField(blank=True, null=True) timestamps = DateTimeRangeField(blank=True, null=True) timestamps_inner = DateTimeRangeField(blank=True, null=True) timestamps_closed_bounds = DateTimeRangeField( blank=True, null=True, default_bounds='[]', ) dates = DateRangeField(blank=True, null=True) dates_inner = DateRangeField(blank=True, null=True) class RangeLookupsModel(PostgreSQLModel): parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True) integer = models.IntegerField(blank=True, null=True) big_integer = models.BigIntegerField(blank=True, null=True) float = models.FloatField(blank=True, null=True) timestamp = models.DateTimeField(blank=True, null=True) date = models.DateField(blank=True, null=True) small_integer = models.SmallIntegerField(blank=True, null=True) decimal_field = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True) class ArrayFieldSubclass(ArrayField): def __init__(self, *args, **kwargs): super().__init__(models.IntegerField()) class AggregateTestModel(PostgreSQLModel): """ To test postgres-specific general aggregation functions """ char_field = models.CharField(max_length=30, blank=True) text_field = models.TextField(blank=True) integer_field = models.IntegerField(null=True) boolean_field = models.BooleanField(null=True) json_field = models.JSONField(null=True) class StatTestModel(PostgreSQLModel): """ To test postgres-specific aggregation functions for statistics """ int1 = models.IntegerField() int2 = models.IntegerField() related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True) class NowTestModel(models.Model): when = models.DateTimeField(null=True, default=None) class UUIDTestModel(models.Model): uuid = models.UUIDField(default=None, null=True) class Room(models.Model): number = models.IntegerField(unique=True) class HotelReservation(PostgreSQLModel): room = models.ForeignKey('Room', on_delete=models.CASCADE) datespan = DateRangeField() start = models.DateTimeField() end = models.DateTimeField() cancelled = models.BooleanField(default=False) requirements = models.JSONField(blank=True, null=True)
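# Illustrative sketch (not part of the models module above): the range fields
# declared on RangesModel accept two-element tuples on assignment and support
# the documented range lookups. Requires a PostgreSQL database; psycopg2 is
# imported lazily so the example does not affect other backends.
def _range_lookup_examples():
    from psycopg2.extras import NumericRange

    RangesModel.objects.create(ints=(10, 20))  # saved as an int4range
    assert RangesModel.objects.filter(ints__contains=15).exists()
    assert RangesModel.objects.filter(
        ints__contained_by=NumericRange(0, 100)
    ).exists()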
""" Indirection layer for PostgreSQL-specific fields, so the tests don't fail when run with a backend other than PostgreSQL. """ import enum from django.db import models try: from django.contrib.postgres.fields import ( ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField, DateRangeField, DateTimeRangeField, DecimalRangeField, HStoreField, IntegerRangeField, ) from django.contrib.postgres.search import SearchVector, SearchVectorField except ImportError: class DummyArrayField(models.Field): def __init__(self, base_field, size=None, **kwargs): super().__init__(**kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() kwargs.update({ 'base_field': '', 'size': 1, }) return name, path, args, kwargs class DummyContinuousRangeField(models.Field): def __init__(self, *args, default_bounds='[)', **kwargs): super().__init__(**kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() kwargs['default_bounds'] = '[)' return name, path, args, kwargs ArrayField = DummyArrayField BigIntegerRangeField = models.Field CICharField = models.Field CIEmailField = models.Field CITextField = models.Field DateRangeField = models.Field DateTimeRangeField = DummyContinuousRangeField DecimalRangeField = DummyContinuousRangeField HStoreField = models.Field IntegerRangeField = models.Field SearchVector = models.Expression SearchVectorField = models.Field class EnumField(models.CharField): def get_prep_value(self, value): return value.value if isinstance(value, enum.Enum) else value
from unittest import mock from django.contrib.postgres.indexes import ( BloomIndex, BrinIndex, BTreeIndex, GinIndex, GistIndex, HashIndex, OpClass, SpGistIndex, ) from django.db import NotSupportedError, connection from django.db.models import CharField, F, Index, Q from django.db.models.functions import Cast, Collate, Length, Lower from django.test import skipUnlessDBFeature from django.test.utils import modify_settings, register_lookup from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .fields import SearchVector, SearchVectorField from .models import CharFieldModel, IntegerArrayModel, Scene, TextFieldModel class IndexTestMixin: def test_name_auto_generation(self): index = self.index_class(fields=['field']) index.set_name_with_model(CharFieldModel) self.assertRegex(index.name, r'postgres_te_field_[0-9a-f]{6}_%s' % self.index_class.suffix) def test_deconstruction_no_customization(self): index = self.index_class(fields=['title'], name='test_title_%s' % self.index_class.suffix) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.%s' % self.index_class.__name__) self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_%s' % self.index_class.suffix}) def test_deconstruction_with_expressions_no_customization(self): name = f'test_title_{self.index_class.suffix}' index = self.index_class(Lower('title'), name=name) path, args, kwargs = index.deconstruct() self.assertEqual( path, f'django.contrib.postgres.indexes.{self.index_class.__name__}', ) self.assertEqual(args, (Lower('title'),)) self.assertEqual(kwargs, {'name': name}) class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = BloomIndex def test_suffix(self): self.assertEqual(BloomIndex.suffix, 'bloom') def test_deconstruction(self): index = BloomIndex(fields=['title'], name='test_bloom', length=80, columns=[4]) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.BloomIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': ['title'], 'name': 'test_bloom', 'length': 80, 'columns': [4], }) def test_invalid_fields(self): msg = 'Bloom indexes support a maximum of 32 fields.' with self.assertRaisesMessage(ValueError, msg): BloomIndex(fields=['title'] * 33, name='test_bloom') def test_invalid_columns(self): msg = 'BloomIndex.columns must be a list or tuple.' with self.assertRaisesMessage(ValueError, msg): BloomIndex(fields=['title'], name='test_bloom', columns='x') msg = 'BloomIndex.columns cannot have more values than fields.' with self.assertRaisesMessage(ValueError, msg): BloomIndex(fields=['title'], name='test_bloom', columns=[4, 3]) def test_invalid_columns_value(self): msg = 'BloomIndex.columns must contain integers from 1 to 4095.' for length in (0, 4096): with self.subTest(length), self.assertRaisesMessage(ValueError, msg): BloomIndex(fields=['title'], name='test_bloom', columns=[length]) def test_invalid_length(self): msg = 'BloomIndex.length must be None or an integer from 1 to 4096.' 
for length in (0, 4097): with self.subTest(length), self.assertRaisesMessage(ValueError, msg): BloomIndex(fields=['title'], name='test_bloom', length=length) class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = BrinIndex def test_suffix(self): self.assertEqual(BrinIndex.suffix, 'brin') def test_deconstruction(self): index = BrinIndex(fields=['title'], name='test_title_brin', autosummarize=True, pages_per_range=16) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': ['title'], 'name': 'test_title_brin', 'autosummarize': True, 'pages_per_range': 16, }) def test_invalid_pages_per_range(self): with self.assertRaisesMessage(ValueError, 'pages_per_range must be None or a positive integer'): BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=0) class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = BTreeIndex def test_suffix(self): self.assertEqual(BTreeIndex.suffix, 'btree') def test_deconstruction(self): index = BTreeIndex(fields=['title'], name='test_title_btree', fillfactor=80) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.BTreeIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_btree', 'fillfactor': 80}) class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = GinIndex def test_suffix(self): self.assertEqual(GinIndex.suffix, 'gin') def test_deconstruction(self): index = GinIndex( fields=['title'], name='test_title_gin', fastupdate=True, gin_pending_list_limit=128, ) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': ['title'], 'name': 'test_title_gin', 'fastupdate': True, 'gin_pending_list_limit': 128, }) class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = GistIndex def test_suffix(self): self.assertEqual(GistIndex.suffix, 'gist') def test_deconstruction(self): index = GistIndex(fields=['title'], name='test_title_gist', buffering=False, fillfactor=80) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'fields': ['title'], 'name': 'test_title_gist', 'buffering': False, 'fillfactor': 80, }) class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = HashIndex def test_suffix(self): self.assertEqual(HashIndex.suffix, 'hash') def test_deconstruction(self): index = HashIndex(fields=['title'], name='test_title_hash', fillfactor=80) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.HashIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_hash', 'fillfactor': 80}) class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase): index_class = SpGistIndex def test_suffix(self): self.assertEqual(SpGistIndex.suffix, 'spgist') def test_deconstruction(self): index = SpGistIndex(fields=['title'], name='test_title_spgist', fillfactor=80) path, args, kwargs = index.deconstruct() self.assertEqual(path, 'django.contrib.postgres.indexes.SpGistIndex') self.assertEqual(args, ()) self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_spgist', 'fillfactor': 80}) @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) 
class SchemaTests(PostgreSQLTestCase): get_opclass_query = ''' SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = %s ''' def get_constraints(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_gin_index(self): # Ensure the table is there and doesn't have an index. self.assertNotIn('field', self.get_constraints(IntegerArrayModel._meta.db_table)) # Add the index index_name = 'integer_array_model_field_gin' index = GinIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(IntegerArrayModel, index) constraints = self.get_constraints(IntegerArrayModel._meta.db_table) # Check gin index was added self.assertEqual(constraints[index_name]['type'], GinIndex.suffix) # Drop the index with connection.schema_editor() as editor: editor.remove_index(IntegerArrayModel, index) self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table)) def test_gin_fastupdate(self): index_name = 'integer_array_gin_fastupdate' index = GinIndex(fields=['field'], name=index_name, fastupdate=False) with connection.schema_editor() as editor: editor.add_index(IntegerArrayModel, index) constraints = self.get_constraints(IntegerArrayModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], 'gin') self.assertEqual(constraints[index_name]['options'], ['fastupdate=off']) with connection.schema_editor() as editor: editor.remove_index(IntegerArrayModel, index) self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table)) def test_partial_gin_index(self): with register_lookup(CharField, Length): index_name = 'char_field_gin_partial_idx' index = GinIndex(fields=['field'], name=index_name, condition=Q(field__length=40)) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], 'gin') with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_partial_gin_index_with_tablespace(self): with register_lookup(CharField, Length): index_name = 'char_field_gin_partial_idx' index = GinIndex( fields=['field'], name=index_name, condition=Q(field__length=40), db_tablespace='pg_default', ) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(CharFieldModel, editor))) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], 'gin') with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_gin_parameters(self): index_name = 'integer_array_gin_params' index = GinIndex(fields=['field'], name=index_name, fastupdate=True, gin_pending_list_limit=64) with connection.schema_editor() as editor: editor.add_index(IntegerArrayModel, index) constraints = self.get_constraints(IntegerArrayModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], 'gin') self.assertEqual(constraints[index_name]['options'], ['gin_pending_list_limit=64', 'fastupdate=on']) with connection.schema_editor() as editor: 
editor.remove_index(IntegerArrayModel, index) self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table)) def test_trigram_op_class_gin_index(self): index_name = 'trigram_op_class_gin' index = GinIndex(OpClass(F('scene'), name='gin_trgm_ops'), name=index_name) with connection.schema_editor() as editor: editor.add_index(Scene, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [index_name]) self.assertCountEqual(cursor.fetchall(), [('gin_trgm_ops', index_name)]) constraints = self.get_constraints(Scene._meta.db_table) self.assertIn(index_name, constraints) self.assertIn(constraints[index_name]['type'], GinIndex.suffix) with connection.schema_editor() as editor: editor.remove_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table)) def test_cast_search_vector_gin_index(self): index_name = 'cast_search_vector_gin' index = GinIndex(Cast('field', SearchVectorField()), name=index_name) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) sql = index.create_sql(TextFieldModel, editor) table = TextFieldModel._meta.db_table constraints = self.get_constraints(table) self.assertIn(index_name, constraints) self.assertIn(constraints[index_name]['type'], GinIndex.suffix) self.assertIs(sql.references_column(table, 'field'), True) self.assertIn('::tsvector', str(sql)) with connection.schema_editor() as editor: editor.remove_index(TextFieldModel, index) self.assertNotIn(index_name, self.get_constraints(table)) def test_bloom_index(self): index_name = 'char_field_model_field_bloom' index = BloomIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_bloom_parameters(self): index_name = 'char_field_model_field_bloom_params' index = BloomIndex(fields=['field'], name=index_name, length=512, columns=[3]) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['length=512', 'col1=3']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_brin_index(self): index_name = 'char_field_model_field_brin' index = BrinIndex(fields=['field'], name=index_name, pages_per_range=4) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['pages_per_range=4']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_brin_parameters(self): index_name = 'char_field_brin_params' index = BrinIndex(fields=['field'], name=index_name, autosummarize=True) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = 
self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['autosummarize=on']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_btree_index(self): # Ensure the table is there and doesn't have an index. self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table)) # Add the index. index_name = 'char_field_model_field_btree' index = BTreeIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) # The index was added. self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix) # Drop the index. with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_btree_parameters(self): index_name = 'integer_array_btree_fillfactor' index = BTreeIndex(fields=['field'], name=index_name, fillfactor=80) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['fillfactor=80']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_gist_index(self): # Ensure the table is there and doesn't have an index. self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table)) # Add the index. index_name = 'char_field_model_field_gist' index = GistIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) # The index was added. self.assertEqual(constraints[index_name]['type'], GistIndex.suffix) # Drop the index. 
with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_gist_parameters(self): index_name = 'integer_array_gist_buffering' index = GistIndex(fields=['field'], name=index_name, buffering=True, fillfactor=80) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], GistIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['buffering=on', 'fillfactor=80']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_gist_include(self): index_name = 'scene_gist_include_setting' index = GistIndex(name=index_name, fields=['scene'], include=['setting']) with connection.schema_editor() as editor: editor.add_index(Scene, index) constraints = self.get_constraints(Scene._meta.db_table) self.assertIn(index_name, constraints) self.assertEqual(constraints[index_name]['type'], GistIndex.suffix) self.assertEqual(constraints[index_name]['columns'], ['scene', 'setting']) with connection.schema_editor() as editor: editor.remove_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table)) def test_gist_include_not_supported(self): index_name = 'gist_include_exception' index = GistIndex(fields=['scene'], name=index_name, include=['setting']) msg = 'Covering GiST indexes require PostgreSQL 12+.' with self.assertRaisesMessage(NotSupportedError, msg): with mock.patch( 'django.db.backends.postgresql.features.DatabaseFeatures.supports_covering_gist_indexes', False, ): with connection.schema_editor() as editor: editor.add_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table)) def test_tsvector_op_class_gist_index(self): index_name = 'tsvector_op_class_gist' index = GistIndex( OpClass( SearchVector('scene', 'setting', config='english'), name='tsvector_ops', ), name=index_name, ) with connection.schema_editor() as editor: editor.add_index(Scene, index) sql = index.create_sql(Scene, editor) table = Scene._meta.db_table constraints = self.get_constraints(table) self.assertIn(index_name, constraints) self.assertIn(constraints[index_name]['type'], GistIndex.suffix) self.assertIs(sql.references_column(table, 'scene'), True) self.assertIs(sql.references_column(table, 'setting'), True) with connection.schema_editor() as editor: editor.remove_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(table)) def test_hash_index(self): # Ensure the table is there and doesn't have an index. self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table)) # Add the index. index_name = 'char_field_model_field_hash' index = HashIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) # The index was added. self.assertEqual(constraints[index_name]['type'], HashIndex.suffix) # Drop the index. 
with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_hash_parameters(self): index_name = 'integer_array_hash_fillfactor' index = HashIndex(fields=['field'], name=index_name, fillfactor=80) with connection.schema_editor() as editor: editor.add_index(CharFieldModel, index) constraints = self.get_constraints(CharFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], HashIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['fillfactor=80']) with connection.schema_editor() as editor: editor.remove_index(CharFieldModel, index) self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table)) def test_spgist_index(self): # Ensure the table is there and doesn't have an index. self.assertNotIn('field', self.get_constraints(TextFieldModel._meta.db_table)) # Add the index. index_name = 'text_field_model_field_spgist' index = SpGistIndex(fields=['field'], name=index_name) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) constraints = self.get_constraints(TextFieldModel._meta.db_table) # The index was added. self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix) # Drop the index. with connection.schema_editor() as editor: editor.remove_index(TextFieldModel, index) self.assertNotIn(index_name, self.get_constraints(TextFieldModel._meta.db_table)) def test_spgist_parameters(self): index_name = 'text_field_model_spgist_fillfactor' index = SpGistIndex(fields=['field'], name=index_name, fillfactor=80) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) constraints = self.get_constraints(TextFieldModel._meta.db_table) self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix) self.assertEqual(constraints[index_name]['options'], ['fillfactor=80']) with connection.schema_editor() as editor: editor.remove_index(TextFieldModel, index) self.assertNotIn(index_name, self.get_constraints(TextFieldModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_spgist_indexes') def test_spgist_include(self): index_name = 'scene_spgist_include_setting' index = SpGistIndex(name=index_name, fields=['scene'], include=['setting']) with connection.schema_editor() as editor: editor.add_index(Scene, index) constraints = self.get_constraints(Scene._meta.db_table) self.assertIn(index_name, constraints) self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix) self.assertEqual(constraints[index_name]['columns'], ['scene', 'setting']) with connection.schema_editor() as editor: editor.remove_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table)) def test_spgist_include_not_supported(self): index_name = 'spgist_include_exception' index = SpGistIndex(fields=['scene'], name=index_name, include=['setting']) msg = 'Covering SP-GiST indexes require PostgreSQL 14+.' 
with self.assertRaisesMessage(NotSupportedError, msg): with mock.patch( 'django.db.backends.postgresql.features.DatabaseFeatures.supports_covering_spgist_indexes', False, ): with connection.schema_editor() as editor: editor.add_index(Scene, index) self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table)) def test_op_class(self): index_name = 'test_op_class' index = Index( OpClass(Lower('field'), name='text_pattern_ops'), name=index_name, ) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [index_name]) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)]) def test_op_class_descending_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) index_name = 'test_op_class_descending_collation' index = Index( Collate( OpClass(Lower('field'), name='text_pattern_ops').desc(nulls_last=True), collation=collation, ), name=index_name, ) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) self.assertIn( 'COLLATE %s' % editor.quote_name(collation), str(index.create_sql(TextFieldModel, editor)), ) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [index_name]) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)]) table = TextFieldModel._meta.db_table constraints = self.get_constraints(table) self.assertIn(index_name, constraints) self.assertEqual(constraints[index_name]['orders'], ['DESC']) with connection.schema_editor() as editor: editor.remove_index(TextFieldModel, index) self.assertNotIn(index_name, self.get_constraints(table)) def test_op_class_descending_partial(self): index_name = 'test_op_class_descending_partial' index = Index( OpClass(Lower('field'), name='text_pattern_ops').desc(), name=index_name, condition=Q(field__contains='China'), ) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [index_name]) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)]) constraints = self.get_constraints(TextFieldModel._meta.db_table) self.assertIn(index_name, constraints) self.assertEqual(constraints[index_name]['orders'], ['DESC']) def test_op_class_descending_partial_tablespace(self): index_name = 'test_op_class_descending_partial_tablespace' index = Index( OpClass(Lower('field').desc(), name='text_pattern_ops'), name=index_name, condition=Q(field__contains='China'), db_tablespace='pg_default', ) with connection.schema_editor() as editor: editor.add_index(TextFieldModel, index) self.assertIn( 'TABLESPACE "pg_default" ', str(index.create_sql(TextFieldModel, editor)) ) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [index_name]) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', index_name)]) constraints = self.get_constraints(TextFieldModel._meta.db_table) self.assertIn(index_name, constraints) self.assertEqual(constraints[index_name]['orders'], ['DESC'])
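# Illustrative sketch (hypothetical model, not part of the suite): outside of
# tests, the index classes exercised above are normally declared in a model's
# Meta.indexes rather than added through the schema editor directly.
from django.db import models

class ExampleDocument(models.Model):
    body = models.TextField()
    tags = models.JSONField(default=dict)

    class Meta:
        app_label = 'postgres_tests'  # hypothetical; use your own app label
        indexes = [
            GinIndex(fields=['tags'], name='example_doc_tags_gin'),
            BTreeIndex(fields=['body'], name='example_doc_body_btree', fillfactor=80),
        ]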
from decimal import Decimal from django.db.backends.signals import connection_created from django.db.migrations.writer import MigrationWriter from django.test.utils import modify_settings from . import PostgreSQLTestCase try: from psycopg2.extras import ( DateRange, DateTimeRange, DateTimeTZRange, NumericRange, ) from django.contrib.postgres.fields import ( DateRangeField, DateTimeRangeField, DecimalRangeField, IntegerRangeField, ) except ImportError: pass class PostgresConfigTests(PostgreSQLTestCase): def test_register_type_handlers_connection(self): from django.contrib.postgres.signals import register_type_handlers self.assertNotIn(register_type_handlers, connection_created._live_receivers(None)) with modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}): self.assertIn(register_type_handlers, connection_created._live_receivers(None)) self.assertNotIn(register_type_handlers, connection_created._live_receivers(None)) def test_register_serializer_for_migrations(self): tests = ( (DateRange(empty=True), DateRangeField), (DateTimeRange(empty=True), DateRangeField), (DateTimeTZRange(None, None, '[]'), DateTimeRangeField), (NumericRange(Decimal('1.0'), Decimal('5.0'), '()'), DecimalRangeField), (NumericRange(1, 10), IntegerRangeField), ) def assertNotSerializable(): for default, test_field in tests: with self.subTest(default=default): field = test_field(default=default) with self.assertRaisesMessage(ValueError, 'Cannot serialize: %s' % default.__class__.__name__): MigrationWriter.serialize(field) assertNotSerializable() with self.modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}): for default, test_field in tests: with self.subTest(default=default): field = test_field(default=default) serialized_field, imports = MigrationWriter.serialize(field) self.assertEqual(imports, { 'import django.contrib.postgres.fields.ranges', 'import psycopg2.extras', }) self.assertIn( '%s.%s(default=psycopg2.extras.%r)' % ( field.__module__, field.__class__.__name__, default, ), serialized_field ) assertNotSerializable()
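# Illustrative sketch (not part of the test module above): the serializer
# registration tested here only takes effect when the postgres app is
# installed, i.e. the project settings include 'django.contrib.postgres' in
# INSTALLED_APPS. With that in place, a psycopg2 range used as a field
# default can be written out by MigrationWriter (names below are
# hypothetical).
def _serialize_range_default_example():
    field = IntegerRangeField(default=NumericRange(1, 10))
    return MigrationWriter.serialize(field)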
import json from django.core import checks, exceptions, serializers from django.db import connection from django.db.models import F, OuterRef, Subquery from django.db.models.expressions import RawSQL from django.forms import Form from django.test.utils import CaptureQueriesContext, isolate_apps from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import HStoreModel, PostgreSQLModel try: from django.contrib.postgres import forms from django.contrib.postgres.fields import HStoreField from django.contrib.postgres.fields.hstore import KeyTransform from django.contrib.postgres.validators import KeysValidator except ImportError: pass class SimpleTests(PostgreSQLTestCase): def test_save_load_success(self): value = {'a': 'b'} instance = HStoreModel(field=value) instance.save() reloaded = HStoreModel.objects.get() self.assertEqual(reloaded.field, value) def test_null(self): instance = HStoreModel(field=None) instance.save() reloaded = HStoreModel.objects.get() self.assertIsNone(reloaded.field) def test_value_null(self): value = {'a': None} instance = HStoreModel(field=value) instance.save() reloaded = HStoreModel.objects.get() self.assertEqual(reloaded.field, value) def test_key_val_cast_to_string(self): value = {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'} expected_value = {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'} instance = HStoreModel.objects.create(field=value) instance = HStoreModel.objects.get() self.assertEqual(instance.field, expected_value) instance = HStoreModel.objects.get(field__a=1) self.assertEqual(instance.field, expected_value) instance = HStoreModel.objects.get(field__has_keys=[2, 'a', 'ï']) self.assertEqual(instance.field, expected_value) def test_array_field(self): value = [ {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'}, {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'}, ] expected_value = [ {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'}, {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'}, ] instance = HStoreModel.objects.create(array_field=value) instance.refresh_from_db() self.assertEqual(instance.array_field, expected_value) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = HStoreModel.objects.bulk_create([ HStoreModel(field={'a': 'b'}), HStoreModel(field={'a': 'b', 'c': 'd'}), HStoreModel(field={'c': 'd'}), HStoreModel(field={}), HStoreModel(field=None), HStoreModel(field={'cat': 'TigrOu', 'breed': 'birman'}), HStoreModel(field={'cat': 'minou', 'breed': 'ragdoll'}), HStoreModel(field={'cat': 'kitty', 'breed': 'Persian'}), HStoreModel(field={'cat': 'Kit Kat', 'breed': 'persian'}), ]) def test_exact(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__exact={'a': 'b'}), self.objs[:1] ) def test_contained_by(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__contained_by={'a': 'b', 'c': 'd'}), self.objs[:4] ) def test_contains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__contains={'a': 'b'}), self.objs[:2] ) def test_in_generator(self): def search(): yield {'a': 'b'} self.assertSequenceEqual( HStoreModel.objects.filter(field__in=search()), self.objs[:1] ) def test_has_key(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_key='c'), self.objs[1:3] ) def test_has_keys(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_keys=['a', 'c']), self.objs[1:2] ) def test_has_any_keys(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_any_keys=['a', 'c']), self.objs[:3] ) def test_key_transform(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__a='b'), 
self.objs[:2] ) def test_key_transform_raw_expression(self): expr = RawSQL('%s::hstore', ['x => b, y => c']) self.assertSequenceEqual( HStoreModel.objects.filter(field__a=KeyTransform('x', expr)), self.objs[:2] ) def test_key_transform_annotation(self): qs = HStoreModel.objects.annotate(a=F('field__a')) self.assertCountEqual( qs.values_list('a', flat=True), ['b', 'b', None, None, None, None, None, None, None], ) def test_keys(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__keys=['a']), self.objs[:1] ) def test_values(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__values=['b']), self.objs[:1] ) def test_field_chaining_contains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__a__contains='b'), self.objs[:2] ) def test_field_chaining_icontains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__icontains='INo'), [self.objs[6]], ) def test_field_chaining_startswith(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__startswith='kit'), [self.objs[7]], ) def test_field_chaining_istartswith(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__istartswith='kit'), self.objs[7:], ) def test_field_chaining_endswith(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__endswith='ou'), [self.objs[6]], ) def test_field_chaining_iendswith(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__iendswith='ou'), self.objs[5:7], ) def test_field_chaining_iexact(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__breed__iexact='persian'), self.objs[7:], ) def test_field_chaining_regex(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__regex=r'ou$'), [self.objs[6]], ) def test_field_chaining_iregex(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__cat__iregex=r'oU$'), self.objs[5:7], ) def test_order_by_field(self): more_objs = ( HStoreModel.objects.create(field={'g': '637'}), HStoreModel.objects.create(field={'g': '002'}), HStoreModel.objects.create(field={'g': '042'}), HStoreModel.objects.create(field={'g': '981'}), ) self.assertSequenceEqual( HStoreModel.objects.filter(field__has_key='g').order_by('field__g'), [more_objs[1], more_objs[2], more_objs[0], more_objs[3]] ) def test_keys_contains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__keys__contains=['a']), self.objs[:2] ) def test_values_overlap(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__values__overlap=['b', 'd']), self.objs[:3] ) def test_key_isnull(self): obj = HStoreModel.objects.create(field={'a': None}) self.assertSequenceEqual( HStoreModel.objects.filter(field__a__isnull=True), self.objs[2:9] + [obj], ) self.assertSequenceEqual( HStoreModel.objects.filter(field__a__isnull=False), self.objs[:2] ) def test_usage_in_subquery(self): self.assertSequenceEqual( HStoreModel.objects.filter(id__in=HStoreModel.objects.filter(field__a='b')), self.objs[:2] ) def test_key_sql_injection(self): with CaptureQueriesContext(connection) as queries: self.assertFalse( HStoreModel.objects.filter(**{ "field__test' = 'a') OR 1 = 1 OR ('d": 'x', }).exists() ) self.assertIn( """."field" -> 'test'' = ''a'') OR 1 = 1 OR (''d') = 'x' """, queries[0]['sql'], ) def test_obj_subquery_lookup(self): qs = HStoreModel.objects.annotate( value=Subquery(HStoreModel.objects.filter(pk=OuterRef('pk')).values('field')), ).filter(value__a='b') self.assertSequenceEqual(qs, self.objs[:2]) @isolate_apps('postgres_tests') class 
TestChecks(PostgreSQLSimpleTestCase): def test_invalid_default(self): class MyModel(PostgreSQLModel): field = HStoreField(default={}) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "HStoreField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." ), hint='Use a callable instead, e.g., use `dict` instead of `{}`.', obj=MyModel._meta.get_field('field'), id='fields.E010', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = HStoreField(default=dict) self.assertEqual(MyModel().check(), []) class TestSerialization(PostgreSQLSimpleTestCase): test_data = json.dumps([{ 'model': 'postgres_tests.hstoremodel', 'pk': None, 'fields': { 'field': json.dumps({'a': 'b'}), 'array_field': json.dumps([ json.dumps({'a': 'b'}), json.dumps({'b': 'a'}), ]), }, }]) def test_dumping(self): instance = HStoreModel(field={'a': 'b'}, array_field=[{'a': 'b'}, {'b': 'a'}]) data = serializers.serialize('json', [instance]) self.assertEqual(json.loads(data), json.loads(self.test_data)) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.field, {'a': 'b'}) self.assertEqual(instance.array_field, [{'a': 'b'}, {'b': 'a'}]) def test_roundtrip_with_null(self): instance = HStoreModel(field={'a': 'b', 'c': None}) data = serializers.serialize('json', [instance]) new_instance = list(serializers.deserialize('json', data))[0].object self.assertEqual(instance.field, new_instance.field) class TestValidation(PostgreSQLSimpleTestCase): def test_not_a_string(self): field = HStoreField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean({'a': 1}, None) self.assertEqual(cm.exception.code, 'not_a_string') self.assertEqual(cm.exception.message % cm.exception.params, 'The value of “a” is not a string or null.') def test_none_allowed_as_value(self): field = HStoreField() self.assertEqual(field.clean({'a': None}, None), {'a': None}) class TestFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = forms.HStoreField() value = field.clean('{"a": "b"}') self.assertEqual(value, {'a': 'b'}) def test_invalid_json(self): field = forms.HStoreField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('{"a": "b"') self.assertEqual(cm.exception.messages[0], 'Could not load JSON data.') self.assertEqual(cm.exception.code, 'invalid_json') def test_non_dict_json(self): field = forms.HStoreField() msg = 'Input must be a JSON dictionary.' 
with self.assertRaisesMessage(exceptions.ValidationError, msg) as cm: field.clean('["a", "b", 1]') self.assertEqual(cm.exception.code, 'invalid_format') def test_not_string_values(self): field = forms.HStoreField() value = field.clean('{"a": 1}') self.assertEqual(value, {'a': '1'}) def test_none_value(self): field = forms.HStoreField() value = field.clean('{"a": null}') self.assertEqual(value, {'a': None}) def test_empty(self): field = forms.HStoreField(required=False) value = field.clean('') self.assertEqual(value, {}) def test_model_field_formfield(self): model_field = HStoreField() form_field = model_field.formfield() self.assertIsInstance(form_field, forms.HStoreField) def test_field_has_changed(self): class HStoreFormTest(Form): f1 = forms.HStoreField() form_w_hstore = HStoreFormTest() self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}) self.assertTrue(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}, initial={'f1': '{"a": 1}'}) self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 2}'}, initial={'f1': '{"a": 1}'}) self.assertTrue(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}, initial={'f1': {"a": 1}}) self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 2}'}, initial={'f1': {"a": 1}}) self.assertTrue(form_w_hstore.has_changed()) class TestValidator(PostgreSQLSimpleTestCase): def test_simple_valid(self): validator = KeysValidator(keys=['a', 'b']) validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) def test_missing_keys(self): validator = KeysValidator(keys=['a', 'b']) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some keys were missing: b') self.assertEqual(cm.exception.code, 'missing_keys') def test_strict_valid(self): validator = KeysValidator(keys=['a', 'b'], strict=True) validator({'a': 'foo', 'b': 'bar'}) def test_extra_keys(self): validator = KeysValidator(keys=['a', 'b'], strict=True) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c') self.assertEqual(cm.exception.code, 'extra_keys') def test_custom_messages(self): messages = { 'missing_keys': 'Foobar', } validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Foobar') self.assertEqual(cm.exception.code, 'missing_keys') with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c') self.assertEqual(cm.exception.code, 'extra_keys') def test_deconstruct(self): messages = { 'missing_keys': 'Foobar', } validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages) path, args, kwargs = validator.deconstruct() self.assertEqual(path, 'django.contrib.postgres.validators.KeysValidator') self.assertEqual(args, ()) self.assertEqual(kwargs, {'keys': ['a', 'b'], 'strict': True, 'messages': messages})
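# Illustrative sketch (not part of the tests above): the lookups these tests
# exercise in their typical usage pattern, run against the HStoreModel
# declared in models.py. Requires a PostgreSQL database with the hstore
# extension activated (e.g. via the HStoreExtension migration operation) and
# 'django.contrib.postgres' in INSTALLED_APPS.
def _hstore_usage_example():
    obj = HStoreModel.objects.create(field={'cat': 'TigrOu', 'breed': 'birman'})
    assert HStoreModel.objects.filter(field__breed='birman').exists()  # key transform
    assert HStoreModel.objects.filter(field__has_key='cat').exists()   # has_key lookup
    return obj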
import datetime from unittest import mock from django.contrib.postgres.indexes import OpClass from django.db import ( IntegrityError, NotSupportedError, connection, transaction, ) from django.db.models import ( CheckConstraint, Deferrable, F, Func, IntegerField, Q, UniqueConstraint, ) from django.db.models.fields.json import KeyTextTransform from django.db.models.functions import Cast, Left, Lower from django.test import ignore_warnings, modify_settings, skipUnlessDBFeature from django.utils import timezone from django.utils.deprecation import RemovedInDjango50Warning from . import PostgreSQLTestCase from .models import ( HotelReservation, IntegerArrayModel, RangesModel, Room, Scene, ) try: from psycopg2.extras import DateRange, NumericRange from django.contrib.postgres.constraints import ExclusionConstraint from django.contrib.postgres.fields import ( DateTimeRangeField, RangeBoundary, RangeOperators, ) except ImportError: pass @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) class SchemaTests(PostgreSQLTestCase): get_opclass_query = ''' SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = %s ''' def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_check_constraint_range_value(self): constraint_name = 'ints_between' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(ints__contained_by=NumericRange(10, 30)), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(20, 50)) RangesModel.objects.create(ints=(10, 30)) def test_check_constraint_daterange_contains(self): constraint_name = 'dates_contains' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(dates__contains=F('dates_inner')), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) date_1 = datetime.date(2016, 1, 1) date_2 = datetime.date(2016, 1, 4) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create( dates=(date_1, date_2), dates_inner=(date_1, date_2.replace(day=5)), ) RangesModel.objects.create( dates=(date_1, date_2), dates_inner=(date_1, date_2), ) def test_check_constraint_datetimerange_contains(self): constraint_name = 'timestamps_contains' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = CheckConstraint( check=Q(timestamps__contains=F('timestamps_inner')), name=constraint_name, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) datetime_1 = datetime.datetime(2016, 1, 1) datetime_2 = datetime.datetime(2016, 1, 2, 12) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create( timestamps=(datetime_1, datetime_2), timestamps_inner=(datetime_1, datetime_2.replace(hour=13)), ) RangesModel.objects.create( 
timestamps=(datetime_1, datetime_2), timestamps_inner=(datetime_1, datetime_2), ) def test_opclass(self): constraint = UniqueConstraint( name='test_opclass', fields=['scene'], opclasses=['varchar_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table)) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [constraint.name]) self.assertEqual( cursor.fetchall(), [('varchar_pattern_ops', constraint.name)], ) # Drop the constraint. with connection.schema_editor() as editor: editor.remove_constraint(Scene, constraint) self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table)) def test_opclass_multiple_columns(self): constraint = UniqueConstraint( name='test_opclass_multiple', fields=['scene', 'setting'], opclasses=['varchar_pattern_ops', 'text_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [constraint.name]) expected_opclasses = ( ('varchar_pattern_ops', constraint.name), ('text_pattern_ops', constraint.name), ) self.assertCountEqual(cursor.fetchall(), expected_opclasses) def test_opclass_partial(self): constraint = UniqueConstraint( name='test_opclass_partial', fields=['scene'], opclasses=['varchar_pattern_ops'], condition=Q(setting__contains="Sir Bedemir's Castle"), ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [constraint.name]) self.assertCountEqual( cursor.fetchall(), [('varchar_pattern_ops', constraint.name)], ) @skipUnlessDBFeature('supports_covering_indexes') def test_opclass_include(self): constraint = UniqueConstraint( name='test_opclass_include', fields=['scene'], opclasses=['varchar_pattern_ops'], include=['setting'], ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [constraint.name]) self.assertCountEqual( cursor.fetchall(), [('varchar_pattern_ops', constraint.name)], ) @skipUnlessDBFeature('supports_expression_indexes') def test_opclass_func(self): constraint = UniqueConstraint( OpClass(Lower('scene'), name='text_pattern_ops'), name='test_opclass_func', ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) constraints = self.get_constraints(Scene._meta.db_table) self.assertIs(constraints[constraint.name]['unique'], True) self.assertIn(constraint.name, constraints) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query, [constraint.name]) self.assertEqual( cursor.fetchall(), [('text_pattern_ops', constraint.name)], ) Scene.objects.create(scene='Scene 10', setting='The dark forest of Ewing') with self.assertRaises(IntegrityError), transaction.atomic(): Scene.objects.create(scene='ScEnE 10', setting="Sir Bedemir's Castle") Scene.objects.create(scene='Scene 5', setting="Sir Bedemir's Castle") # Drop the constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Scene, constraint) self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table)) Scene.objects.create(scene='ScEnE 10', setting="Sir Bedemir's Castle") @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) class ExclusionConstraintTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_invalid_condition(self): msg = 'ExclusionConstraint.condition must be a Q instance.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_invalid_condition', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], condition=F('invalid'), ) def test_invalid_index_type(self): msg = 'Exclusion constraints only support GiST or SP-GiST indexes.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='gin', name='exclude_invalid_index_type', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], ) def test_invalid_expressions(self): msg = 'The expressions must be a list of 2-tuples.' for expressions in (['foo'], [('foo')], [('foo_1', 'foo_2', 'foo_3')]): with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_invalid_expressions', expressions=expressions, ) def test_empty_expressions(self): msg = 'At least one expression is required to define an exclusion constraint.' for empty_expressions in (None, []): with self.subTest(empty_expressions), self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( index_type='GIST', name='exclude_empty_expressions', expressions=empty_expressions, ) def test_invalid_deferrable(self): msg = 'ExclusionConstraint.deferrable must be a Deferrable instance.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_deferrable', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], deferrable='invalid', ) def test_deferrable_with_condition(self): msg = 'ExclusionConstraint with conditions cannot be deferred.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_condition', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], condition=Q(cancelled=False), deferrable=Deferrable.DEFERRED, ) def test_invalid_include_type(self): msg = 'ExclusionConstraint.include must be a list or tuple.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_include', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], include='invalid', ) @ignore_warnings(category=RemovedInDjango50Warning) def test_invalid_opclasses_type(self): msg = 'ExclusionConstraint.opclasses must be a list or tuple.' with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_opclasses', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], opclasses='invalid', ) @ignore_warnings(category=RemovedInDjango50Warning) def test_opclasses_and_expressions_same_length(self): msg = ( 'ExclusionConstraint.expressions and ' 'ExclusionConstraint.opclasses must have the same number of ' 'elements.' 
) with self.assertRaisesMessage(ValueError, msg): ExclusionConstraint( name='exclude_invalid_expressions_opclasses_length', expressions=[(F('datespan'), RangeOperators.OVERLAPS)], opclasses=['foo', 'bar'], ) def test_repr(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), (F('room'), RangeOperators.EQUAL), ], ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='GIST' expressions=[" "(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], condition=Q(cancelled=False), index_type='SPGiST', ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='SPGiST' expressions=[" "(F(datespan), '-|-')] name='exclude_overlapping' " "condition=(AND: ('cancelled', False))>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], deferrable=Deferrable.IMMEDIATE, ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='GIST' expressions=[" "(F(datespan), '-|-')] name='exclude_overlapping' " "deferrable=Deferrable.IMMEDIATE>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], include=['cancelled', 'room'], ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='GIST' expressions=[" "(F(datespan), '-|-')] name='exclude_overlapping' " "include=('cancelled', 'room')>", ) constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[ (OpClass('datespan', name='range_ops'), RangeOperators.ADJACENT_TO), ], ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='GIST' expressions=[" "(OpClass(F(datespan), name=range_ops), '-|-')] " "name='exclude_overlapping'>", ) def test_eq(self): constraint_1 = ExclusionConstraint( name='exclude_overlapping', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), (F('room'), RangeOperators.EQUAL), ], condition=Q(cancelled=False), ) constraint_2 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], ) constraint_3 = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], condition=Q(cancelled=False), ) constraint_4 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], deferrable=Deferrable.DEFERRED, ) constraint_5 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], deferrable=Deferrable.IMMEDIATE, ) constraint_6 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], deferrable=Deferrable.IMMEDIATE, include=['cancelled'], ) constraint_7 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], include=['cancelled'], ) with ignore_warnings(category=RemovedInDjango50Warning): constraint_8 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], include=['cancelled'], opclasses=['range_ops', 'range_ops'] ) constraint_9 = ExclusionConstraint( name='exclude_overlapping', expressions=[ ('datespan', 
RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL), ], opclasses=['range_ops', 'range_ops'] ) self.assertNotEqual(constraint_2, constraint_9) self.assertNotEqual(constraint_7, constraint_8) self.assertEqual(constraint_1, constraint_1) self.assertEqual(constraint_1, mock.ANY) self.assertNotEqual(constraint_1, constraint_2) self.assertNotEqual(constraint_1, constraint_3) self.assertNotEqual(constraint_1, constraint_4) self.assertNotEqual(constraint_2, constraint_3) self.assertNotEqual(constraint_2, constraint_4) self.assertNotEqual(constraint_2, constraint_7) self.assertNotEqual(constraint_4, constraint_5) self.assertNotEqual(constraint_5, constraint_6) self.assertNotEqual(constraint_1, object()) def test_deconstruct(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], }) def test_deconstruct_index_type(self): constraint = ExclusionConstraint( name='exclude_overlapping', index_type='SPGIST', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'index_type': 'SPGIST', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], }) def test_deconstruct_condition(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], condition=Q(cancelled=False), ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)], 'condition': Q(cancelled=False), }) def test_deconstruct_deferrable(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], deferrable=Deferrable.DEFERRED, ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS)], 'deferrable': Deferrable.DEFERRED, }) def test_deconstruct_include(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], include=['cancelled', 'room'], ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS)], 'include': ('cancelled', 'room'), }) @ignore_warnings(category=RemovedInDjango50Warning) def test_deconstruct_opclasses(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[('datespan', RangeOperators.OVERLAPS)], opclasses=['range_ops'], ) path, args, kwargs = constraint.deconstruct() 
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint') self.assertEqual(args, ()) self.assertEqual(kwargs, { 'name': 'exclude_overlapping', 'expressions': [('datespan', RangeOperators.OVERLAPS)], 'opclasses': ['range_ops'], }) def _test_range_overlaps(self, constraint): # Create exclusion constraint. self.assertNotIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table)) with connection.schema_editor() as editor: editor.add_constraint(HotelReservation, constraint) self.assertIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table)) # Add initial reservations. room101 = Room.objects.create(number=101) room102 = Room.objects.create(number=102) datetimes = [ timezone.datetime(2018, 6, 20), timezone.datetime(2018, 6, 24), timezone.datetime(2018, 6, 26), timezone.datetime(2018, 6, 28), timezone.datetime(2018, 6, 29), ] HotelReservation.objects.create( datespan=DateRange(datetimes[0].date(), datetimes[1].date()), start=datetimes[0], end=datetimes[1], room=room102, ) HotelReservation.objects.create( datespan=DateRange(datetimes[1].date(), datetimes[3].date()), start=datetimes[1], end=datetimes[3], room=room102, ) # Overlap dates. with self.assertRaises(IntegrityError), transaction.atomic(): reservation = HotelReservation( datespan=(datetimes[1].date(), datetimes[2].date()), start=datetimes[1], end=datetimes[2], room=room102, ) reservation.save() # Valid range. HotelReservation.objects.bulk_create([ # Other room. HotelReservation( datespan=(datetimes[1].date(), datetimes[2].date()), start=datetimes[1], end=datetimes[2], room=room101, ), # Cancelled reservation. HotelReservation( datespan=(datetimes[1].date(), datetimes[1].date()), start=datetimes[1], end=datetimes[2], room=room102, cancelled=True, ), # Other adjacent dates. 
HotelReservation( datespan=(datetimes[3].date(), datetimes[4].date()), start=datetimes[3], end=datetimes[4], room=room102, ), ]) @ignore_warnings(category=RemovedInDjango50Warning) def test_range_overlaps_custom_opclasses(self): class TsTzRange(Func): function = 'TSTZRANGE' output_field = DateTimeRangeField() constraint = ExclusionConstraint( name='exclude_overlapping_reservations_custom', expressions=[ (TsTzRange('start', 'end', RangeBoundary()), RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL) ], condition=Q(cancelled=False), opclasses=['range_ops', 'gist_int4_ops'], ) self._test_range_overlaps(constraint) def test_range_overlaps_custom(self): class TsTzRange(Func): function = 'TSTZRANGE' output_field = DateTimeRangeField() constraint = ExclusionConstraint( name='exclude_overlapping_reservations_custom_opclass', expressions=[ ( OpClass(TsTzRange('start', 'end', RangeBoundary()), 'range_ops'), RangeOperators.OVERLAPS, ), (OpClass('room', 'gist_int4_ops'), RangeOperators.EQUAL), ], condition=Q(cancelled=False), ) self._test_range_overlaps(constraint) def test_range_overlaps(self): constraint = ExclusionConstraint( name='exclude_overlapping_reservations', expressions=[ (F('datespan'), RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL) ], condition=Q(cancelled=False), ) self._test_range_overlaps(constraint) def test_range_adjacent(self): constraint_name = 'ints_adjacent' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) # Drop the constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(RangesModel, constraint) self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) def test_expressions_with_params(self): constraint_name = 'scene_left_equal' self.assertNotIn(constraint_name, self.get_constraints(Scene._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[(Left('scene', 4), RangeOperators.EQUAL)], ) with connection.schema_editor() as editor: editor.add_constraint(Scene, constraint) self.assertIn(constraint_name, self.get_constraints(Scene._meta.db_table)) def test_expressions_with_key_transform(self): constraint_name = 'exclude_overlapping_reservations_smoking' constraint = ExclusionConstraint( name=constraint_name, expressions=[ (F('datespan'), RangeOperators.OVERLAPS), (KeyTextTransform('smoking', 'requirements'), RangeOperators.EQUAL), ], ) with connection.schema_editor() as editor: editor.add_constraint(HotelReservation, constraint) self.assertIn( constraint_name, self.get_constraints(HotelReservation._meta.db_table), ) def test_index_transform(self): constraint_name = 'first_index_equal' constraint = ExclusionConstraint( name=constraint_name, expressions=[('field__0', RangeOperators.EQUAL)], ) with connection.schema_editor() as editor: editor.add_constraint(IntegerArrayModel, constraint) self.assertIn( constraint_name, self.get_constraints(IntegerArrayModel._meta.db_table), ) def test_range_adjacent_initially_deferred(self): constraint_name = 'ints_adjacent_deferred' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) adjacent_range = RangesModel.objects.create(ints=(10, 20)) # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(constraint_name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) # Remove adjacent range before the end of transaction. 
adjacent_range.delete() RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_range_adjacent_gist_include(self): constraint_name = 'ints_adjacent_gist_include' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='gist', include=['decimals', 'ints'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) @skipUnlessDBFeature('supports_covering_spgist_indexes') def test_range_adjacent_spgist_include(self): constraint_name = 'ints_adjacent_spgist_include' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='spgist', include=['decimals', 'ints'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_range_adjacent_gist_include_condition(self): constraint_name = 'ints_adjacent_gist_include_condition' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='gist', include=['decimals'], condition=Q(id__gte=100), ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_spgist_indexes') def test_range_adjacent_spgist_include_condition(self): constraint_name = 'ints_adjacent_spgist_include_condition' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='spgist', include=['decimals'], condition=Q(id__gte=100), ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_range_adjacent_gist_include_deferrable(self): constraint_name = 'ints_adjacent_gist_include_deferrable' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='gist', include=['decimals'], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) 
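    # Editorial sketch (not part of the original test suite): as
    # test_range_adjacent_initially_deferred above shows, a Deferrable.DEFERRED
    # exclusion constraint is only checked at COMMIT, so a transient violation can
    # be created and repaired inside a single transaction, and
    # "SET CONSTRAINTS ... IMMEDIATE" forces the pending check early. The helper
    # name and the range values below are hypothetical; it assumes the deferred
    # 'ints_adjacent_deferred' constraint created in that test is in place.
    def _replace_adjacent_range_sketch(self):
        with transaction.atomic():
            old = RangesModel.objects.get(ints=(10, 20))
            # Adjacent to (10, 20); accepted for now because the check is deferred.
            RangesModel.objects.create(ints=(20, 50))
            # Resolve the violation before COMMIT.
            old.delete()
            # Surface any remaining pending check now rather than at COMMIT.
            with connection.cursor() as cursor:
                quoted = connection.ops.quote_name('ints_adjacent_deferred')
                cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted)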
@skipUnlessDBFeature('supports_covering_spgist_indexes') def test_range_adjacent_spgist_include_deferrable(self): constraint_name = 'ints_adjacent_spgist_include_deferrable' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='spgist', include=['decimals'], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) def test_gist_include_not_supported(self): constraint_name = 'ints_adjacent_gist_include_not_supported' constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='gist', include=['id'], ) msg = ( 'Covering exclusion constraints using a GiST index require ' 'PostgreSQL 12+.' ) with connection.schema_editor() as editor: with mock.patch( 'django.db.backends.postgresql.features.DatabaseFeatures.supports_covering_gist_indexes', False, ): with self.assertRaisesMessage(NotSupportedError, msg): editor.add_constraint(RangesModel, constraint) def test_spgist_include_not_supported(self): constraint_name = 'ints_adjacent_spgist_include_not_supported' constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='spgist', include=['id'], ) msg = ( 'Covering exclusion constraints using an SP-GiST index require ' 'PostgreSQL 14+.' ) with connection.schema_editor() as editor: with mock.patch( 'django.db.backends.postgresql.features.DatabaseFeatures.' 'supports_covering_spgist_indexes', False, ): with self.assertRaisesMessage(NotSupportedError, msg): editor.add_constraint(RangesModel, constraint) def test_range_adjacent_opclass(self): constraint_name = 'ints_adjacent_opclass' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[ (OpClass('ints', name='range_ops'), RangeOperators.ADJACENT_TO), ], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) constraints = self.get_constraints(RangesModel._meta.db_table) self.assertIn(constraint_name, constraints) with editor.connection.cursor() as cursor: cursor.execute(SchemaTests.get_opclass_query, [constraint_name]) self.assertEqual( cursor.fetchall(), [('range_ops', constraint_name)], ) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) # Drop the constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(RangesModel, constraint) self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) def test_range_adjacent_opclass_condition(self): constraint_name = 'ints_adjacent_opclass_condition' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[ (OpClass('ints', name='range_ops'), RangeOperators.ADJACENT_TO), ], condition=Q(id__gte=100), ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) def test_range_adjacent_opclass_deferrable(self): constraint_name = 'ints_adjacent_opclass_deferrable' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[ (OpClass('ints', name='range_ops'), RangeOperators.ADJACENT_TO), ], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_range_adjacent_gist_opclass_include(self): constraint_name = 'ints_adjacent_gist_opclass_include' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[ (OpClass('ints', name='range_ops'), RangeOperators.ADJACENT_TO), ], index_type='gist', include=['decimals'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @skipUnlessDBFeature('supports_covering_spgist_indexes') def test_range_adjacent_spgist_opclass_include(self): constraint_name = 'ints_adjacent_spgist_opclass_include' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[ (OpClass('ints', name='range_ops'), RangeOperators.ADJACENT_TO), ], index_type='spgist', include=['decimals'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) def test_range_equal_cast(self): constraint_name = 'exclusion_equal_room_cast' self.assertNotIn(constraint_name, self.get_constraints(Room._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[(Cast('number', IntegerField()), RangeOperators.EQUAL)], ) with connection.schema_editor() as editor: editor.add_constraint(Room, constraint) self.assertIn(constraint_name, self.get_constraints(Room._meta.db_table)) @modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) class ExclusionConstraintOpclassesDepracationTests(PostgreSQLTestCase): def get_constraints(self, table): """Get the constraints on the table using a new cursor.""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def test_warning(self): msg = ( 'The opclasses argument is deprecated in favor of using ' 'django.contrib.postgres.indexes.OpClass in ' 'ExclusionConstraint.expressions.' 
) with self.assertWarnsMessage(RemovedInDjango50Warning, msg): ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], opclasses=['range_ops'], ) @ignore_warnings(category=RemovedInDjango50Warning) def test_repr(self): constraint = ExclusionConstraint( name='exclude_overlapping', expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)], opclasses=['range_ops'], ) self.assertEqual( repr(constraint), "<ExclusionConstraint: index_type='GIST' expressions=[" "(F(datespan), '-|-')] name='exclude_overlapping' " "opclasses=['range_ops']>", ) @ignore_warnings(category=RemovedInDjango50Warning) def test_range_adjacent_opclasses(self): constraint_name = 'ints_adjacent_opclasses' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], opclasses=['range_ops'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) constraints = self.get_constraints(RangesModel._meta.db_table) self.assertIn(constraint_name, constraints) with editor.connection.cursor() as cursor: cursor.execute(SchemaTests.get_opclass_query, [constraint.name]) self.assertEqual( cursor.fetchall(), [('range_ops', constraint.name)], ) RangesModel.objects.create(ints=(20, 50)) with self.assertRaises(IntegrityError), transaction.atomic(): RangesModel.objects.create(ints=(10, 20)) RangesModel.objects.create(ints=(10, 19)) RangesModel.objects.create(ints=(51, 60)) # Drop the constraint. with connection.schema_editor() as editor: editor.remove_constraint(RangesModel, constraint) self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @ignore_warnings(category=RemovedInDjango50Warning) def test_range_adjacent_opclasses_condition(self): constraint_name = 'ints_adjacent_opclasses_condition' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], opclasses=['range_ops'], condition=Q(id__gte=100), ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @ignore_warnings(category=RemovedInDjango50Warning) def test_range_adjacent_opclasses_deferrable(self): constraint_name = 'ints_adjacent_opclasses_deferrable' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], opclasses=['range_ops'], deferrable=Deferrable.DEFERRED, ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) @ignore_warnings(category=RemovedInDjango50Warning) @skipUnlessDBFeature('supports_covering_gist_indexes') def test_range_adjacent_gist_opclasses_include(self): constraint_name = 'ints_adjacent_gist_opclasses_include' self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table)) constraint = ExclusionConstraint( name=constraint_name, expressions=[('ints', RangeOperators.ADJACENT_TO)], index_type='gist', opclasses=['range_ops'], include=['decimals'], ) with connection.schema_editor() as editor: editor.add_constraint(RangesModel, constraint) self.assertIn(constraint_name, 
                      self.get_constraints(RangesModel._meta.db_table))

    @ignore_warnings(category=RemovedInDjango50Warning)
    @skipUnlessDBFeature('supports_covering_spgist_indexes')
    def test_range_adjacent_spgist_opclasses_include(self):
        constraint_name = 'ints_adjacent_spgist_opclasses_include'
        self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
        constraint = ExclusionConstraint(
            name=constraint_name,
            expressions=[('ints', RangeOperators.ADJACENT_TO)],
            index_type='spgist',
            opclasses=['range_ops'],
            include=['decimals'],
        )
        with connection.schema_editor() as editor:
            editor.add_constraint(RangesModel, constraint)
        self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
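
# Editorial sketch (not part of the test suite): the tests above add
# ExclusionConstraint through the schema editor, but in application code the same
# constraint is normally declared on the model's Meta and applied via migrations.
# The model, field, and constraint names below are illustrative, modelled on the
# HotelReservation/Room cases exercised above.
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import DateRangeField, RangeOperators
from django.db import models


class ReservationSketch(models.Model):
    room = models.ForeignKey('Room', on_delete=models.CASCADE)
    datespan = DateRangeField()
    cancelled = models.BooleanField(default=False)

    class Meta:
        # app_label is assumed here only so the sketch model resolves inside this
        # test app; a real model would live in an installed application.
        app_label = 'postgres_tests'
        constraints = [
            ExclusionConstraint(
                name='exclude_overlapping_reservations_sketch',
                expressions=[
                    ('datespan', RangeOperators.OVERLAPS),
                    ('room', RangeOperators.EQUAL),
                ],
                condition=models.Q(cancelled=False),
            ),
        ]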
import datetime from decimal import Decimal from django.core.exceptions import FieldDoesNotExist, FieldError from django.db.models import ( BooleanField, Case, CharField, Count, DateTimeField, DecimalField, Exists, ExpressionWrapper, F, FloatField, Func, IntegerField, Max, OuterRef, Q, Subquery, Sum, Value, When, ) from django.db.models.expressions import RawSQL from django.db.models.functions import ( Coalesce, ExtractYear, Floor, Length, Lower, Trim, ) from django.test import TestCase, skipUnlessDBFeature from django.test.utils import register_lookup from .models import ( Author, Book, Company, DepartmentStore, Employee, Publisher, Store, Ticket, ) class NonAggregateAnnotationTestCase(TestCase): @classmethod def setUpTestData(cls): cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34) cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35) cls.a3 = Author.objects.create(name='Brad Dayley', age=45) cls.a4 = Author.objects.create(name='James Bennett', age=29) cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37) cls.a6 = Author.objects.create(name='Paul Bissex', age=29) cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25) cls.a8 = Author.objects.create(name='Peter Norvig', age=57) cls.a9 = Author.objects.create(name='Stuart Russell', age=46) cls.a1.friends.add(cls.a2, cls.a4) cls.a2.friends.add(cls.a1, cls.a7) cls.a4.friends.add(cls.a1) cls.a5.friends.add(cls.a6, cls.a7) cls.a6.friends.add(cls.a5, cls.a7) cls.a7.friends.add(cls.a2, cls.a5, cls.a6) cls.a8.friends.add(cls.a9) cls.a9.friends.add(cls.a8) cls.p1 = Publisher.objects.create(name='Apress', num_awards=3) cls.p2 = Publisher.objects.create(name='Sams', num_awards=1) cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7) cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9) cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0) cls.b1 = Book.objects.create( isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right', pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1, pubdate=datetime.date(2007, 12, 6) ) cls.b2 = Book.objects.create( isbn='067232959', name='Sams Teach Yourself Django in 24 Hours', pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2, pubdate=datetime.date(2008, 3, 3) ) cls.b3 = Book.objects.create( isbn='159059996', name='Practical Django Projects', pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1, pubdate=datetime.date(2008, 6, 23) ) cls.b4 = Book.objects.create( isbn='013235613', name='Python Web Development with Django', pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3, pubdate=datetime.date(2008, 11, 3) ) cls.b5 = Book.objects.create( isbn='013790395', name='Artificial Intelligence: A Modern Approach', pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3, pubdate=datetime.date(1995, 1, 15) ) cls.b6 = Book.objects.create( isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4, pubdate=datetime.date(1991, 10, 15) ) cls.b1.authors.add(cls.a1, cls.a2) cls.b2.authors.add(cls.a3) cls.b3.authors.add(cls.a4) cls.b4.authors.add(cls.a5, cls.a6, cls.a7) cls.b5.authors.add(cls.a8, cls.a9) cls.b6.authors.add(cls.a8) cls.s1 = Store.objects.create( name='Amazon.com', original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42), 
friday_night_closing=datetime.time(23, 59, 59) ) cls.s2 = Store.objects.create( name='Books.com', original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37), friday_night_closing=datetime.time(23, 59, 59) ) cls.s3 = Store.objects.create( name="Mamma and Pappa's Books", original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14), friday_night_closing=datetime.time(21, 30) ) cls.s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6) cls.s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6) cls.s3.books.add(cls.b3, cls.b4, cls.b6) def test_basic_annotation(self): books = Book.objects.annotate(is_book=Value(1)) for book in books: self.assertEqual(book.is_book, 1) def test_basic_f_annotation(self): books = Book.objects.annotate(another_rating=F('rating')) for book in books: self.assertEqual(book.another_rating, book.rating) def test_joined_annotation(self): books = Book.objects.select_related('publisher').annotate( num_awards=F('publisher__num_awards')) for book in books: self.assertEqual(book.num_awards, book.publisher.num_awards) def test_joined_transformed_annotation(self): Employee.objects.bulk_create([ Employee( first_name='John', last_name='Doe', age=18, store=self.s1, salary=15000, ), Employee( first_name='Jane', last_name='Jones', age=30, store=self.s2, salary=30000, ), Employee( first_name='Jo', last_name='Smith', age=55, store=self.s3, salary=50000, ), ]) employees = Employee.objects.annotate( store_opened_year=F('store__original_opening__year'), ) for employee in employees: self.assertEqual( employee.store_opened_year, employee.store.original_opening.year, ) def test_custom_transform_annotation(self): with register_lookup(DecimalField, Floor): books = Book.objects.annotate(floor_price=F('price__floor')) self.assertSequenceEqual(books.values_list('pk', 'floor_price'), [ (self.b1.pk, 30), (self.b2.pk, 23), (self.b3.pk, 29), (self.b4.pk, 29), (self.b5.pk, 82), (self.b6.pk, 75), ]) def test_chaining_transforms(self): Company.objects.create(name=' Django Software Foundation ') Company.objects.create(name='Yahoo') with register_lookup(CharField, Trim), register_lookup(CharField, Length): for expr in [Length('name__trim'), F('name__trim__length')]: with self.subTest(expr=expr): self.assertCountEqual( Company.objects.annotate(length=expr).values('name', 'length'), [ {'name': ' Django Software Foundation ', 'length': 26}, {'name': 'Yahoo', 'length': 5}, ], ) def test_mixed_type_annotation_date_interval(self): active = datetime.datetime(2015, 3, 20, 14, 0, 0) duration = datetime.timedelta(hours=1) expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration Ticket.objects.create(active_at=active, duration=duration) t = Ticket.objects.annotate( expires=ExpressionWrapper(F('active_at') + F('duration'), output_field=DateTimeField()) ).first() self.assertEqual(t.expires, expires) def test_mixed_type_annotation_numbers(self): test = self.b1 b = Book.objects.annotate( combined=ExpressionWrapper(F('pages') + F('rating'), output_field=IntegerField()) ).get(isbn=test.isbn) combined = int(test.pages + test.rating) self.assertEqual(b.combined, combined) def test_empty_expression_annotation(self): books = Book.objects.annotate( selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField()) ) self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(not book.selected for book in books)) books = Book.objects.annotate( selected=ExpressionWrapper(Q(pk__in=Book.objects.none()), output_field=BooleanField()) ) self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(not 
book.selected for book in books)) def test_full_expression_annotation(self): books = Book.objects.annotate( selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()), ) self.assertEqual(len(books), Book.objects.count()) self.assertTrue(all(book.selected for book in books)) def test_full_expression_annotation_with_aggregation(self): qs = Book.objects.filter(isbn='159059725').annotate( selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()), rating_count=Count('rating'), ) self.assertEqual([book.rating_count for book in qs], [1]) def test_aggregate_over_full_expression_annotation(self): qs = Book.objects.annotate( selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()), ).aggregate(Sum('selected')) self.assertEqual(qs['selected__sum'], Book.objects.count()) def test_empty_queryset_annotation(self): qs = Author.objects.annotate( empty=Subquery(Author.objects.values('id').none()) ) self.assertIsNone(qs.first().empty) def test_annotate_with_aggregation(self): books = Book.objects.annotate(is_book=Value(1), rating_count=Count('rating')) for book in books: self.assertEqual(book.is_book, 1) self.assertEqual(book.rating_count, 1) def test_combined_expression_annotation_with_aggregation(self): book = Book.objects.annotate( combined=ExpressionWrapper(Value(3) * Value(4), output_field=IntegerField()), rating_count=Count('rating'), ).first() self.assertEqual(book.combined, 12) self.assertEqual(book.rating_count, 1) def test_combined_f_expression_annotation_with_aggregation(self): book = Book.objects.filter(isbn='159059725').annotate( combined=ExpressionWrapper(F('price') * F('pages'), output_field=FloatField()), rating_count=Count('rating'), ).first() self.assertEqual(book.combined, 13410.0) self.assertEqual(book.rating_count, 1) @skipUnlessDBFeature('supports_boolean_expr_in_select_clause') def test_q_expression_annotation_with_aggregation(self): book = Book.objects.filter(isbn='159059725').annotate( isnull_pubdate=ExpressionWrapper( Q(pubdate__isnull=True), output_field=BooleanField(), ), rating_count=Count('rating'), ).first() self.assertIs(book.isnull_pubdate, False) self.assertEqual(book.rating_count, 1) @skipUnlessDBFeature('supports_boolean_expr_in_select_clause') def test_grouping_by_q_expression_annotation(self): authors = Author.objects.annotate( under_40=ExpressionWrapper(Q(age__lt=40), output_field=BooleanField()), ).values('under_40').annotate( count_id=Count('id'), ).values('under_40', 'count_id') self.assertCountEqual(authors, [ {'under_40': False, 'count_id': 3}, {'under_40': True, 'count_id': 6}, ]) def test_aggregate_over_annotation(self): agg = Author.objects.annotate(other_age=F('age')).aggregate(otherage_sum=Sum('other_age')) other_agg = Author.objects.aggregate(age_sum=Sum('age')) self.assertEqual(agg['otherage_sum'], other_agg['age_sum']) @skipUnlessDBFeature('can_distinct_on_fields') def test_distinct_on_with_annotation(self): store = Store.objects.create( name='test store', original_opening=datetime.datetime.now(), friday_night_closing=datetime.time(21, 00, 00), ) names = [ 'Theodore Roosevelt', 'Eleanor Roosevelt', 'Franklin Roosevelt', 'Ned Stark', 'Catelyn Stark', ] for name in names: Employee.objects.create( store=store, first_name=name.split()[0], last_name=name.split()[1], age=30, salary=2000, ) people = Employee.objects.annotate( name_lower=Lower('last_name'), ).distinct('name_lower') self.assertEqual({p.last_name for p in people}, {'Stark', 'Roosevelt'}) self.assertEqual(len(people), 2) people2 = Employee.objects.annotate( 
test_alias=F('store__name'), ).distinct('test_alias') self.assertEqual(len(people2), 1) lengths = Employee.objects.annotate( name_len=Length('first_name'), ).distinct('name_len').values_list('name_len', flat=True) self.assertCountEqual(lengths, [3, 7, 8]) def test_filter_annotation(self): books = Book.objects.annotate(is_book=Value(1)).filter(is_book=1) for book in books: self.assertEqual(book.is_book, 1) def test_filter_annotation_with_f(self): books = Book.objects.annotate( other_rating=F('rating') ).filter(other_rating=3.5) for book in books: self.assertEqual(book.other_rating, 3.5) def test_filter_annotation_with_double_f(self): books = Book.objects.annotate( other_rating=F('rating') ).filter(other_rating=F('rating')) for book in books: self.assertEqual(book.other_rating, book.rating) def test_filter_agg_with_double_f(self): books = Book.objects.annotate( sum_rating=Sum('rating') ).filter(sum_rating=F('sum_rating')) for book in books: self.assertEqual(book.sum_rating, book.rating) def test_filter_wrong_annotation(self): with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."): list(Book.objects.annotate( sum_rating=Sum('rating') ).filter(sum_rating=F('nope'))) def test_decimal_annotation(self): salary = Decimal(10) ** -Employee._meta.get_field('salary').decimal_places Employee.objects.create( first_name='Max', last_name='Paine', store=Store.objects.first(), age=23, salary=salary, ) self.assertEqual( Employee.objects.annotate(new_salary=F('salary') / 10).get().new_salary, salary / 10, ) def test_filter_decimal_annotation(self): qs = Book.objects.annotate(new_price=F('price') + 1).filter(new_price=Decimal(31)).values_list('new_price') self.assertEqual(qs.get(), (Decimal(31),)) def test_combined_annotation_commutative(self): book1 = Book.objects.annotate(adjusted_rating=F('rating') + 2).get(pk=self.b1.pk) book2 = Book.objects.annotate(adjusted_rating=2 + F('rating')).get(pk=self.b1.pk) self.assertEqual(book1.adjusted_rating, book2.adjusted_rating) book1 = Book.objects.annotate(adjusted_rating=F('rating') + None).get(pk=self.b1.pk) book2 = Book.objects.annotate(adjusted_rating=None + F('rating')).get(pk=self.b1.pk) self.assertEqual(book1.adjusted_rating, book2.adjusted_rating) def test_update_with_annotation(self): book_preupdate = Book.objects.get(pk=self.b2.pk) Book.objects.annotate(other_rating=F('rating') - 1).update(rating=F('other_rating')) book_postupdate = Book.objects.get(pk=self.b2.pk) self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating) def test_annotation_with_m2m(self): books = Book.objects.annotate(author_age=F('authors__age')).filter(pk=self.b1.pk).order_by('author_age') self.assertEqual(books[0].author_age, 34) self.assertEqual(books[1].author_age, 35) def test_annotation_reverse_m2m(self): books = Book.objects.annotate( store_name=F('store__name'), ).filter( name='Practical Django Projects', ).order_by('store_name') self.assertQuerysetEqual( books, [ 'Amazon.com', 'Books.com', 'Mamma and Pappa\'s Books' ], lambda b: b.store_name ) def test_values_annotation(self): """ Annotations can reference fields in a values clause, and contribute to an existing values clause. 
""" # annotate references a field in values() qs = Book.objects.values('rating').annotate(other_rating=F('rating') - 1) book = qs.get(pk=self.b1.pk) self.assertEqual(book['rating'] - 1, book['other_rating']) # filter refs the annotated value book = qs.get(other_rating=4) self.assertEqual(book['other_rating'], 4) # can annotate an existing values with a new field book = qs.annotate(other_isbn=F('isbn')).get(other_rating=4) self.assertEqual(book['other_rating'], 4) self.assertEqual(book['other_isbn'], '155860191') def test_values_with_pk_annotation(self): # annotate references a field in values() with pk publishers = Publisher.objects.values('id', 'book__rating').annotate(total=Sum('book__rating')) for publisher in publishers.filter(pk=self.p1.pk): self.assertEqual(publisher['book__rating'], publisher['total']) @skipUnlessDBFeature('allows_group_by_pk') def test_rawsql_group_by_collapse(self): raw = RawSQL('SELECT MIN(id) FROM annotations_book', []) qs = Author.objects.values('id').annotate( min_book_id=raw, count_friends=Count('friends'), ).order_by() _, _, group_by = qs.query.get_compiler(using='default').pre_sql_setup() self.assertEqual(len(group_by), 1) self.assertNotEqual(raw, group_by[0]) def test_defer_annotation(self): """ Deferred attributes can be referenced by an annotation, but they are not themselves deferred, and cannot be deferred. """ qs = Book.objects.defer('rating').annotate(other_rating=F('rating') - 1) with self.assertNumQueries(2): book = qs.get(other_rating=4) self.assertEqual(book.rating, 5) self.assertEqual(book.other_rating, 4) with self.assertRaisesMessage(FieldDoesNotExist, "Book has no field named 'other_rating'"): book = qs.defer('other_rating').get(other_rating=4) def test_mti_annotations(self): """ Fields on an inherited model can be referenced by an annotated field. 
""" d = DepartmentStore.objects.create( name='Angus & Robinson', original_opening=datetime.date(2014, 3, 8), friday_night_closing=datetime.time(21, 00, 00), chain='Westfield' ) books = Book.objects.filter(rating__gt=4) for b in books: d.books.add(b) qs = DepartmentStore.objects.annotate( other_name=F('name'), other_chain=F('chain'), is_open=Value(True, BooleanField()), book_isbn=F('books__isbn') ).order_by('book_isbn').filter(chain='Westfield') self.assertQuerysetEqual( qs, [ ('Angus & Robinson', 'Westfield', True, '155860191'), ('Angus & Robinson', 'Westfield', True, '159059725') ], lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn) ) def test_null_annotation(self): """ Annotating None onto a model round-trips """ book = Book.objects.annotate(no_value=Value(None, output_field=IntegerField())).first() self.assertIsNone(book.no_value) def test_order_by_annotation(self): authors = Author.objects.annotate(other_age=F('age')).order_by('other_age') self.assertQuerysetEqual( authors, [ 25, 29, 29, 34, 35, 37, 45, 46, 57, ], lambda a: a.other_age ) def test_order_by_aggregate(self): authors = Author.objects.values('age').annotate(age_count=Count('age')).order_by('age_count', 'age') self.assertQuerysetEqual( authors, [ (25, 1), (34, 1), (35, 1), (37, 1), (45, 1), (46, 1), (57, 1), (29, 2), ], lambda a: (a['age'], a['age_count']) ) def test_raw_sql_with_inherited_field(self): DepartmentStore.objects.create( name='Angus & Robinson', original_opening=datetime.date(2014, 3, 8), friday_night_closing=datetime.time(21), chain='Westfield', area=123, ) tests = ( ('name', 'Angus & Robinson'), ('surface', 123), ("case when name='Angus & Robinson' then chain else name end", 'Westfield'), ) for sql, expected_result in tests: with self.subTest(sql=sql): self.assertSequenceEqual( DepartmentStore.objects.annotate( annotation=RawSQL(sql, ()), ).values_list('annotation', flat=True), [expected_result], ) def test_annotate_exists(self): authors = Author.objects.annotate(c=Count('id')).filter(c__gt=1) self.assertFalse(authors.exists()) def test_column_field_ordering(self): """ Columns are aligned in the correct order for resolve_columns. This test will fail on MySQL if column ordering is out. Column fields should be aligned as: 1. extra_select 2. model_fields 3. annotation_fields 4. 
model_related_fields """ store = Store.objects.first() Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine', store=store, age=23, salary=Decimal(50000.00)) Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers', store=store, age=18, salary=Decimal(40000.00)) qs = Employee.objects.extra( select={'random_value': '42'} ).select_related('store').annotate( annotated_value=Value(17), ) rows = [ (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17), (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17) ] self.assertQuerysetEqual( qs.order_by('id'), rows, lambda e: ( e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age, e.salary, e.store.name, e.annotated_value)) def test_column_field_ordering_with_deferred(self): store = Store.objects.first() Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine', store=store, age=23, salary=Decimal(50000.00)) Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers', store=store, age=18, salary=Decimal(40000.00)) qs = Employee.objects.extra( select={'random_value': '42'} ).select_related('store').annotate( annotated_value=Value(17), ) rows = [ (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17), (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17) ] # and we respect deferred columns! self.assertQuerysetEqual( qs.defer('age').order_by('id'), rows, lambda e: ( e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age, e.salary, e.store.name, e.annotated_value)) def test_custom_functions(self): Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save() Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save() Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save() Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save() qs = Company.objects.annotate( tagline=Func( F('motto'), F('ticker_name'), F('description'), Value('No Tag'), function='COALESCE' ) ).order_by('name') self.assertQuerysetEqual( qs, [ ('Apple', 'APPL'), ('Django Software Foundation', 'No Tag'), ('Google', 'Do No Evil'), ('Yahoo', 'Internet Company') ], lambda c: (c.name, c.tagline) ) def test_custom_functions_can_ref_other_functions(self): Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save() Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save() Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save() Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save() class Lower(Func): function = 'LOWER' qs = Company.objects.annotate( tagline=Func( F('motto'), F('ticker_name'), F('description'), Value('No Tag'), function='COALESCE', ) ).annotate( tagline_lower=Lower(F('tagline')), ).order_by('name') # LOWER function supported by: # oracle, postgres, mysql, sqlite, sqlserver self.assertQuerysetEqual( qs, [ ('Apple', 'APPL'.lower()), ('Django Software Foundation', 'No Tag'.lower()), ('Google', 'Do No Evil'.lower()), ('Yahoo', 'Internet Company'.lower()) ], lambda c: (c.name, c.tagline_lower) ) def test_boolean_value_annotation(self): books = Book.objects.annotate( is_book=Value(True, output_field=BooleanField()), is_pony=Value(False, output_field=BooleanField()), is_none=Value(None, 
output_field=BooleanField(null=True)), ) self.assertGreater(len(books), 0) for book in books: self.assertIs(book.is_book, True) self.assertIs(book.is_pony, False) self.assertIsNone(book.is_none) def test_annotation_in_f_grouped_by_annotation(self): qs = ( Publisher.objects.annotate(multiplier=Value(3)) # group by option => sum of value * multiplier .values('name') .annotate(multiplied_value_sum=Sum(F('multiplier') * F('num_awards'))) .order_by() ) self.assertCountEqual( qs, [ {'multiplied_value_sum': 9, 'name': 'Apress'}, {'multiplied_value_sum': 0, 'name': "Jonno's House of Books"}, {'multiplied_value_sum': 27, 'name': 'Morgan Kaufmann'}, {'multiplied_value_sum': 21, 'name': 'Prentice Hall'}, {'multiplied_value_sum': 3, 'name': 'Sams'}, ] ) def test_arguments_must_be_expressions(self): msg = 'QuerySet.annotate() received non-expression(s): %s.' with self.assertRaisesMessage(TypeError, msg % BooleanField()): Book.objects.annotate(BooleanField()) with self.assertRaisesMessage(TypeError, msg % True): Book.objects.annotate(is_book=True) with self.assertRaisesMessage(TypeError, msg % ', '.join([str(BooleanField()), 'True'])): Book.objects.annotate(BooleanField(), Value(False), is_book=True) def test_chaining_annotation_filter_with_m2m(self): qs = Author.objects.filter( name='Adrian Holovaty', friends__age=35, ).annotate( jacob_name=F('friends__name'), ).filter( friends__age=29, ).annotate( james_name=F('friends__name'), ).values('jacob_name', 'james_name') self.assertCountEqual( qs, [{'jacob_name': 'Jacob Kaplan-Moss', 'james_name': 'James Bennett'}], ) def test_annotation_filter_with_subquery(self): long_books_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=400, ).values('publisher').annotate(count=Count('pk')).values('count') publisher_books_qs = Publisher.objects.annotate( total_books=Count('book'), ).filter( total_books=Subquery(long_books_qs, output_field=IntegerField()), ).values('name') self.assertCountEqual(publisher_books_qs, [{'name': 'Sams'}, {'name': 'Morgan Kaufmann'}]) def test_annotation_exists_aggregate_values_chaining(self): qs = Book.objects.values('publisher').annotate( has_authors=Exists(Book.authors.through.objects.filter(book=OuterRef('pk'))), max_pubdate=Max('pubdate'), ).values_list('max_pubdate', flat=True).order_by('max_pubdate') self.assertCountEqual(qs, [ datetime.date(1991, 10, 15), datetime.date(2008, 3, 3), datetime.date(2008, 6, 23), datetime.date(2008, 11, 3), ]) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_annotation_subquery_and_aggregate_values_chaining(self): qs = Book.objects.annotate( pub_year=ExtractYear('pubdate') ).values('pub_year').annotate( top_rating=Subquery( Book.objects.filter( pubdate__year=OuterRef('pub_year') ).order_by('-rating').values('rating')[:1] ), total_pages=Sum('pages'), ).values('pub_year', 'total_pages', 'top_rating') self.assertCountEqual(qs, [ {'pub_year': 1991, 'top_rating': 5.0, 'total_pages': 946}, {'pub_year': 1995, 'top_rating': 4.0, 'total_pages': 1132}, {'pub_year': 2007, 'top_rating': 4.5, 'total_pages': 447}, {'pub_year': 2008, 'top_rating': 4.0, 'total_pages': 1178}, ]) def test_annotation_subquery_outerref_transform(self): qs = Book.objects.annotate( top_rating_year=Subquery( Book.objects.filter( pubdate__year=OuterRef('pubdate__year') ).order_by('-rating').values('rating')[:1] ), ).values('pubdate__year', 'top_rating_year') self.assertCountEqual(qs, [ {'pubdate__year': 1991, 'top_rating_year': 5.0}, {'pubdate__year': 1995, 'top_rating_year': 4.0}, {'pubdate__year': 2007, 
'top_rating_year': 4.5}, {'pubdate__year': 2008, 'top_rating_year': 4.0}, {'pubdate__year': 2008, 'top_rating_year': 4.0}, {'pubdate__year': 2008, 'top_rating_year': 4.0}, ]) def test_annotation_aggregate_with_m2o(self): qs = Author.objects.filter(age__lt=30).annotate( max_pages=Case( When(book_contact_set__isnull=True, then=Value(0)), default=Max(F('book__pages')), ), ).values('name', 'max_pages') self.assertCountEqual(qs, [ {'name': 'James Bennett', 'max_pages': 300}, {'name': 'Paul Bissex', 'max_pages': 0}, {'name': 'Wesley J. Chun', 'max_pages': 0}, ]) class AliasTests(TestCase): @classmethod def setUpTestData(cls): cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34) cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35) cls.a3 = Author.objects.create(name='James Bennett', age=34) cls.a4 = Author.objects.create(name='Peter Norvig', age=57) cls.a5 = Author.objects.create(name='Stuart Russell', age=46) p1 = Publisher.objects.create(name='Apress', num_awards=3) cls.b1 = Book.objects.create( isbn='159059725', pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=p1, pubdate=datetime.date(2007, 12, 6), name='The Definitive Guide to Django: Web Development Done Right', ) cls.b2 = Book.objects.create( isbn='159059996', pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a3, publisher=p1, pubdate=datetime.date(2008, 6, 23), name='Practical Django Projects', ) cls.b3 = Book.objects.create( isbn='013790395', pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a4, publisher=p1, pubdate=datetime.date(1995, 1, 15), name='Artificial Intelligence: A Modern Approach', ) cls.b4 = Book.objects.create( isbn='155860191', pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a4, publisher=p1, pubdate=datetime.date(1991, 10, 15), name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', ) cls.b1.authors.add(cls.a1, cls.a2) cls.b2.authors.add(cls.a3) cls.b3.authors.add(cls.a4, cls.a5) cls.b4.authors.add(cls.a4) Store.objects.create( name='Amazon.com', original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42), friday_night_closing=datetime.time(23, 59, 59) ) Store.objects.create( name='Books.com', original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37), friday_night_closing=datetime.time(23, 59, 59) ) def test_basic_alias(self): qs = Book.objects.alias(is_book=Value(1)) self.assertIs(hasattr(qs.first(), 'is_book'), False) def test_basic_alias_annotation(self): qs = Book.objects.alias( is_book_alias=Value(1), ).annotate(is_book=F('is_book_alias')) self.assertIs(hasattr(qs.first(), 'is_book_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.is_book, 1) def test_basic_alias_f_annotation(self): qs = Book.objects.alias( another_rating_alias=F('rating') ).annotate(another_rating=F('another_rating_alias')) self.assertIs(hasattr(qs.first(), 'another_rating_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.another_rating, book.rating) def test_basic_alias_f_transform_annotation(self): qs = Book.objects.alias( pubdate_alias=F('pubdate'), ).annotate(pubdate_year=F('pubdate_alias__year')) self.assertIs(hasattr(qs.first(), 'pubdate_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.pubdate_year, book.pubdate.year) def test_alias_after_annotation(self): qs = Book.objects.annotate( is_book=Value(1), ).alias(is_book_alias=F('is_book')) book = qs.first() self.assertIs(hasattr(book, 'is_book'), True) self.assertIs(hasattr(book, 
'is_book_alias'), False) def test_overwrite_annotation_with_alias(self): qs = Book.objects.annotate(is_book=Value(1)).alias(is_book=F('is_book')) self.assertIs(hasattr(qs.first(), 'is_book'), False) def test_overwrite_alias_with_annotation(self): qs = Book.objects.alias(is_book=Value(1)).annotate(is_book=F('is_book')) for book in qs: with self.subTest(book=book): self.assertEqual(book.is_book, 1) def test_alias_annotation_expression(self): qs = Book.objects.alias( is_book_alias=Value(1), ).annotate(is_book=Coalesce('is_book_alias', 0)) self.assertIs(hasattr(qs.first(), 'is_book_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.is_book, 1) def test_alias_default_alias_expression(self): qs = Author.objects.alias( Sum('book__pages'), ).filter(book__pages__sum__gt=2000) self.assertIs(hasattr(qs.first(), 'book__pages__sum'), False) self.assertSequenceEqual(qs, [self.a4]) def test_joined_alias_annotation(self): qs = Book.objects.select_related('publisher').alias( num_awards_alias=F('publisher__num_awards'), ).annotate(num_awards=F('num_awards_alias')) self.assertIs(hasattr(qs.first(), 'num_awards_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.num_awards, book.publisher.num_awards) def test_alias_annotate_with_aggregation(self): qs = Book.objects.alias( is_book_alias=Value(1), rating_count_alias=Count('rating'), ).annotate( is_book=F('is_book_alias'), rating_count=F('rating_count_alias'), ) book = qs.first() self.assertIs(hasattr(book, 'is_book_alias'), False) self.assertIs(hasattr(book, 'rating_count_alias'), False) for book in qs: with self.subTest(book=book): self.assertEqual(book.is_book, 1) self.assertEqual(book.rating_count, 1) def test_filter_alias_with_f(self): qs = Book.objects.alias( other_rating=F('rating'), ).filter(other_rating=4.5) self.assertIs(hasattr(qs.first(), 'other_rating'), False) self.assertSequenceEqual(qs, [self.b1]) def test_filter_alias_with_double_f(self): qs = Book.objects.alias( other_rating=F('rating'), ).filter(other_rating=F('rating')) self.assertIs(hasattr(qs.first(), 'other_rating'), False) self.assertEqual(qs.count(), Book.objects.count()) def test_filter_alias_agg_with_double_f(self): qs = Book.objects.alias( sum_rating=Sum('rating'), ).filter(sum_rating=F('sum_rating')) self.assertIs(hasattr(qs.first(), 'sum_rating'), False) self.assertEqual(qs.count(), Book.objects.count()) def test_update_with_alias(self): Book.objects.alias( other_rating=F('rating') - 1, ).update(rating=F('other_rating')) self.b1.refresh_from_db() self.assertEqual(self.b1.rating, 3.5) def test_order_by_alias(self): qs = Author.objects.alias(other_age=F('age')).order_by('other_age') self.assertIs(hasattr(qs.first(), 'other_age'), False) self.assertQuerysetEqual(qs, [34, 34, 35, 46, 57], lambda a: a.age) def test_order_by_alias_aggregate(self): qs = Author.objects.values('age').alias(age_count=Count('age')).order_by('age_count', 'age') self.assertIs(hasattr(qs.first(), 'age_count'), False) self.assertQuerysetEqual(qs, [35, 46, 57, 34], lambda a: a['age']) def test_dates_alias(self): qs = Book.objects.alias( pubdate_alias=F('pubdate'), ).dates('pubdate_alias', 'month') self.assertCountEqual(qs, [ datetime.date(1991, 10, 1), datetime.date(1995, 1, 1), datetime.date(2007, 12, 1), datetime.date(2008, 6, 1), ]) def test_datetimes_alias(self): qs = Store.objects.alias( original_opening_alias=F('original_opening'), ).datetimes('original_opening_alias', 'year') self.assertCountEqual(qs, [ datetime.datetime(1994, 1, 1), 
datetime.datetime(2001, 1, 1), ]) def test_aggregate_alias(self): msg = ( "Cannot aggregate over the 'other_age' alias. Use annotate() to " "promote it." ) with self.assertRaisesMessage(FieldError, msg): Author.objects.alias( other_age=F('age'), ).aggregate(otherage_sum=Sum('other_age')) def test_defer_only_alias(self): qs = Book.objects.alias(rating_alias=F('rating') - 1) msg = "Book has no field named 'rating_alias'" for operation in ['defer', 'only']: with self.subTest(operation=operation): with self.assertRaisesMessage(FieldDoesNotExist, msg): getattr(qs, operation)('rating_alias').first() @skipUnlessDBFeature('can_distinct_on_fields') def test_distinct_on_alias(self): qs = Book.objects.alias(rating_alias=F('rating') - 1) msg = "Cannot resolve keyword 'rating_alias' into field." with self.assertRaisesMessage(FieldError, msg): qs.distinct('rating_alias').first() def test_values_alias(self): qs = Book.objects.alias(rating_alias=F('rating') - 1) msg = ( "Cannot select the 'rating_alias' alias. Use annotate() to " "promote it." ) for operation in ['values', 'values_list']: with self.subTest(operation=operation): with self.assertRaisesMessage(FieldError, msg): getattr(qs, operation)('rating_alias')
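# --- Illustrative sketch (not part of the original test module) ---
# The AliasTests above exercise the difference between annotate() and alias():
# annotate() selects the expression and attaches it to every returned instance,
# while alias() only makes the expression available to later filter(),
# exclude(), annotate(), or order_by() calls without selecting it. A minimal
# sketch, reusing the Book model and the F expression that the tests above
# already use (the helper name below is hypothetical and unused by the tests):
def _alias_vs_annotate_sketch():
    # Selected: each Book instance gains a `discounted` attribute.
    annotated = Book.objects.annotate(discounted=F('price') - 10)
    # Not selected: `discounted` is usable in filter() but never materialized.
    aliased = Book.objects.alias(discounted=F('price') - 10).filter(discounted__lt=25)
    return annotated, aliased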
f528c4d44c8d3b05e227072e02ef43d11ccc8affeccab4c20c2de8e41f173b90
import re import types from datetime import datetime, timedelta from decimal import Decimal from unittest import TestCase, mock from django.core.exceptions import ValidationError from django.core.files.base import ContentFile from django.core.validators import ( BaseValidator, DecimalValidator, EmailValidator, FileExtensionValidator, MaxLengthValidator, MaxValueValidator, MinLengthValidator, MinValueValidator, ProhibitNullCharactersValidator, RegexValidator, URLValidator, int_list_validator, validate_comma_separated_integer_list, validate_email, validate_image_file_extension, validate_integer, validate_ipv4_address, validate_ipv6_address, validate_ipv46_address, validate_slug, validate_unicode_slug, ) from django.test import SimpleTestCase try: from PIL import Image # noqa except ImportError: PILLOW_IS_INSTALLED = False else: PILLOW_IS_INSTALLED = True NOW = datetime.now() EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file', 'git+ssh'] VALID_URLS = [ 'http://www.djangoproject.com/', 'HTTP://WWW.DJANGOPROJECT.COM/', 'http://localhost/', 'http://example.com/', 'http://example.com:0', 'http://example.com:0/', 'http://example.com:65535', 'http://example.com:65535/', 'http://example.com./', 'http://www.example.com/', 'http://www.example.com:8000/test', 'http://valid-with-hyphens.com/', 'http://subdomain.example.com/', 'http://a.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'http://200.8.9.10/', 'http://200.8.9.10:8000/test', 'http://su--b.valid-----hyphens.com/', 'http://example.com?something=value', 'http://example.com/index.php?something=value&another=value2', 'https://example.com/', 'ftp://example.com/', 'ftps://example.com/', 'http://foo.com/blah_blah', 'http://foo.com/blah_blah/', 'http://foo.com/blah_blah_(wikipedia)', 'http://foo.com/blah_blah_(wikipedia)_(again)', 'http://www.example.com/wpstyle/?p=364', 'https://www.example.com/foo/?bar=baz&inga=42&quux', 'http://✪df.ws/123', 'http://[email protected]', 'http://[email protected]/', 'http://[email protected]:8080', 'http://[email protected]:8080/', 'http://[email protected]:65535', 'http://[email protected]:65535/', 'http://userid:@example.com', 'http://userid:@example.com/', 'http://userid:@example.com:8080', 'http://userid:@example.com:8080/', 'http://userid:[email protected]', 'http://userid:[email protected]/', 'http://userid:[email protected]:8', 'http://userid:[email protected]:8/', 'http://userid:[email protected]:8080', 'http://userid:[email protected]:8080/', 'http://userid:[email protected]:65535', 'http://userid:[email protected]:65535/', 'https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' '[email protected]', 'https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' '[email protected]:8080', 'https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 'dddddddddddddddddddddd:[email protected]', 'https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 
'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd' 'ddddddddddddddddd:[email protected]:8080', 'http://142.42.1.1/', 'http://142.42.1.1:8080/', 'http://➡.ws/䨹', 'http://⌘.ws', 'http://⌘.ws/', 'http://foo.com/blah_(wikipedia)#cite-1', 'http://foo.com/blah_(wikipedia)_blah#cite-1', 'http://foo.com/unicode_(✪)_in_parens', 'http://foo.com/(something)?after=parens', 'http://☺.damowmow.com/', 'http://djangoproject.com/events/#&product=browser', 'http://j.mp', 'ftp://foo.bar/baz', 'http://foo.bar/?q=Test%20URL-encoded%20stuff', 'http://مثال.إختبار', 'http://例子.测试', 'http://उदाहरण.परीक्षा', "http://-.~_!$&'()*+,;=%40:80%[email protected]", 'http://xn--7sbb4ac0ad0be6cf.xn--p1ai', 'http://1337.net', 'http://a.b-c.de', 'http://223.255.255.254', 'ftps://foo.bar/', 'http://10.1.1.254', 'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80/index.html', 'http://[::192.9.5.5]/ipng', 'http://[::ffff:192.9.5.5]/ipng', 'http://[::1]:8080/', 'http://0.0.0.0/', 'http://255.255.255.255', 'http://224.0.0.0', 'http://224.1.1.1', 'http://111.112.113.114/', 'http://88.88.88.88/', 'http://11.12.13.14/', 'http://10.20.30.40/', 'http://1.2.3.4/', 'http://127.0.01.09.home.lan', 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.ex' 'ample.com', 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaa.com', 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaa', 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaa', 'http://dashintld.c-m', 'http://multipledashintld.a-b-c', 'http://evenmoredashintld.a---c', 'http://dashinpunytld.xn---c', ] INVALID_URLS = [ None, 56, 'no_scheme', 'foo', 'http://', 'http://example', 'http://example.', 'http://example.com:-1', 'http://example.com:-1/', 'http://example.com:000000080', 'http://example.com:000000080/', 'http://.com', 'http://invalid-.com', 'http://-invalid.com', 'http://invalid.com-', 'http://invalid.-com', 'http://inv-.alid-.com', 'http://inv-.-alid.com', 'file://localhost/path', 'git://example.com/', 'http://.', 'http://..', 'http://../', 'http://?', 'http://??', 'http://??/', 'http://#', 'http://##', 'http://##/', 'http://foo.bar?q=Spaces should be encoded', '//', '//a', '///a', '///', 'http:///a', 'foo.com', 'rdar://1234', 'h://test', 'http:// shouldfail.com', ':// should fail', 'http://foo.bar/foo(bar)baz quux', 'http://-error-.invalid/', 'http://dashinpunytld.trailingdot.xn--.', 'http://dashinpunytld.xn---', 'http://-a.b.co', 'http://a.b-.co', 'http://a.-b.co', 'http://a.b-.c.co', 'http:/', 'http://', 'http://', 'http://1.1.1.1.1', 'http://123.123.123', 'http://3628126748', 'http://123', 'http://000.000.000.000', 'http://016.016.016.016', 'http://192.168.000.001', 'http://01.2.3.4', 'http://01.2.3.4', 'http://1.02.3.4', 'http://1.2.03.4', 'http://1.2.3.04', 'http://.www.foo.bar/', 'http://.www.foo.bar./', 'http://[::1:2::3]:8/', 'http://[::1:2::3]:8080/', 'http://[]', 'http://[]:8080', 'http://example..com/', 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.e' 'xample.com', 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaa.com', 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'aaaaaa', 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaa.' 
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaa', 'https://test.[com', 'http://@example.com', 'http://:@example.com', 'http://:[email protected]', 'http://foo@[email protected]', 'http://foo/[email protected]', 'http://foo:bar:[email protected]', 'http://foo:bar@[email protected]', 'http://foo:bar/[email protected]', 'http://invalid-.com/[email protected]', # Newlines and tabs are not accepted. 'http://www.djangoproject.com/\n', 'http://[::ffff:192.9.5.5]\n', 'http://www.djangoproject.com/\r', 'http://[::ffff:192.9.5.5]\r', 'http://www.django\rproject.com/', 'http://[::\rffff:192.9.5.5]', 'http://\twww.djangoproject.com/', 'http://\t[::ffff:192.9.5.5]', # Trailing junk does not take forever to reject. 'http://www.asdasdasdasdsadfm.com.br ', 'http://www.asdasdasdasdsadfm.com.br z', ] TEST_DATA = [ # (validator, value, expected), (validate_integer, '42', None), (validate_integer, '-42', None), (validate_integer, -42, None), (validate_integer, -42.5, ValidationError), (validate_integer, None, ValidationError), (validate_integer, 'a', ValidationError), (validate_integer, '\n42', ValidationError), (validate_integer, '42\n', ValidationError), (validate_email, '[email protected]', None), (validate_email, '[email protected]', None), (validate_email, 'email@[127.0.0.1]', None), (validate_email, 'email@[2001:dB8::1]', None), (validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None), (validate_email, 'email@[::fffF:127.0.0.1]', None), (validate_email, '[email protected]', None), (validate_email, '[email protected]', None), (validate_email, '[email protected].उदाहरण.परीक्षा', None), (validate_email, 'email@localhost', None), (EmailValidator(allowlist=['localdomain']), 'email@localdomain', None), (validate_email, '"test@test"@example.com', None), (validate_email, 'example@atm.%s' % ('a' * 63), None), (validate_email, 'example@%s.atm' % ('a' * 63), None), (validate_email, 'example@%s.%s.atm' % ('a' * 63, 'b' * 10), None), (validate_email, 'example@atm.%s' % ('a' * 64), ValidationError), (validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError), (validate_email, None, ValidationError), (validate_email, '', ValidationError), (validate_email, 'abc', ValidationError), (validate_email, 'abc@', ValidationError), (validate_email, 'abc@bar', ValidationError), (validate_email, 'a @x.cz', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, 'something@@somewhere.com', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, 'email@[127.0.0.256]', ValidationError), (validate_email, 'email@[2001:db8::12345]', ValidationError), (validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError), (validate_email, 'email@[::ffff:127.0.0.256]', ValidationError), (validate_email, 'email@[2001:dg8::1]', ValidationError), (validate_email, 'email@[2001:dG8:0:0:0:0:0:1]', ValidationError), (validate_email, 'email@[::fTzF:127.0.0.1]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]', ValidationError), (validate_email, '[email protected]\n\n<script src="x.js">', ValidationError), # Quoted-string format (CR not allowed) (validate_email, '"\\\011"@here.com', None), 
(validate_email, '"\\\012"@here.com', ValidationError), (validate_email, '[email protected].', ValidationError), # Max length of domain name labels is 63 characters per RFC 1034. (validate_email, 'a@%s.us' % ('a' * 63), None), (validate_email, 'a@%s.us' % ('a' * 64), ValidationError), # Trailing newlines in username or domain not allowed (validate_email, '[email protected]\n', ValidationError), (validate_email, 'a\[email protected]', ValidationError), (validate_email, '"test@test"\[email protected]', ValidationError), (validate_email, 'a@[127.0.0.1]\n', ValidationError), (validate_slug, 'slug-ok', None), (validate_slug, 'longer-slug-still-ok', None), (validate_slug, '--------', None), (validate_slug, 'nohyphensoranything', None), (validate_slug, 'a', None), (validate_slug, '1', None), (validate_slug, 'a1', None), (validate_slug, '', ValidationError), (validate_slug, ' text ', ValidationError), (validate_slug, ' ', ValidationError), (validate_slug, '[email protected]', ValidationError), (validate_slug, '你好', ValidationError), (validate_slug, '你 好', ValidationError), (validate_slug, '\n', ValidationError), (validate_slug, 'trailing-newline\n', ValidationError), (validate_unicode_slug, 'slug-ok', None), (validate_unicode_slug, 'longer-slug-still-ok', None), (validate_unicode_slug, '--------', None), (validate_unicode_slug, 'nohyphensoranything', None), (validate_unicode_slug, 'a', None), (validate_unicode_slug, '1', None), (validate_unicode_slug, 'a1', None), (validate_unicode_slug, '你好', None), (validate_unicode_slug, '', ValidationError), (validate_unicode_slug, ' text ', ValidationError), (validate_unicode_slug, ' ', ValidationError), (validate_unicode_slug, '[email protected]', ValidationError), (validate_unicode_slug, '\n', ValidationError), (validate_unicode_slug, '你 好', ValidationError), (validate_unicode_slug, 'trailing-newline\n', ValidationError), (validate_ipv4_address, '1.1.1.1', None), (validate_ipv4_address, '255.0.0.0', None), (validate_ipv4_address, '0.0.0.0', None), (validate_ipv4_address, '256.1.1.1', ValidationError), (validate_ipv4_address, '25.1.1.', ValidationError), (validate_ipv4_address, '25,1,1,1', ValidationError), (validate_ipv4_address, '25.1 .1.1', ValidationError), (validate_ipv4_address, '1.1.1.1\n', ValidationError), (validate_ipv4_address, '٧.2٥.3٣.243', ValidationError), # Leading zeros are forbidden to avoid ambiguity with the octal notation. 
(validate_ipv4_address, '000.000.000.000', ValidationError), (validate_ipv4_address, '016.016.016.016', ValidationError), (validate_ipv4_address, '192.168.000.001', ValidationError), (validate_ipv4_address, '01.2.3.4', ValidationError), (validate_ipv4_address, '01.2.3.4', ValidationError), (validate_ipv4_address, '1.02.3.4', ValidationError), (validate_ipv4_address, '1.2.03.4', ValidationError), (validate_ipv4_address, '1.2.3.04', ValidationError), # validate_ipv6_address uses django.utils.ipv6, which # is tested in much greater detail in its own testcase (validate_ipv6_address, 'fe80::1', None), (validate_ipv6_address, '::1', None), (validate_ipv6_address, '1:2:3:4:5:6:7:8', None), (validate_ipv6_address, '1:2', ValidationError), (validate_ipv6_address, '::zzz', ValidationError), (validate_ipv6_address, '12345::', ValidationError), (validate_ipv46_address, '1.1.1.1', None), (validate_ipv46_address, '255.0.0.0', None), (validate_ipv46_address, '0.0.0.0', None), (validate_ipv46_address, 'fe80::1', None), (validate_ipv46_address, '::1', None), (validate_ipv46_address, '1:2:3:4:5:6:7:8', None), (validate_ipv46_address, '256.1.1.1', ValidationError), (validate_ipv46_address, '25.1.1.', ValidationError), (validate_ipv46_address, '25,1,1,1', ValidationError), (validate_ipv46_address, '25.1 .1.1', ValidationError), (validate_ipv46_address, '1:2', ValidationError), (validate_ipv46_address, '::zzz', ValidationError), (validate_ipv46_address, '12345::', ValidationError), # Leading zeros are forbidden to avoid ambiguity with the octal notation. (validate_ipv46_address, '000.000.000.000', ValidationError), (validate_ipv46_address, '016.016.016.016', ValidationError), (validate_ipv46_address, '192.168.000.001', ValidationError), (validate_ipv46_address, '01.2.3.4', ValidationError), (validate_ipv46_address, '01.2.3.4', ValidationError), (validate_ipv46_address, '1.02.3.4', ValidationError), (validate_ipv46_address, '1.2.03.4', ValidationError), (validate_ipv46_address, '1.2.3.04', ValidationError), (validate_comma_separated_integer_list, '1', None), (validate_comma_separated_integer_list, '12', None), (validate_comma_separated_integer_list, '1,2', None), (validate_comma_separated_integer_list, '1,2,3', None), (validate_comma_separated_integer_list, '10,32', None), (validate_comma_separated_integer_list, '', ValidationError), (validate_comma_separated_integer_list, 'a', ValidationError), (validate_comma_separated_integer_list, 'a,b,c', ValidationError), (validate_comma_separated_integer_list, '1, 2, 3', ValidationError), (validate_comma_separated_integer_list, ',', ValidationError), (validate_comma_separated_integer_list, '1,2,3,', ValidationError), (validate_comma_separated_integer_list, '1,2,', ValidationError), (validate_comma_separated_integer_list, ',1', ValidationError), (validate_comma_separated_integer_list, '1,,2', ValidationError), (int_list_validator(sep='.'), '1.2.3', None), (int_list_validator(sep='.', allow_negative=True), '1.2.3', None), (int_list_validator(allow_negative=True), '-1,-2,3', None), (int_list_validator(allow_negative=True), '1,-2,-12', None), (int_list_validator(), '-1,2,3', ValidationError), (int_list_validator(sep='.'), '1,2,3', ValidationError), (int_list_validator(sep='.'), '1.2.3\n', ValidationError), (MaxValueValidator(10), 10, None), (MaxValueValidator(10), -10, None), (MaxValueValidator(10), 0, None), (MaxValueValidator(NOW), NOW, None), (MaxValueValidator(NOW), NOW - timedelta(days=1), None), (MaxValueValidator(0), 1, ValidationError), (MaxValueValidator(NOW), NOW + 
timedelta(days=1), ValidationError), (MinValueValidator(-10), -10, None), (MinValueValidator(-10), 10, None), (MinValueValidator(-10), 0, None), (MinValueValidator(NOW), NOW, None), (MinValueValidator(NOW), NOW + timedelta(days=1), None), (MinValueValidator(0), -1, ValidationError), (MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError), # limit_value may be a callable. (MinValueValidator(lambda: 1), 0, ValidationError), (MinValueValidator(lambda: 1), 1, None), (MaxLengthValidator(10), '', None), (MaxLengthValidator(10), 10 * 'x', None), (MaxLengthValidator(10), 15 * 'x', ValidationError), (MinLengthValidator(10), 15 * 'x', None), (MinLengthValidator(10), 10 * 'x', None), (MinLengthValidator(10), '', ValidationError), (URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None), (URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None), (URLValidator(EXTENDED_SCHEMES), 'git+ssh://[email protected]/example/hg-git.git', None), (URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError), (BaseValidator(True), True, None), (BaseValidator(True), False, ValidationError), (RegexValidator(), '', None), (RegexValidator(), 'x1x2', None), (RegexValidator('[0-9]+'), 'xxxxxx', ValidationError), (RegexValidator('[0-9]+'), '1234', None), (RegexValidator(re.compile('[0-9]+')), '1234', None), (RegexValidator('.*'), '', None), (RegexValidator(re.compile('.*')), '', None), (RegexValidator('.*'), 'xxxxx', None), (RegexValidator('x'), 'y', ValidationError), (RegexValidator(re.compile('x')), 'y', ValidationError), (RegexValidator('x', inverse_match=True), 'y', None), (RegexValidator(re.compile('x'), inverse_match=True), 'y', None), (RegexValidator('x', inverse_match=True), 'x', ValidationError), (RegexValidator(re.compile('x'), inverse_match=True), 'x', ValidationError), (RegexValidator('x', flags=re.IGNORECASE), 'y', ValidationError), (RegexValidator('a'), 'A', ValidationError), (RegexValidator('a', flags=re.IGNORECASE), 'A', None), (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.jpg'), ValidationError), (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.JPG'), ValidationError), (FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithNoExtension'), ValidationError), (FileExtensionValidator(['']), ContentFile('contents', name='fileWithAnExtension.txt'), ValidationError), (FileExtensionValidator([]), ContentFile('contents', name='file.txt'), ValidationError), (FileExtensionValidator(['']), ContentFile('contents', name='fileWithNoExtension'), None), (FileExtensionValidator(['txt']), ContentFile('contents', name='file.txt'), None), (FileExtensionValidator(['txt']), ContentFile('contents', name='file.TXT'), None), (FileExtensionValidator(['TXT']), ContentFile('contents', name='file.txt'), None), (FileExtensionValidator(), ContentFile('contents', name='file.jpg'), None), (DecimalValidator(max_digits=2, decimal_places=2), Decimal('0.99'), None), (DecimalValidator(max_digits=2, decimal_places=1), Decimal('0.99'), ValidationError), (DecimalValidator(max_digits=3, decimal_places=1), Decimal('999'), ValidationError), (DecimalValidator(max_digits=4, decimal_places=1), Decimal('999'), None), (DecimalValidator(max_digits=20, decimal_places=2), Decimal('742403889818000000'), None), (DecimalValidator(20, 2), Decimal('7.42403889818E+17'), None), (DecimalValidator(max_digits=20, decimal_places=2), Decimal('7424742403889818000000'), ValidationError), (DecimalValidator(max_digits=5, decimal_places=2), 
Decimal('7304E-1'), None), (DecimalValidator(max_digits=5, decimal_places=2), Decimal('7304E-3'), ValidationError), (DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-5'), None), (DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-6'), ValidationError), # 'Enter a number.' errors *[ (DecimalValidator(decimal_places=2, max_digits=10), Decimal(value), ValidationError) for value in ( 'NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN', 'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity', ) ], (validate_image_file_extension, ContentFile('contents', name='file.jpg'), None), (validate_image_file_extension, ContentFile('contents', name='file.png'), None), (validate_image_file_extension, ContentFile('contents', name='file.PNG'), None), (validate_image_file_extension, ContentFile('contents', name='file.txt'), ValidationError), (validate_image_file_extension, ContentFile('contents', name='file'), ValidationError), (ProhibitNullCharactersValidator(), '\x00something', ValidationError), (ProhibitNullCharactersValidator(), 'something', None), (ProhibitNullCharactersValidator(), None, None), ] # Add valid and invalid URL tests. # This only tests the validator without extended schemes. TEST_DATA.extend((URLValidator(), url, None) for url in VALID_URLS) TEST_DATA.extend((URLValidator(), url, ValidationError) for url in INVALID_URLS) class TestValidators(SimpleTestCase): def test_validators(self): for validator, value, expected in TEST_DATA: name = validator.__name__ if isinstance(validator, types.FunctionType) else validator.__class__.__name__ exception_expected = expected is not None and issubclass(expected, Exception) with self.subTest(name, value=value): if validator is validate_image_file_extension and not PILLOW_IS_INSTALLED: self.skipTest('Pillow is required to test validate_image_file_extension.') if exception_expected: with self.assertRaises(expected): validator(value) else: self.assertEqual(expected, validator(value)) def test_single_message(self): v = ValidationError('Not Valid') self.assertEqual(str(v), "['Not Valid']") self.assertEqual(repr(v), "ValidationError(['Not Valid'])") def test_message_list(self): v = ValidationError(['First Problem', 'Second Problem']) self.assertEqual(str(v), "['First Problem', 'Second Problem']") self.assertEqual(repr(v), "ValidationError(['First Problem', 'Second Problem'])") def test_message_dict(self): v = ValidationError({'first': ['First Problem']}) self.assertEqual(str(v), "{'first': ['First Problem']}") self.assertEqual(repr(v), "ValidationError({'first': ['First Problem']})") def test_regex_validator_flags(self): msg = 'If the flags are set, regex must be a regular expression string.' 
with self.assertRaisesMessage(TypeError, msg): RegexValidator(re.compile('a'), flags=re.IGNORECASE) def test_max_length_validator_message(self): v = MaxLengthValidator(16, message='"%(value)s" has more than %(limit_value)d characters.') with self.assertRaisesMessage(ValidationError, '"djangoproject.com" has more than 16 characters.'): v('djangoproject.com') class TestValidatorEquality(TestCase): """ Validators have valid equality operators (#21638) """ def test_regex_equality(self): self.assertEqual( RegexValidator(r'^(?:[a-z0-9\.\-]*)://'), RegexValidator(r'^(?:[a-z0-9\.\-]*)://'), ) self.assertNotEqual( RegexValidator(r'^(?:[a-z0-9\.\-]*)://'), RegexValidator(r'^(?:[0-9\.\-]*)://'), ) self.assertEqual( RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"), RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"), ) self.assertNotEqual( RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh", "invalid"), RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"), ) self.assertNotEqual( RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"), RegexValidator(r'^(?:[a-z0-9\.\-]*)://'), ) self.assertNotEqual( RegexValidator('', flags=re.IGNORECASE), RegexValidator(''), ) self.assertNotEqual( RegexValidator(''), RegexValidator('', inverse_match=True), ) def test_regex_equality_nocache(self): pattern = r'^(?:[a-z0-9\.\-]*)://' left = RegexValidator(pattern) re.purge() right = RegexValidator(pattern) self.assertEqual( left, right, ) def test_regex_equality_blank(self): self.assertEqual( RegexValidator(), RegexValidator(), ) def test_email_equality(self): self.assertEqual( EmailValidator(), EmailValidator(), ) self.assertNotEqual( EmailValidator(message="BAD EMAIL"), EmailValidator(), ) self.assertEqual( EmailValidator(message="BAD EMAIL", code="bad"), EmailValidator(message="BAD EMAIL", code="bad"), ) def test_basic_equality(self): self.assertEqual( MaxValueValidator(44), MaxValueValidator(44), ) self.assertEqual(MaxValueValidator(44), mock.ANY) self.assertNotEqual( MaxValueValidator(44), MinValueValidator(44), ) self.assertNotEqual( MinValueValidator(45), MinValueValidator(11), ) def test_decimal_equality(self): self.assertEqual( DecimalValidator(1, 2), DecimalValidator(1, 2), ) self.assertNotEqual( DecimalValidator(1, 2), DecimalValidator(1, 1), ) self.assertNotEqual( DecimalValidator(1, 2), DecimalValidator(2, 2), ) self.assertNotEqual( DecimalValidator(1, 2), MinValueValidator(11), ) def test_file_extension_equality(self): self.assertEqual( FileExtensionValidator(), FileExtensionValidator() ) self.assertEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['txt']) ) self.assertEqual( FileExtensionValidator(['TXT']), FileExtensionValidator(['txt']) ) self.assertEqual( FileExtensionValidator(['TXT', 'png']), FileExtensionValidator(['txt', 'png']) ) self.assertEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['txt'], code='invalid_extension') ) self.assertNotEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['png']) ) self.assertNotEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['png', 'jpg']) ) self.assertNotEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['txt'], code='custom_code') ) self.assertNotEqual( FileExtensionValidator(['txt']), FileExtensionValidator(['txt'], message='custom error message') ) def test_prohibit_null_characters_validator_equality(self): self.assertEqual( ProhibitNullCharactersValidator(message='message', code='code'), ProhibitNullCharactersValidator(message='message', code='code') ) 
self.assertEqual( ProhibitNullCharactersValidator(), ProhibitNullCharactersValidator() ) self.assertNotEqual( ProhibitNullCharactersValidator(message='message1', code='code'), ProhibitNullCharactersValidator(message='message2', code='code') ) self.assertNotEqual( ProhibitNullCharactersValidator(message='message', code='code1'), ProhibitNullCharactersValidator(message='message', code='code2') )
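# --- Illustrative sketch (not part of the original test module) ---
# TEST_DATA above encodes the calling convention that test_validators checks:
# a validator is a callable that returns None for acceptable input and raises
# ValidationError otherwise, which is why passing cases store None as the
# expected value. A minimal usage sketch (the helper name is hypothetical):
def _validator_usage_sketch(value='not-an-email'):
    try:
        validate_email(value)
    except ValidationError as exc:
        return exc.messages  # e.g. ['Enter a valid email address.']
    return None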
f606113b3855c364cde37038507d17ded5a6354898eae400858f58c713076f87
import os import re from io import StringIO from unittest import mock, skipUnless from django.core.management import call_command from django.db import connection from django.db.backends.base.introspection import TableInfo from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature from .models import PeopleMoreData, test_collation def inspectdb_tables_only(table_name): """ Limit introspection to tables created for models of this app. Some databases such as Oracle are extremely slow at introspection. """ return table_name.startswith('inspectdb_') def inspectdb_views_only(table_name): return ( table_name.startswith('inspectdb_') and table_name.endswith(('_materialized', '_view')) ) def special_table_only(table_name): return table_name.startswith('inspectdb_special') class InspectDBTestCase(TestCase): unique_re = re.compile(r'.*unique_together = \((.+),\).*') def test_stealth_table_name_filter_option(self): out = StringIO() call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) error_message = "inspectdb has examined a table that should have been filtered out." # contrib.contenttypes is one of the apps always installed when running # the Django test suite, check that one of its tables hasn't been # inspected self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message) def test_table_option(self): """ inspectdb can inspect a subset of tables by passing the table names as arguments. """ out = StringIO() call_command('inspectdb', 'inspectdb_people', stdout=out) output = out.getvalue() self.assertIn('class InspectdbPeople(models.Model):', output) self.assertNotIn("InspectdbPeopledata", output) def make_field_type_asserter(self): """Call inspectdb and return a function to validate a field type in its output""" out = StringIO() call_command('inspectdb', 'inspectdb_columntypes', stdout=out) output = out.getvalue() def assertFieldType(name, definition): out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE)[1] self.assertEqual(definition, out_def) return assertFieldType def test_field_types(self): """Test introspection of various Django field types""" assertFieldType = self.make_field_type_asserter() introspected_field_types = connection.features.introspected_field_types char_field_type = introspected_field_types['CharField'] # Inspecting Oracle DB doesn't produce correct results (#19884): # - it reports fields as blank=True when they aren't. 
        if not connection.features.interprets_empty_strings_as_nulls and char_field_type == 'CharField':
            assertFieldType('char_field', "models.CharField(max_length=10)")
            assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)")
            assertFieldType('email_field', "models.CharField(max_length=254)")
            assertFieldType('file_field', "models.CharField(max_length=100)")
            assertFieldType('file_path_field', "models.CharField(max_length=100)")
            assertFieldType('slug_field', "models.CharField(max_length=50)")
            assertFieldType('text_field', "models.TextField()")
            assertFieldType('url_field', "models.CharField(max_length=200)")
        if char_field_type == 'TextField':
            assertFieldType('char_field', 'models.TextField()')
            assertFieldType('null_char_field', 'models.TextField(blank=True, null=True)')
            assertFieldType('email_field', 'models.TextField()')
            assertFieldType('file_field', 'models.TextField()')
            assertFieldType('file_path_field', 'models.TextField()')
            assertFieldType('slug_field', 'models.TextField()')
            assertFieldType('text_field', 'models.TextField()')
            assertFieldType('url_field', 'models.TextField()')
        assertFieldType('date_field', "models.DateField()")
        assertFieldType('date_time_field', "models.DateTimeField()")
        if introspected_field_types['GenericIPAddressField'] == 'GenericIPAddressField':
            assertFieldType('gen_ip_address_field', "models.GenericIPAddressField()")
        elif not connection.features.interprets_empty_strings_as_nulls:
            assertFieldType('gen_ip_address_field', "models.CharField(max_length=39)")
        assertFieldType('time_field', 'models.%s()' % introspected_field_types['TimeField'])
        if connection.features.has_native_uuid_field:
            assertFieldType('uuid_field', "models.UUIDField()")
        elif not connection.features.interprets_empty_strings_as_nulls:
            assertFieldType('uuid_field', "models.CharField(max_length=32)")

    @skipUnlessDBFeature('can_introspect_json_field', 'supports_json_field')
    def test_json_field(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_jsonfieldcolumntype', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn('json_field = models.JSONField()', output)
        self.assertIn('null_json_field = models.JSONField(blank=True, null=True)', output)

    @skipUnlessDBFeature('supports_collation_on_charfield')
    @skipUnless(test_collation, 'Language collations are not supported.')
    def test_char_field_db_collation(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_charfielddbcollation', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn(
                "char_field = models.CharField(max_length=10, "
                "db_collation='%s')" % test_collation,
                output,
            )
        else:
            self.assertIn(
                "char_field = models.CharField(max_length=10, "
                "db_collation='%s', blank=True, null=True)" % test_collation,
                output,
            )

    @skipUnlessDBFeature('supports_collation_on_textfield')
    @skipUnless(test_collation, 'Language collations are not supported.')
    def test_text_field_db_collation(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_textfielddbcollation', stdout=out)
        output = out.getvalue()
        if not connection.features.interprets_empty_strings_as_nulls:
            self.assertIn(
                "text_field = models.TextField(db_collation='%s')" % test_collation,
                output,
            )
        else:
            self.assertIn(
                "text_field = models.TextField(db_collation='%s', blank=True, "
                "null=True)" % test_collation,
                output,
            )

    def test_number_field_types(self):
        """Test introspection of various Django field types"""
        assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types auto_field_type = connection.features.introspected_field_types['AutoField'] if auto_field_type != 'AutoField': assertFieldType('id', "models.%s(primary_key=True) # AutoField?" % auto_field_type) assertFieldType('big_int_field', 'models.%s()' % introspected_field_types['BigIntegerField']) bool_field_type = introspected_field_types['BooleanField'] assertFieldType('bool_field', "models.{}()".format(bool_field_type)) assertFieldType('null_bool_field', 'models.{}(blank=True, null=True)'.format(bool_field_type)) if connection.vendor != 'sqlite': assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)") else: # Guessed arguments on SQLite, see #5014 assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) " "# max_digits and decimal_places have been guessed, " "as this database handles decimal fields as float") assertFieldType('float_field', "models.FloatField()") assertFieldType('int_field', 'models.%s()' % introspected_field_types['IntegerField']) assertFieldType('pos_int_field', 'models.%s()' % introspected_field_types['PositiveIntegerField']) assertFieldType('pos_big_int_field', 'models.%s()' % introspected_field_types['PositiveBigIntegerField']) assertFieldType('pos_small_int_field', 'models.%s()' % introspected_field_types['PositiveSmallIntegerField']) assertFieldType('small_int_field', 'models.%s()' % introspected_field_types['SmallIntegerField']) @skipUnlessDBFeature('can_introspect_foreign_keys') def test_attribute_name_not_python_keyword(self): out = StringIO() call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) output = out.getvalue() error_message = "inspectdb generated an attribute name which is a Python keyword" # Recursive foreign keys should be set to 'self' self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output) self.assertNotIn( "from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)", output, msg=error_message, ) # As InspectdbPeople model is defined after InspectdbMessage, it should be quoted self.assertIn( "from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')", output, ) self.assertIn( 'people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, primary_key=True)', output, ) self.assertIn( 'people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)', output, ) @skipUnlessDBFeature('can_introspect_foreign_keys') def test_foreign_key_to_field(self): out = StringIO() call_command('inspectdb', 'inspectdb_foreignkeytofield', stdout=out) self.assertIn( "to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', " "models.DO_NOTHING, to_field='people_unique_id')", out.getvalue(), ) def test_digits_column_name_introspection(self): """Introspection of column names consist/start with digits (#16536/#17676)""" char_field_type = connection.features.introspected_field_types['CharField'] out = StringIO() call_command('inspectdb', 'inspectdb_digitsincolumnname', stdout=out) output = out.getvalue() error_message = "inspectdb generated a model field name which is a number" self.assertNotIn(' 123 = models.%s' % char_field_type, output, msg=error_message) self.assertIn('number_123 = models.%s' % char_field_type, output) error_message = "inspectdb generated a model field name which starts with a digit" self.assertNotIn(' 4extra = models.%s' % char_field_type, output, msg=error_message) self.assertIn('number_4extra = models.%s' % char_field_type, 
                      output)
        self.assertNotIn(' 45extra = models.%s' % char_field_type, output, msg=error_message)
        self.assertIn('number_45extra = models.%s' % char_field_type, output)

    def test_special_column_name_introspection(self):
        """
        Introspection of column names containing special characters,
        unsuitable for Python identifiers
        """
        out = StringIO()
        call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
        output = out.getvalue()
        base_name = connection.introspection.identifier_converter('Field')
        integer_field_type = connection.features.introspected_field_types['IntegerField']
        self.assertIn("field = models.%s()" % integer_field_type, output)
        self.assertIn("field_field = models.%s(db_column='%s_')" % (integer_field_type, base_name), output)
        self.assertIn("field_field_0 = models.%s(db_column='%s__')" % (integer_field_type, base_name), output)
        self.assertIn("field_field_1 = models.%s(db_column='__field')" % integer_field_type, output)
        self.assertIn("prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output)
        self.assertIn("tamaño = models.%s()" % integer_field_type, output)

    def test_table_name_introspection(self):
        """
        Introspection of table names containing special characters,
        unsuitable for Python identifiers
        """
        out = StringIO()
        call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
        output = out.getvalue()
        self.assertIn("class InspectdbSpecialTableName(models.Model):", output)

    def test_managed_models(self):
        """By default the command generates models with `Meta.managed = False` (#14305)"""
        out = StringIO()
        call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
        output = out.getvalue()
        self.longMessage = False
        self.assertIn(" managed = False", output, msg='inspectdb should generate unmanaged models.')

    def test_unique_together_meta(self):
        out = StringIO()
        call_command('inspectdb', 'inspectdb_uniquetogether', stdout=out)
        output = out.getvalue()
        self.assertIn(" unique_together = (('", output)
        unique_together_match = self.unique_re.findall(output)
        # There should be one unique_together tuple.
        self.assertEqual(len(unique_together_match), 1)
        fields = unique_together_match[0]
        # Fields with db_column = field name.
        self.assertIn("('field1', 'field2')", fields)
        # Fields from columns whose names are Python keywords.
        self.assertIn("('from_field', 'field1')", fields)
        # Fields whose names normalize to the same Python field name and hence
        # are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields) @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') def test_unsupported_unique_together(self): """Unsupported index types (COALESCE here) are skipped.""" with connection.cursor() as c: c.execute( 'CREATE UNIQUE INDEX Findex ON %s ' '(id, people_unique_id, COALESCE(message_id, -1))' % PeopleMoreData._meta.db_table ) try: out = StringIO() call_command( 'inspectdb', table_name_filter=lambda tn: tn.startswith(PeopleMoreData._meta.db_table), stdout=out, ) output = out.getvalue() self.assertIn('# A unique constraint could not be introspected.', output) self.assertEqual(self.unique_re.findall(output), ["('id', 'people_unique')"]) finally: with connection.cursor() as c: c.execute('DROP INDEX Findex') @skipUnless(connection.vendor == 'sqlite', "Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test") def test_custom_fields(self): """ Introspection of columns with a custom field (#21090) """ out = StringIO() with mock.patch( 'django.db.connection.introspection.data_types_reverse.base_data_types_reverse', { 'text': 'myfields.TextField', 'bigint': 'BigIntegerField', }, ): call_command('inspectdb', 'inspectdb_columntypes', stdout=out) output = out.getvalue() self.assertIn("text_field = myfields.TextField()", output) self.assertIn("big_int_field = models.BigIntegerField()", output) def test_introspection_errors(self): """ Introspection errors should not crash the command, and the error should be visible in the output. """ out = StringIO() with mock.patch('django.db.connection.introspection.get_table_list', return_value=[TableInfo(name='nonexistent', type='t')]): call_command('inspectdb', stdout=out) output = out.getvalue() self.assertIn("# Unable to inspect table 'nonexistent'", output) # The error message depends on the backend self.assertIn("# The error was:", output) class InspectDBTransactionalTests(TransactionTestCase): available_apps = ['inspectdb'] def test_include_views(self): """inspectdb --include-views creates models for database views.""" with connection.cursor() as cursor: cursor.execute( 'CREATE VIEW inspectdb_people_view AS ' 'SELECT id, name FROM inspectdb_people' ) out = StringIO() view_model = 'class InspectdbPeopleView(models.Model):' view_managed = 'managed = False # Created from a view.' try: call_command( 'inspectdb', table_name_filter=inspectdb_views_only, stdout=out, ) no_views_output = out.getvalue() self.assertNotIn(view_model, no_views_output) self.assertNotIn(view_managed, no_views_output) call_command( 'inspectdb', table_name_filter=inspectdb_views_only, include_views=True, stdout=out, ) with_views_output = out.getvalue() self.assertIn(view_model, with_views_output) self.assertIn(view_managed, with_views_output) finally: with connection.cursor() as cursor: cursor.execute('DROP VIEW inspectdb_people_view') @skipUnlessDBFeature('can_introspect_materialized_views') def test_include_materialized_views(self): """inspectdb --include-views creates models for materialized views.""" with connection.cursor() as cursor: cursor.execute( 'CREATE MATERIALIZED VIEW inspectdb_people_materialized AS ' 'SELECT id, name FROM inspectdb_people' ) out = StringIO() view_model = 'class InspectdbPeopleMaterialized(models.Model):' view_managed = 'managed = False # Created from a view.' 
try: call_command( 'inspectdb', table_name_filter=inspectdb_views_only, stdout=out, ) no_views_output = out.getvalue() self.assertNotIn(view_model, no_views_output) self.assertNotIn(view_managed, no_views_output) call_command( 'inspectdb', table_name_filter=inspectdb_views_only, include_views=True, stdout=out, ) with_views_output = out.getvalue() self.assertIn(view_model, with_views_output) self.assertIn(view_managed, with_views_output) finally: with connection.cursor() as cursor: cursor.execute('DROP MATERIALIZED VIEW inspectdb_people_materialized') @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') def test_include_partitions(self): """inspectdb --include-partitions creates models for partitions.""" with connection.cursor() as cursor: cursor.execute('''\ CREATE TABLE inspectdb_partition_parent (name text not null) PARTITION BY LIST (left(upper(name), 1)) ''') cursor.execute('''\ CREATE TABLE inspectdb_partition_child PARTITION OF inspectdb_partition_parent FOR VALUES IN ('A', 'B', 'C') ''') out = StringIO() partition_model_parent = 'class InspectdbPartitionParent(models.Model):' partition_model_child = 'class InspectdbPartitionChild(models.Model):' partition_managed = 'managed = False # Created from a partition.' try: call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out) no_partitions_output = out.getvalue() self.assertIn(partition_model_parent, no_partitions_output) self.assertNotIn(partition_model_child, no_partitions_output) self.assertNotIn(partition_managed, no_partitions_output) call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_partitions=True, stdout=out) with_partitions_output = out.getvalue() self.assertIn(partition_model_parent, with_partitions_output) self.assertIn(partition_model_child, with_partitions_output) self.assertIn(partition_managed, with_partitions_output) finally: with connection.cursor() as cursor: cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_child') cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_parent') @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL') def test_foreign_data_wrapper(self): with connection.cursor() as cursor: cursor.execute('CREATE EXTENSION IF NOT EXISTS file_fdw') cursor.execute('CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw') cursor.execute('''\ CREATE FOREIGN TABLE inspectdb_iris_foreign_table ( petal_length real, petal_width real, sepal_length real, sepal_width real ) SERVER inspectdb_server OPTIONS ( filename %s ) ''', [os.devnull]) out = StringIO() foreign_table_model = 'class InspectdbIrisForeignTable(models.Model):' foreign_table_managed = 'managed = False' try: call_command( 'inspectdb', table_name_filter=inspectdb_tables_only, stdout=out, ) output = out.getvalue() self.assertIn(foreign_table_model, output) self.assertIn(foreign_table_managed, output) finally: with connection.cursor() as cursor: cursor.execute('DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table') cursor.execute('DROP SERVER IF EXISTS inspectdb_server') cursor.execute('DROP EXTENSION IF EXISTS file_fdw')
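# --- Illustrative note (not part of the original test module) ---
# The tests above drive inspectdb through call_command(); the equivalent
# command-line invocations would look like:
#
#   django-admin inspectdb inspectdb_columntypes
#   django-admin inspectdb --include-views
#   django-admin inspectdb --include-partitions  # PostgreSQL partitioned tables
#
# In every form the command only prints unmanaged model definitions to stdout;
# it never writes files or modifies the database.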
2308ba6f1263364dc3f7872973c609525d9432d079c34f17e1fb04f45a6aed56
from django.db import connection, models class People(models.Model): name = models.CharField(max_length=255) parent = models.ForeignKey('self', models.CASCADE) class Message(models.Model): from_field = models.ForeignKey(People, models.CASCADE, db_column='from_id') class PeopleData(models.Model): people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True) ssn = models.CharField(max_length=11) class PeopleMoreData(models.Model): people_unique = models.ForeignKey(People, models.CASCADE, unique=True) message = models.ForeignKey(Message, models.CASCADE, blank=True, null=True) license = models.CharField(max_length=255) class ForeignKeyToField(models.Model): to_field_fk = models.ForeignKey( PeopleMoreData, models.CASCADE, to_field='people_unique', ) class DigitsInColumnName(models.Model): all_digits = models.CharField(max_length=11, db_column='123') leading_digit = models.CharField(max_length=11, db_column='4extra') leading_digits = models.CharField(max_length=11, db_column='45extra') class SpecialName(models.Model): field = models.IntegerField(db_column='field') # Underscores field_field_0 = models.IntegerField(db_column='Field_') field_field_1 = models.IntegerField(db_column='Field__') field_field_2 = models.IntegerField(db_column='__field') # Other chars prc_x = models.IntegerField(db_column='prc(%) x') non_ascii = models.IntegerField(db_column='tamaño') class Meta: db_table = "inspectdb_special.table name" class ColumnTypes(models.Model): id = models.AutoField(primary_key=True) big_int_field = models.BigIntegerField() bool_field = models.BooleanField(default=False) null_bool_field = models.BooleanField(null=True) char_field = models.CharField(max_length=10) null_char_field = models.CharField(max_length=10, blank=True, null=True) date_field = models.DateField() date_time_field = models.DateTimeField() decimal_field = models.DecimalField(max_digits=6, decimal_places=1) email_field = models.EmailField() file_field = models.FileField(upload_to="unused") file_path_field = models.FilePathField() float_field = models.FloatField() int_field = models.IntegerField() gen_ip_address_field = models.GenericIPAddressField(protocol="ipv4") pos_big_int_field = models.PositiveBigIntegerField() pos_int_field = models.PositiveIntegerField() pos_small_int_field = models.PositiveSmallIntegerField() slug_field = models.SlugField() small_int_field = models.SmallIntegerField() text_field = models.TextField() time_field = models.TimeField() url_field = models.URLField() uuid_field = models.UUIDField() class JSONFieldColumnType(models.Model): json_field = models.JSONField() null_json_field = models.JSONField(blank=True, null=True) class Meta: required_db_features = { 'can_introspect_json_field', 'supports_json_field', } test_collation = connection.features.test_collations.get('non_default') class CharFieldDbCollation(models.Model): char_field = models.CharField(max_length=10, db_collation=test_collation) class Meta: required_db_features = {'supports_collation_on_charfield'} class TextFieldDbCollation(models.Model): text_field = models.TextField(db_collation=test_collation) class Meta: required_db_features = {'supports_collation_on_textfield'} class UniqueTogether(models.Model): field1 = models.IntegerField() field2 = models.CharField(max_length=10) from_field = models.IntegerField(db_column='from') non_unique = models.IntegerField(db_column='non__unique_column') non_unique_0 = models.IntegerField(db_column='non_unique__column') class Meta: unique_together = [ ('field1', 'field2'), ('from_field', 'field1'), 
('non_unique', 'non_unique_0'), ]
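# --- Illustrative note (not part of the original module) ---
# The deliberately awkward db_column values above ('from', '123', '4extra',
# 'non__unique_column', 'prc(%) x', ...) exist so the inspectdb tests can check
# how column names are normalized into valid Python identifiers: for example,
# a column named 'from' (a Python keyword) is expected to come back as a field
# called 'from_field' with db_column='from', and '123' as 'number_123'.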
8d15106c83dbb589907323d4045161bbd1979d9059fe567ee1b305411185c971
import time from django.core.exceptions import ImproperlyConfigured from django.http import HttpResponse from django.test import RequestFactory, SimpleTestCase, override_settings from django.test.utils import require_jinja2 from django.urls import resolve from django.views.generic import RedirectView, TemplateView, View from . import views class SimpleView(View): """ A simple view with a docstring. """ def get(self, request): return HttpResponse('This is a simple view') class SimplePostView(SimpleView): post = SimpleView.get class PostOnlyView(View): def post(self, request): return HttpResponse('This view only accepts POST') class CustomizableView(SimpleView): parameter = {} def decorator(view): view.is_decorated = True return view class DecoratedDispatchView(SimpleView): @decorator def dispatch(self, request, *args, **kwargs): return super().dispatch(request, *args, **kwargs) class AboutTemplateView(TemplateView): def get(self, request): return self.render_to_response({}) def get_template_names(self): return ['generic_views/about.html'] class AboutTemplateAttributeView(TemplateView): template_name = 'generic_views/about.html' def get(self, request): return self.render_to_response(context={}) class InstanceView(View): def get(self, request): return self class ViewTest(SimpleTestCase): rf = RequestFactory() def _assert_simple(self, response): self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'This is a simple view') def test_no_init_kwargs(self): """ A view can't be accidentally instantiated before deployment """ msg = 'This method is available only on the class, not on instances.' with self.assertRaisesMessage(AttributeError, msg): SimpleView(key='value').as_view() def test_no_init_args(self): """ A view can't be accidentally instantiated before deployment """ msg = 'as_view() takes 1 positional argument but 2 were given' with self.assertRaisesMessage(TypeError, msg): SimpleView.as_view('value') def test_pathological_http_method(self): """ The edge case of an HTTP request that spoofs an existing method name is caught. """ self.assertEqual(SimpleView.as_view()( self.rf.get('/', REQUEST_METHOD='DISPATCH') ).status_code, 405) def test_get_only(self): """ Test a view which only allows GET doesn't allow other methods. """ self._assert_simple(SimpleView.as_view()(self.rf.get('/'))) self.assertEqual(SimpleView.as_view()(self.rf.post('/')).status_code, 405) self.assertEqual(SimpleView.as_view()( self.rf.get('/', REQUEST_METHOD='FAKE') ).status_code, 405) def test_get_and_head(self): """ Test a view which supplies a GET method also responds correctly to HEAD. """ self._assert_simple(SimpleView.as_view()(self.rf.get('/'))) response = SimpleView.as_view()(self.rf.head('/')) self.assertEqual(response.status_code, 200) def test_setup_get_and_head(self): view_instance = SimpleView() self.assertFalse(hasattr(view_instance, 'head')) view_instance.setup(self.rf.get('/')) self.assertTrue(hasattr(view_instance, 'head')) self.assertEqual(view_instance.head, view_instance.get) def test_head_no_get(self): """ Test a view which supplies no GET method responds to HEAD with HTTP 405. """ response = PostOnlyView.as_view()(self.rf.head('/')) self.assertEqual(response.status_code, 405) def test_get_and_post(self): """ Test a view which only allows both GET and POST. 
""" self._assert_simple(SimplePostView.as_view()(self.rf.get('/'))) self._assert_simple(SimplePostView.as_view()(self.rf.post('/'))) self.assertEqual(SimplePostView.as_view()( self.rf.get('/', REQUEST_METHOD='FAKE') ).status_code, 405) def test_invalid_keyword_argument(self): """ View arguments must be predefined on the class and can't be named like an HTTP method. """ msg = ( 'The method name %s is not accepted as a keyword argument to ' 'SimpleView().' ) # Check each of the allowed method names for method in SimpleView.http_method_names: with self.assertRaisesMessage(TypeError, msg % method): SimpleView.as_view(**{method: 'value'}) # Check the case view argument is ok if predefined on the class... CustomizableView.as_view(parameter="value") # ...but raises errors otherwise. msg = ( "CustomizableView() received an invalid keyword 'foobar'. " "as_view only accepts arguments that are already attributes of " "the class." ) with self.assertRaisesMessage(TypeError, msg): CustomizableView.as_view(foobar="value") def test_calling_more_than_once(self): """ Test a view can only be called once. """ request = self.rf.get('/') view = InstanceView.as_view() self.assertNotEqual(view(request), view(request)) def test_class_attributes(self): """ The callable returned from as_view() has proper special attributes. """ cls = SimpleView view = cls.as_view() self.assertEqual(view.__doc__, cls.__doc__) self.assertEqual(view.__name__, 'view') self.assertEqual(view.__module__, cls.__module__) self.assertEqual(view.__qualname__, f'{cls.as_view.__qualname__}.<locals>.view') self.assertEqual(view.__annotations__, cls.dispatch.__annotations__) self.assertFalse(hasattr(view, '__wrapped__')) def test_dispatch_decoration(self): """ Attributes set by decorators on the dispatch method are also present on the closure. """ self.assertTrue(DecoratedDispatchView.as_view().is_decorated) def test_options(self): """ Views respond to HTTP OPTIONS requests with an Allow header appropriate for the methods implemented by the view class. """ request = self.rf.options('/') view = SimpleView.as_view() response = view(request) self.assertEqual(200, response.status_code) self.assertTrue(response.headers['Allow']) def test_options_for_get_view(self): """ A view implementing GET allows GET and HEAD. """ request = self.rf.options('/') view = SimpleView.as_view() response = view(request) self._assert_allows(response, 'GET', 'HEAD') def test_options_for_get_and_post_view(self): """ A view implementing GET and POST allows GET, HEAD, and POST. """ request = self.rf.options('/') view = SimplePostView.as_view() response = view(request) self._assert_allows(response, 'GET', 'HEAD', 'POST') def test_options_for_post_view(self): """ A view implementing POST allows POST. """ request = self.rf.options('/') view = PostOnlyView.as_view() response = view(request) self._assert_allows(response, 'POST') def _assert_allows(self, response, *expected_methods): "Assert allowed HTTP methods reported in the Allow response header" response_allows = set(response.headers['Allow'].split(', ')) self.assertEqual(set(expected_methods + ('OPTIONS',)), response_allows) def test_args_kwargs_request_on_self(self): """ Test a view only has args, kwargs & request once `as_view` has been called. 
""" bare_view = InstanceView() view = InstanceView.as_view()(self.rf.get('/')) for attribute in ('args', 'kwargs', 'request'): self.assertNotIn(attribute, dir(bare_view)) self.assertIn(attribute, dir(view)) def test_overridden_setup(self): class SetAttributeMixin: def setup(self, request, *args, **kwargs): self.attr = True super().setup(request, *args, **kwargs) class CheckSetupView(SetAttributeMixin, SimpleView): def dispatch(self, request, *args, **kwargs): assert hasattr(self, 'attr') return super().dispatch(request, *args, **kwargs) response = CheckSetupView.as_view()(self.rf.get('/')) self.assertEqual(response.status_code, 200) def test_not_calling_parent_setup_error(self): class TestView(View): def setup(self, request, *args, **kwargs): pass # Not calling super().setup() msg = ( "TestView instance has no 'request' attribute. Did you override " "setup() and forget to call super()?" ) with self.assertRaisesMessage(AttributeError, msg): TestView.as_view()(self.rf.get('/')) def test_setup_adds_args_kwargs_request(self): request = self.rf.get('/') args = ('arg 1', 'arg 2') kwargs = {'kwarg_1': 1, 'kwarg_2': 'year'} view = View() view.setup(request, *args, **kwargs) self.assertEqual(request, view.request) self.assertEqual(args, view.args) self.assertEqual(kwargs, view.kwargs) def test_direct_instantiation(self): """ It should be possible to use the view by directly instantiating it without going through .as_view() (#21564). """ view = PostOnlyView() response = view.dispatch(self.rf.head('/')) self.assertEqual(response.status_code, 405) @override_settings(ROOT_URLCONF='generic_views.urls') class TemplateViewTest(SimpleTestCase): rf = RequestFactory() def _assert_about(self, response): response.render() self.assertContains(response, '<h1>About</h1>') def test_get(self): """ Test a view that simply renders a template on GET """ self._assert_about(AboutTemplateView.as_view()(self.rf.get('/about/'))) def test_head(self): """ Test a TemplateView responds correctly to HEAD """ response = AboutTemplateView.as_view()(self.rf.head('/about/')) self.assertEqual(response.status_code, 200) def test_get_template_attribute(self): """ Test a view that renders a template on GET with the template name as an attribute on the class. """ self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get('/about/'))) def test_get_generic_template(self): """ Test a completely generic view that renders a template on GET with the template name as an argument at instantiation. """ self._assert_about(TemplateView.as_view(template_name='generic_views/about.html')(self.rf.get('/about/'))) def test_template_name_required(self): """ A template view must provide a template name. """ msg = ( "TemplateResponseMixin requires either a definition of " "'template_name' or an implementation of 'get_template_names()'" ) with self.assertRaisesMessage(ImproperlyConfigured, msg): self.client.get('/template/no_template/') @require_jinja2 def test_template_engine(self): """ A template view may provide a template engine. 
""" request = self.rf.get('/using/') view = TemplateView.as_view(template_name='generic_views/using.html') self.assertEqual(view(request).render().content, b'DTL\n') view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='django') self.assertEqual(view(request).render().content, b'DTL\n') view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='jinja2') self.assertEqual(view(request).render().content, b'Jinja2\n') def test_template_params(self): """ A generic template view passes kwargs as context. """ response = self.client.get('/template/simple/bar/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['foo'], 'bar') self.assertIsInstance(response.context['view'], View) def test_extra_template_params(self): """ A template view can be customized to return extra context. """ response = self.client.get('/template/custom/bar/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['foo'], 'bar') self.assertEqual(response.context['key'], 'value') self.assertIsInstance(response.context['view'], View) def test_cached_views(self): """ A template view can be cached """ response = self.client.get('/template/cached/bar/') self.assertEqual(response.status_code, 200) time.sleep(1.0) response2 = self.client.get('/template/cached/bar/') self.assertEqual(response2.status_code, 200) self.assertEqual(response.content, response2.content) time.sleep(2.0) # Let the cache expire and test again response2 = self.client.get('/template/cached/bar/') self.assertEqual(response2.status_code, 200) self.assertNotEqual(response.content, response2.content) def test_content_type(self): response = self.client.get('/template/content_type/') self.assertEqual(response.headers['Content-Type'], 'text/plain') def test_resolve_view(self): match = resolve('/template/content_type/') self.assertIs(match.func.view_class, TemplateView) self.assertEqual(match.func.view_initkwargs['content_type'], 'text/plain') def test_resolve_login_required_view(self): match = resolve('/template/login_required/') self.assertIs(match.func.view_class, TemplateView) def test_extra_context(self): response = self.client.get('/template/extra_context/') self.assertEqual(response.context['title'], 'Title') @override_settings(ROOT_URLCONF='generic_views.urls') class RedirectViewTest(SimpleTestCase): rf = RequestFactory() def test_no_url(self): "Without any configuration, returns HTTP 410 GONE" response = RedirectView.as_view()(self.rf.get('/foo/')) self.assertEqual(response.status_code, 410) def test_default_redirect(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_permanent_redirect(self): "Permanent redirects are an option" response = RedirectView.as_view(url='/bar/', permanent=True)(self.rf.get('/foo/')) self.assertEqual(response.status_code, 301) self.assertEqual(response.url, '/bar/') def test_temporary_redirect(self): "Temporary redirects are an option" response = RedirectView.as_view(url='/bar/', permanent=False)(self.rf.get('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_include_args(self): "GET arguments can be included in the redirected URL" response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') response = RedirectView.as_view(url='/bar/', 
query_string=True)(self.rf.get('/foo/?pork=spam')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/?pork=spam') def test_include_urlencoded_args(self): "GET arguments can be URL-encoded when included in the redirected URL" response = RedirectView.as_view(url='/bar/', query_string=True)( self.rf.get('/foo/?unicode=%E2%9C%93')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/?unicode=%E2%9C%93') def test_parameter_substitution(self): "Redirection URLs can be parameterized" response = RedirectView.as_view(url='/bar/%(object_id)d/')(self.rf.get('/foo/42/'), object_id=42) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/42/') def test_named_url_pattern(self): "Named pattern parameter should reverse to the matching pattern" response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), pk=1) self.assertEqual(response.status_code, 302) self.assertEqual(response.headers['Location'], '/detail/artist/1/') def test_named_url_pattern_using_args(self): response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), 1) self.assertEqual(response.status_code, 302) self.assertEqual(response.headers['Location'], '/detail/artist/1/') def test_redirect_POST(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.post('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_HEAD(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.head('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_OPTIONS(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.options('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_PUT(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.put('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_PATCH(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.patch('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_DELETE(self): "Default is a temporary redirect" response = RedirectView.as_view(url='/bar/')(self.rf.delete('/foo/')) self.assertEqual(response.status_code, 302) self.assertEqual(response.url, '/bar/') def test_redirect_when_meta_contains_no_query_string(self): "regression for #16705" # we can't use self.rf.get because it always sets QUERY_STRING response = RedirectView.as_view(url='/bar/')(self.rf.request(PATH_INFO='/foo/')) self.assertEqual(response.status_code, 302) def test_direct_instantiation(self): """ It should be possible to use the view without going through .as_view() (#21564). 
""" view = RedirectView() response = view.dispatch(self.rf.head('/foo/')) self.assertEqual(response.status_code, 410) class GetContextDataTest(SimpleTestCase): def test_get_context_data_super(self): test_view = views.CustomContextView() context = test_view.get_context_data(kwarg_test='kwarg_value') # the test_name key is inserted by the test classes parent self.assertIn('test_name', context) self.assertEqual(context['kwarg_test'], 'kwarg_value') self.assertEqual(context['custom_key'], 'custom_value') # test that kwarg overrides values assigned higher up context = test_view.get_context_data(test_name='test_value') self.assertEqual(context['test_name'], 'test_value') def test_object_at_custom_name_in_context_data(self): # Checks 'pony' key presence in dict returned by get_context_date test_view = views.CustomSingleObjectView() test_view.context_object_name = 'pony' context = test_view.get_context_data() self.assertEqual(context['pony'], test_view.object) def test_object_in_get_context_data(self): # Checks 'object' key presence in dict returned by get_context_date #20234 test_view = views.CustomSingleObjectView() context = test_view.get_context_data() self.assertEqual(context['object'], test_view.object) class UseMultipleObjectMixinTest(SimpleTestCase): rf = RequestFactory() def test_use_queryset_from_view(self): test_view = views.CustomMultipleObjectMixinView() test_view.get(self.rf.get('/')) # Don't pass queryset as argument context = test_view.get_context_data() self.assertEqual(context['object_list'], test_view.queryset) def test_overwrite_queryset(self): test_view = views.CustomMultipleObjectMixinView() test_view.get(self.rf.get('/')) queryset = [{'name': 'Lennon'}, {'name': 'Ono'}] self.assertNotEqual(test_view.queryset, queryset) # Overwrite the view's queryset with queryset from kwarg context = test_view.get_context_data(object_list=queryset) self.assertEqual(context['object_list'], queryset) class SingleObjectTemplateResponseMixinTest(SimpleTestCase): def test_template_mixin_without_template(self): """ We want to makes sure that if you use a template mixin, but forget the template, it still tells you it's ImproperlyConfigured instead of TemplateDoesNotExist. """ view = views.TemplateResponseWithoutTemplate() msg = ( "TemplateResponseMixin requires either a definition of " "'template_name' or an implementation of 'get_template_names()'" ) with self.assertRaisesMessage(ImproperlyConfigured, msg): view.get_template_names()
0b780cea9df09344bcb329aa88f27e1ee0562168d8412fa43eece6c408dd3d83
from django import forms from django.core.exceptions import ImproperlyConfigured from django.test import SimpleTestCase, TestCase, override_settings from django.test.client import RequestFactory from django.urls import reverse from django.views.generic.base import View from django.views.generic.edit import ( CreateView, DeleteView, DeleteViewCustomDeleteWarning, FormMixin, ModelFormMixin, ) from . import views from .forms import AuthorForm from .models import Artist, Author class FormMixinTests(SimpleTestCase): request_factory = RequestFactory() def test_initial_data(self): """ Test instance independence of initial data dict (see #16138) """ initial_1 = FormMixin().get_initial() initial_1['foo'] = 'bar' initial_2 = FormMixin().get_initial() self.assertNotEqual(initial_1, initial_2) def test_get_prefix(self): """ Test prefix can be set (see #18872) """ test_string = 'test' get_request = self.request_factory.get('/') class TestFormMixin(FormMixin): request = get_request default_kwargs = TestFormMixin().get_form_kwargs() self.assertIsNone(default_kwargs.get('prefix')) set_mixin = TestFormMixin() set_mixin.prefix = test_string set_kwargs = set_mixin.get_form_kwargs() self.assertEqual(test_string, set_kwargs.get('prefix')) def test_get_form(self): class TestFormMixin(FormMixin): request = self.request_factory.get('/') self.assertIsInstance( TestFormMixin().get_form(forms.Form), forms.Form, 'get_form() should use provided form class.' ) class FormClassTestFormMixin(TestFormMixin): form_class = forms.Form self.assertIsInstance( FormClassTestFormMixin().get_form(), forms.Form, 'get_form() should fallback to get_form_class() if none is provided.' ) def test_get_context_data(self): class FormContext(FormMixin): request = self.request_factory.get('/') form_class = forms.Form self.assertIsInstance(FormContext().get_context_data()['form'], forms.Form) @override_settings(ROOT_URLCONF='generic_views.urls') class BasicFormTests(TestCase): def test_post_data(self): res = self.client.post('/contact/', {'name': "Me", 'message': "Hello"}) self.assertRedirects(res, '/list/authors/') def test_late_form_validation(self): """ A form can be marked invalid in the form_valid() method (#25548). 
""" res = self.client.post('/late-validation/', {'name': "Me", 'message': "Hello"}) self.assertFalse(res.context['form'].is_valid()) class ModelFormMixinTests(SimpleTestCase): def test_get_form(self): form_class = views.AuthorGetQuerySetFormView().get_form_class() self.assertEqual(form_class._meta.model, Author) def test_get_form_checks_for_object(self): mixin = ModelFormMixin() mixin.request = RequestFactory().get('/') self.assertEqual({'initial': {}, 'prefix': None}, mixin.get_form_kwargs()) @override_settings(ROOT_URLCONF='generic_views.urls') class CreateViewTests(TestCase): def test_create(self): res = self.client.get('/edit/authors/create/') self.assertEqual(res.status_code, 200) self.assertIsInstance(res.context['form'], forms.ModelForm) self.assertIsInstance(res.context['view'], View) self.assertNotIn('object', res.context) self.assertNotIn('author', res.context) self.assertTemplateUsed(res, 'generic_views/author_form.html') res = self.client.post('/edit/authors/create/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'}) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe']) def test_create_invalid(self): res = self.client.post('/edit/authors/create/', {'name': 'A' * 101, 'slug': 'randall-munroe'}) self.assertEqual(res.status_code, 200) self.assertTemplateUsed(res, 'generic_views/author_form.html') self.assertEqual(len(res.context['form'].errors), 1) self.assertEqual(Author.objects.count(), 0) def test_create_with_object_url(self): res = self.client.post('/edit/artists/create/', {'name': 'Rene Magritte'}) self.assertEqual(res.status_code, 302) artist = Artist.objects.get(name='Rene Magritte') self.assertRedirects(res, '/detail/artist/%d/' % artist.pk) self.assertQuerysetEqual(Artist.objects.all(), [artist]) def test_create_with_redirect(self): res = self.client.post('/edit/authors/create/redirect/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'}) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/edit/authors/create/') self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe']) def test_create_with_interpolated_redirect(self): res = self.client.post( '/edit/authors/create/interpolate_redirect/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'} ) self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe']) self.assertEqual(res.status_code, 302) pk = Author.objects.first().pk self.assertRedirects(res, '/edit/author/%d/update/' % pk) # Also test with escaped chars in URL res = self.client.post( '/edit/authors/create/interpolate_redirect_nonascii/', {'name': 'John Doe', 'slug': 'john-doe'} ) self.assertEqual(res.status_code, 302) pk = Author.objects.get(name='John Doe').pk self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk)) def test_create_with_special_properties(self): res = self.client.get('/edit/authors/create/special/') self.assertEqual(res.status_code, 200) self.assertIsInstance(res.context['form'], views.AuthorForm) self.assertNotIn('object', res.context) self.assertNotIn('author', res.context) self.assertTemplateUsed(res, 'generic_views/form.html') res = self.client.post('/edit/authors/create/special/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'}) self.assertEqual(res.status_code, 302) obj = Author.objects.get(slug='randall-munroe') self.assertRedirects(res, reverse('author_detail', kwargs={'pk': obj.pk})) 
self.assertQuerysetEqual(Author.objects.all(), [obj]) def test_create_without_redirect(self): msg = ( 'No URL to redirect to. Either provide a url or define a ' 'get_absolute_url method on the Model.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): self.client.post('/edit/authors/create/naive/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'}) def test_create_restricted(self): res = self.client.post( '/edit/authors/create/restricted/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'} ) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/accounts/login/?next=/edit/authors/create/restricted/') def test_create_view_with_restricted_fields(self): class MyCreateView(CreateView): model = Author fields = ['name'] self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name']) def test_create_view_all_fields(self): class MyCreateView(CreateView): model = Author fields = '__all__' self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name', 'slug']) def test_create_view_without_explicit_fields(self): class MyCreateView(CreateView): model = Author message = ( "Using ModelFormMixin (base class of MyCreateView) without the " "'fields' attribute is prohibited." ) with self.assertRaisesMessage(ImproperlyConfigured, message): MyCreateView().get_form_class() def test_define_both_fields_and_form_class(self): class MyCreateView(CreateView): model = Author form_class = AuthorForm fields = ['name'] message = "Specifying both 'fields' and 'form_class' is not permitted." with self.assertRaisesMessage(ImproperlyConfigured, message): MyCreateView().get_form_class() @override_settings(ROOT_URLCONF='generic_views.urls') class UpdateViewTests(TestCase): @classmethod def setUpTestData(cls): cls.author = Author.objects.create( pk=1, # Required for OneAuthorUpdate. 
name='Randall Munroe', slug='randall-munroe', ) def test_update_post(self): res = self.client.get('/edit/author/%d/update/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertIsInstance(res.context['form'], forms.ModelForm) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['author'], self.author) self.assertTemplateUsed(res, 'generic_views/author_form.html') self.assertEqual(res.context['view'].get_form_called_count, 1) # Modification with both POST and PUT (browser compatible) res = self.client.post( '/edit/author/%d/update/' % self.author.pk, {'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'} ) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (xkcd)']) def test_update_invalid(self): res = self.client.post( '/edit/author/%d/update/' % self.author.pk, {'name': 'A' * 101, 'slug': 'randall-munroe'} ) self.assertEqual(res.status_code, 200) self.assertTemplateUsed(res, 'generic_views/author_form.html') self.assertEqual(len(res.context['form'].errors), 1) self.assertQuerysetEqual(Author.objects.all(), [self.author]) self.assertEqual(res.context['view'].get_form_called_count, 1) def test_update_with_object_url(self): a = Artist.objects.create(name='Rene Magritte') res = self.client.post('/edit/artists/%d/update/' % a.pk, {'name': 'Rene Magritte'}) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/detail/artist/%d/' % a.pk) self.assertQuerysetEqual(Artist.objects.all(), [a]) def test_update_with_redirect(self): res = self.client.post( '/edit/author/%d/update/redirect/' % self.author.pk, {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'} ) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/edit/authors/create/') self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)']) def test_update_with_interpolated_redirect(self): res = self.client.post( '/edit/author/%d/update/interpolate_redirect/' % self.author.pk, {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'} ) self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)']) self.assertEqual(res.status_code, 302) pk = Author.objects.first().pk self.assertRedirects(res, '/edit/author/%d/update/' % pk) # Also test with escaped chars in URL res = self.client.post( '/edit/author/%d/update/interpolate_redirect_nonascii/' % self.author.pk, {'name': 'John Doe', 'slug': 'john-doe'} ) self.assertEqual(res.status_code, 302) pk = Author.objects.get(name='John Doe').pk self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk)) def test_update_with_special_properties(self): res = self.client.get('/edit/author/%d/update/special/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertIsInstance(res.context['form'], views.AuthorForm) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['thingy'], self.author) self.assertNotIn('author', res.context) self.assertTemplateUsed(res, 'generic_views/form.html') res = self.client.post( '/edit/author/%d/update/special/' % self.author.pk, {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'} ) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/detail/author/%d/' % self.author.pk) self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)']) def 
test_update_without_redirect(self): msg = ( 'No URL to redirect to. Either provide a url or define a ' 'get_absolute_url method on the Model.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): self.client.post( '/edit/author/%d/update/naive/' % self.author.pk, {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'} ) def test_update_get_object(self): res = self.client.get('/edit/author/update/') self.assertEqual(res.status_code, 200) self.assertIsInstance(res.context['form'], forms.ModelForm) self.assertIsInstance(res.context['view'], View) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['author'], self.author) self.assertTemplateUsed(res, 'generic_views/author_form.html') # Modification with both POST and PUT (browser compatible) res = self.client.post('/edit/author/update/', {'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'}) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (xkcd)']) @override_settings(ROOT_URLCONF='generic_views.urls') class DeleteViewTests(TestCase): @classmethod def setUpTestData(cls): cls.author = Author.objects.create( name='Randall Munroe', slug='randall-munroe', ) def test_delete_by_post(self): res = self.client.get('/edit/author/%d/delete/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['author'], self.author) self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html') # Deletion with POST res = self.client.post('/edit/author/%d/delete/' % self.author.pk) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertQuerysetEqual(Author.objects.all(), []) def test_delete_by_delete(self): # Deletion with browser compatible DELETE method res = self.client.delete('/edit/author/%d/delete/' % self.author.pk) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertQuerysetEqual(Author.objects.all(), []) def test_delete_with_redirect(self): res = self.client.post('/edit/author/%d/delete/redirect/' % self.author.pk) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/edit/authors/create/') self.assertQuerysetEqual(Author.objects.all(), []) def test_delete_with_interpolated_redirect(self): res = self.client.post('/edit/author/%d/delete/interpolate_redirect/' % self.author.pk) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/edit/authors/create/?deleted=%d' % self.author.pk) self.assertQuerysetEqual(Author.objects.all(), []) # Also test with escaped chars in URL a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'}) res = self.client.post('/edit/author/{}/delete/interpolate_redirect_nonascii/'.format(a.pk)) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/%C3%A9dit/authors/create/?deleted={}'.format(a.pk)) def test_delete_with_special_properties(self): res = self.client.get('/edit/author/%d/delete/special/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['thingy'], self.author) self.assertNotIn('author', res.context) self.assertTemplateUsed(res, 'generic_views/confirm_delete.html') res = self.client.post('/edit/author/%d/delete/special/' % self.author.pk) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') 
self.assertQuerysetEqual(Author.objects.all(), []) def test_delete_without_redirect(self): msg = 'No URL to redirect to. Provide a success_url.' with self.assertRaisesMessage(ImproperlyConfigured, msg): self.client.post('/edit/author/%d/delete/naive/' % self.author.pk) def test_delete_with_form_as_post(self): res = self.client.get('/edit/author/%d/delete/form/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['author'], self.author) self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html') res = self.client.post( '/edit/author/%d/delete/form/' % self.author.pk, data={'confirm': True} ) self.assertEqual(res.status_code, 302) self.assertRedirects(res, '/list/authors/') self.assertSequenceEqual(Author.objects.all(), []) def test_delete_with_form_as_post_with_validation_error(self): res = self.client.get('/edit/author/%d/delete/form/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertEqual(res.context['object'], self.author) self.assertEqual(res.context['author'], self.author) self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html') res = self.client.post('/edit/author/%d/delete/form/' % self.author.pk) self.assertEqual(res.status_code, 200) self.assertEqual(len(res.context_data['form'].errors), 2) self.assertEqual( res.context_data['form'].errors['__all__'], ['You must confirm the delete.'], ) self.assertEqual( res.context_data['form'].errors['confirm'], ['This field is required.'], ) # RemovedInDjango50Warning. def test_delete_with_custom_delete(self): class AuthorDeleteView(DeleteView): model = Author def delete(self, request, *args, **kwargs): # Custom logic. pass msg = ( 'DeleteView uses FormMixin to handle POST requests. As a ' 'consequence, any custom deletion logic in ' 'AuthorDeleteView.delete() handler should be moved to ' 'form_valid().' ) with self.assertWarnsMessage(DeleteViewCustomDeleteWarning, msg): AuthorDeleteView()
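# --- Editor's illustrative sketch, not part of the original test module ---
# Minimal CreateView/DeleteView declarations of the kind exercised above.
# Author is reused from this test app; the 'authors_list' URL name and the
# exact field list are assumptions for illustration.
from django.urls import reverse_lazy
from django.views.generic.edit import CreateView, DeleteView

from .models import Author


class AuthorCreate(CreateView):
    model = Author
    fields = ['name', 'slug']  # Mutually exclusive with form_class.
    success_url = reverse_lazy('authors_list')


class AuthorDelete(DeleteView):
    model = Author
    # Required unless get_success_url() is overridden.
    success_url = reverse_lazy('authors_list')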
1f254de1dcb21e8a76d4bb318f00f0aae56ba3252b77b3cc4c53ce298e2dde33
import json import random from django.conf import settings from django.contrib.messages import constants from django.contrib.messages.storage.base import Message from django.contrib.messages.storage.cookie import ( CookieStorage, MessageDecoder, MessageEncoder, ) from django.test import SimpleTestCase, override_settings from django.utils.crypto import get_random_string from django.utils.safestring import SafeData, mark_safe from .base import BaseTests def set_cookie_data(storage, messages, invalid=False, encode_empty=False): """ Set ``request.COOKIES`` with the encoded data and remove the storage backend's loaded data cache. """ encoded_data = storage._encode(messages, encode_empty=encode_empty) if invalid: # Truncate the first character so that the hash is invalid. encoded_data = encoded_data[1:] storage.request.COOKIES = {CookieStorage.cookie_name: encoded_data} if hasattr(storage, '_loaded_data'): del storage._loaded_data def stored_cookie_messages_count(storage, response): """ Return an integer containing the number of messages stored. """ # Get a list of cookies, excluding ones with a max-age of 0 (because # they have been marked for deletion). cookie = response.cookies.get(storage.cookie_name) if not cookie or cookie['max-age'] == 0: return 0 data = storage._decode(cookie.value) if not data: return 0 if data[-1] == CookieStorage.not_finished: data.pop() return len(data) @override_settings(SESSION_COOKIE_DOMAIN='.example.com', SESSION_COOKIE_SECURE=True, SESSION_COOKIE_HTTPONLY=True) class CookieTests(BaseTests, SimpleTestCase): storage_class = CookieStorage def stored_messages_count(self, storage, response): return stored_cookie_messages_count(storage, response) def encode_decode(self, *args, **kwargs): storage = self.get_storage() message = Message(constants.DEBUG, *args, **kwargs) encoded = storage._encode(message) return storage._decode(encoded) def test_get(self): storage = self.storage_class(self.get_request()) # Set initial data. example_messages = ['test', 'me'] set_cookie_data(storage, example_messages) # The message contains what's expected. self.assertEqual(list(storage), example_messages) @override_settings(SESSION_COOKIE_SAMESITE='Strict') def test_cookie_setings(self): """ CookieStorage honors SESSION_COOKIE_DOMAIN, SESSION_COOKIE_SECURE, and SESSION_COOKIE_HTTPONLY (#15618, #20972). """ # Test before the messages have been consumed storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, 'test') storage.update(response) messages = storage._decode(response.cookies['messages'].value) self.assertEqual(len(messages), 1) self.assertEqual(messages[0].message, 'test') self.assertEqual(response.cookies['messages']['domain'], '.example.com') self.assertEqual(response.cookies['messages']['expires'], '') self.assertIs(response.cookies['messages']['secure'], True) self.assertIs(response.cookies['messages']['httponly'], True) self.assertEqual(response.cookies['messages']['samesite'], 'Strict') # Test deletion of the cookie (storing with an empty value) after the messages have been consumed storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, 'test') for m in storage: pass # Iterate through the storage to simulate consumption of messages. 
storage.update(response) self.assertEqual(response.cookies['messages'].value, '') self.assertEqual(response.cookies['messages']['domain'], '.example.com') self.assertEqual(response.cookies['messages']['expires'], 'Thu, 01 Jan 1970 00:00:00 GMT') self.assertEqual( response.cookies['messages']['samesite'], settings.SESSION_COOKIE_SAMESITE, ) def test_get_bad_cookie(self): request = self.get_request() storage = self.storage_class(request) # Set initial (invalid) data. example_messages = ['test', 'me'] set_cookie_data(storage, example_messages, invalid=True) # The message actually contains what we expect. self.assertEqual(list(storage), []) def test_max_cookie_length(self): """ If the data exceeds what is allowed in a cookie, older messages are removed before saving (and returned by the ``update`` method). """ storage = self.get_storage() response = self.get_response() # When storing as a cookie, the cookie has constant overhead of approx # 54 chars, and each message has a constant overhead of about 37 chars # and a variable overhead of zero in the best case. We aim for a message # size which will fit 4 messages into the cookie, but not 5. # See also FallbackTest.test_session_fallback msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37) first_msg = None # Generate the same (tested) content every time that does not get run # through zlib compression. random.seed(42) for i in range(5): msg = get_random_string(msg_size) storage.add(constants.INFO, msg) if i == 0: first_msg = msg unstored_messages = storage.update(response) cookie_storing = self.stored_messages_count(storage, response) self.assertEqual(cookie_storing, 4) self.assertEqual(len(unstored_messages), 1) self.assertEqual(unstored_messages[0].message, first_msg) def test_message_rfc6265(self): non_compliant_chars = ['\\', ',', ';', '"'] messages = ['\\te,st', ';m"e', '\u2019', '123"NOTRECEIVED"'] storage = self.get_storage() encoded = storage._encode(messages) for illegal in non_compliant_chars: self.assertEqual(encoded.find(illegal), -1) def test_json_encoder_decoder(self): """ A complex nested data structure containing Message instances is properly encoded/decoded by the custom JSON encoder/decoder classes. """ messages = [ { 'message': Message(constants.INFO, 'Test message'), 'message_list': [ Message(constants.INFO, 'message %s') for x in range(5) ] + [{'another-message': Message(constants.ERROR, 'error')}], }, Message(constants.INFO, 'message %s'), ] encoder = MessageEncoder() value = encoder.encode(messages) decoded_messages = json.loads(value, cls=MessageDecoder) self.assertEqual(messages, decoded_messages) def test_safedata(self): """ A message containing SafeData is keeping its safe status when retrieved from the message storage. """ self.assertIsInstance( self.encode_decode(mark_safe('<b>Hello Django!</b>')).message, SafeData, ) self.assertNotIsInstance( self.encode_decode('<b>Hello Django!</b>').message, SafeData, ) def test_extra_tags(self): """ A message's extra_tags attribute is correctly preserved when retrieved from the message storage. """ for extra_tags in ['', None, 'some tags']: with self.subTest(extra_tags=extra_tags): self.assertEqual( self.encode_decode('message', extra_tags=extra_tags).extra_tags, extra_tags, )
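# --- Editor's illustrative sketch, not part of the original test module ---
# The CookieStorage backend tested above is normally driven through the
# high-level messages API; a hedged example in an ordinary view (the view
# and redirect target are assumptions for illustration).
from django.contrib import messages
from django.http import HttpResponseRedirect


def place_order(request):
    # With MESSAGE_STORAGE pointing at CookieStorage, the message travels to
    # the next request in a signed cookie and is dropped once displayed.
    messages.add_message(request, messages.SUCCESS, 'This was a triumph.')
    return HttpResponseRedirect('/thanks/')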
ea31500671f5823441392ced0398a8363cbe101758580a4b4b98eff97ae16dd3
from unittest import mock

from django.contrib.messages import constants
from django.contrib.messages.storage import base
from django.contrib.messages.storage.base import Message
from django.test import SimpleTestCase, override_settings


class MessageTests(SimpleTestCase):
    def test_eq(self):
        msg_1 = Message(constants.INFO, 'Test message 1')
        msg_2 = Message(constants.INFO, 'Test message 2')
        msg_3 = Message(constants.WARNING, 'Test message 1')
        self.assertEqual(msg_1, msg_1)
        self.assertEqual(msg_1, mock.ANY)
        self.assertNotEqual(msg_1, msg_2)
        self.assertNotEqual(msg_1, msg_3)
        self.assertNotEqual(msg_2, msg_3)


class TestLevelTags(SimpleTestCase):
    message_tags = {
        constants.INFO: 'info',
        constants.DEBUG: '',
        constants.WARNING: '',
        constants.ERROR: 'bad',
        constants.SUCCESS: '',
        12: 'custom',
    }

    @override_settings(MESSAGE_TAGS=message_tags)
    def test_override_settings_level_tags(self):
        self.assertEqual(base.LEVEL_TAGS, self.message_tags)
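# --- Editor's illustrative sketch, not part of the original test module ---
# The LEVEL_TAGS override tested above is normally driven by the MESSAGE_TAGS
# setting; a hedged settings snippet of the same shape (values are examples).
from django.contrib.messages import constants

MESSAGE_TAGS = {
    constants.ERROR: 'bad',  # Replaces the default 'error' CSS tag.
    12: 'custom',            # A custom level gets a custom tag.
}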
7d72d23176ac6e319e85c7fc99f7f9c619e53f16967542ff6ec304a39ef81ae1
from django.contrib.messages import constants, get_level, set_level from django.contrib.messages.api import MessageFailure from django.contrib.messages.constants import DEFAULT_LEVELS from django.contrib.messages.storage import default_storage from django.contrib.messages.storage.base import Message from django.http import HttpRequest, HttpResponse from django.test import modify_settings, override_settings from django.urls import reverse from django.utils.translation import gettext_lazy def add_level_messages(storage): """ Add 6 messages from different levels (including a custom one) to a storage instance. """ storage.add(constants.INFO, 'A generic info message') storage.add(29, 'Some custom level') storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag') storage.add(constants.WARNING, 'A warning') storage.add(constants.ERROR, 'An error') storage.add(constants.SUCCESS, 'This was a triumph.') class BaseTests: storage_class = default_storage levels = { 'debug': constants.DEBUG, 'info': constants.INFO, 'success': constants.SUCCESS, 'warning': constants.WARNING, 'error': constants.ERROR, } def setUp(self): self.settings_override = override_settings( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': ( 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ), }, }], ROOT_URLCONF='messages_tests.urls', MESSAGE_TAGS={}, MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__, self.storage_class.__name__), SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer', ) self.settings_override.enable() def tearDown(self): self.settings_override.disable() def get_request(self): return HttpRequest() def get_response(self): return HttpResponse() def get_storage(self, data=None): """ Return the storage backend, setting its loaded data to the ``data`` argument. This method prevents the storage ``_get`` method from being called, so that other parts of the storage backend can be tested independently of the message retrieval logic.
""" storage = self.storage_class(self.get_request()) storage._loaded_data = data or [] return storage def test_repr(self): request = self.get_request() storage = self.storage_class(request) self.assertEqual( repr(storage), f'<{self.storage_class.__qualname__}: request=<HttpRequest>>', ) def test_add(self): storage = self.get_storage() self.assertFalse(storage.added_new) storage.add(constants.INFO, 'Test message 1') self.assertTrue(storage.added_new) storage.add(constants.INFO, 'Test message 2', extra_tags='tag') self.assertEqual(len(storage), 2) def test_add_lazy_translation(self): storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, gettext_lazy('lazy message')) storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 1) def test_no_update(self): storage = self.get_storage() response = self.get_response() storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 0) def test_add_update(self): storage = self.get_storage() response = self.get_response() storage.add(constants.INFO, 'Test message 1') storage.add(constants.INFO, 'Test message 1', extra_tags='tag') storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 2) def test_existing_add_read_update(self): storage = self.get_existing_storage() response = self.get_response() storage.add(constants.INFO, 'Test message 3') list(storage) # Simulates a read storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 0) def test_existing_read_add_update(self): storage = self.get_existing_storage() response = self.get_response() list(storage) # Simulates a read storage.add(constants.INFO, 'Test message 3') storage.update(response) storing = self.stored_messages_count(storage, response) self.assertEqual(storing, 1) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_full_request_response_cycle(self): """ With the message middleware enabled, messages are properly stored and retrieved across the full request/redirect/response cycle. 
""" data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertIn('messages', response.context) messages = [Message(self.levels[level], msg) for msg in data['messages']] self.assertEqual(list(response.context['messages']), messages) for msg in data['messages']: self.assertContains(response, msg) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_with_template_response(self): data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_template_response') for level in self.levels: add_url = reverse('add_template_response', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertIn('messages', response.context) for msg in data['messages']: self.assertContains(response, msg) # there shouldn't be any messages on second GET request response = self.client.get(show_url) for msg in data['messages']: self.assertNotContains(response, msg) def test_context_processor_message_levels(self): show_url = reverse('show_template_response') response = self.client.get(show_url) self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context) self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS) @override_settings(MESSAGE_LEVEL=constants.DEBUG) def test_multiple_posts(self): """ Messages persist properly when multiple POSTs are made before a GET. """ data = { 'messages': ['Test message %d' % x for x in range(5)], } show_url = reverse('show_message') messages = [] for level in ('debug', 'info', 'success', 'warning', 'error'): messages.extend(Message(self.levels[level], msg) for msg in data['messages']) add_url = reverse('add_message', args=(level,)) self.client.post(add_url, data) response = self.client.get(show_url) self.assertIn('messages', response.context) self.assertEqual(list(response.context['messages']), messages) for msg in data['messages']: self.assertContains(response, msg) @modify_settings( INSTALLED_APPS={'remove': 'django.contrib.messages'}, MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'}, ) @override_settings( MESSAGE_LEVEL=constants.DEBUG, TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }], ) def test_middleware_disabled(self): """ When the middleware is disabled, an exception is raised when one attempts to store a message. """ data = { 'messages': ['Test message %d' % x for x in range(5)], } reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) with self.assertRaises(MessageFailure): self.client.post(add_url, data, follow=True) @modify_settings( INSTALLED_APPS={'remove': 'django.contrib.messages'}, MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'}, ) @override_settings( TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, }], ) def test_middleware_disabled_fail_silently(self): """ When the middleware is disabled, an exception is not raised if 'fail_silently' is True. 
""" data = { 'messages': ['Test message %d' % x for x in range(5)], 'fail_silently': True, } show_url = reverse('show_message') for level in ('debug', 'info', 'success', 'warning', 'error'): add_url = reverse('add_message', args=(level,)) response = self.client.post(add_url, data, follow=True) self.assertRedirects(response, show_url) self.assertNotIn('messages', response.context) def stored_messages_count(self, storage, response): """ Return the number of messages being stored after a ``storage.update()`` call. """ raise NotImplementedError('This method must be set by a subclass.') def test_get(self): raise NotImplementedError('This method must be set by a subclass.') def get_existing_storage(self): return self.get_storage([ Message(constants.INFO, 'Test message 1'), Message(constants.INFO, 'Test message 2', extra_tags='tag'), ]) def test_existing_read(self): """ Reading the existing storage doesn't cause the data to be lost. """ storage = self.get_existing_storage() self.assertFalse(storage.used) # After iterating the storage engine directly, the used flag is set. data = list(storage) self.assertTrue(storage.used) # The data does not disappear because it has been iterated. self.assertEqual(data, list(storage)) def test_existing_add(self): storage = self.get_existing_storage() self.assertFalse(storage.added_new) storage.add(constants.INFO, 'Test message 3') self.assertTrue(storage.added_new) def test_default_level(self): # get_level works even with no storage on the request. request = self.get_request() self.assertEqual(get_level(request), constants.INFO) # get_level returns the default level if it hasn't been set. storage = self.get_storage() request._messages = storage self.assertEqual(get_level(request), constants.INFO) # Only messages of sufficient level get recorded. add_level_messages(storage) self.assertEqual(len(storage), 5) def test_low_level(self): request = self.get_request() storage = self.storage_class(request) request._messages = storage self.assertTrue(set_level(request, 5)) self.assertEqual(get_level(request), 5) add_level_messages(storage) self.assertEqual(len(storage), 6) def test_high_level(self): request = self.get_request() storage = self.storage_class(request) request._messages = storage self.assertTrue(set_level(request, 30)) self.assertEqual(get_level(request), 30) add_level_messages(storage) self.assertEqual(len(storage), 2) @override_settings(MESSAGE_LEVEL=29) def test_settings_level(self): request = self.get_request() storage = self.storage_class(request) self.assertEqual(get_level(request), 29) add_level_messages(storage) self.assertEqual(len(storage), 3) def test_tags(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) storage.add(constants.INFO, 'A generic info message', extra_tags=None) tags = [msg.tags for msg in storage] self.assertEqual(tags, ['info', '', 'extra-tag debug', 'warning', 'error', 'success', 'info']) def test_level_tag(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) tags = [msg.level_tag for msg in storage] self.assertEqual(tags, ['info', '', 'debug', 'warning', 'error', 'success']) @override_settings(MESSAGE_TAGS={ constants.INFO: 'info', constants.DEBUG: '', constants.WARNING: '', constants.ERROR: 'bad', 29: 'custom', }) def test_custom_tags(self): storage = self.get_storage() storage.level = 0 add_level_messages(storage) tags = [msg.tags for msg in storage] self.assertEqual(tags, ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
f5bdd39e3823d4f35887ac4e9f8d8268a57f9015b6504b63899b852ec44bed32
""" Unit tests for reverse URL lookups. """ import pickle import sys import threading from admin_scripts.tests import AdminScriptTestCase from django.conf import settings from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist from django.http import ( HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.shortcuts import redirect from django.test import ( RequestFactory, SimpleTestCase, TestCase, override_settings, ) from django.test.utils import override_script_prefix from django.urls import ( NoReverseMatch, Resolver404, ResolverMatch, URLPattern, URLResolver, get_callable, get_resolver, get_urlconf, include, path, re_path, resolve, reverse, reverse_lazy, ) from django.urls.resolvers import RegexPattern from . import middleware, urlconf_outer, views from .utils import URLObject from .views import empty_view resolve_test_data = ( # These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs) # Simple case ('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'}), ( '/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, (), {'arg1': '42', 'arg2': '37'} ), ( '/included/normal/42/37/', 'inc-normal-view', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-normal-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/included/view_class/42/37/', 'inc-view-class', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-view-class', views.view_class_instance, (), {'arg1': '42', 'arg2': '37'} ), # Unnamed args are dropped if you have *any* kwargs in a pattern ('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, (), {'arg2': '37'}), ( '/included/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-mixed-args', views.empty_view, (), {'arg2': '37'} ), ( '/included/12/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-mixed-args', views.empty_view, (), {'arg2': '37'} ), # Unnamed views should have None as the url_name. Regression data for #21157. ( '/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance, (), {'arg1': '42', 'arg2': '37'} ), # If you have no kwargs, you get an args list. 
('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}), ( '/included/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs', views.empty_view, ('42', '37'), {} ), ( '/included/12/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls', 'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs', views.empty_view, ('12', '42', '37'), {} ), # Namespaces ( '/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/ns-included1/normal/42/37/', 'inc-normal-view', 'included_namespace_urls', 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), # Nested namespaces ( '/ns-included1/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:namespace_urls:included_namespace_urls:testapp', 'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/app-included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'inc-app:test-ns3', 'inc-app:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), ( '/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:namespace_urls:included_namespace_urls:testapp', 'inc-app:inc-ns4:inc-ns2:test-ns3', 'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'} ), # Namespaces capturing variables ( '/inc70/', 'inner-nothing', 'included_urls', 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, (), {'outer': '70'} ), ( '/inc78/extra/foobar/', 'inner-extra', 'included_urls', 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, (), {'outer': '78', 'extra': 'foobar'} ), ) test_data = ( ('places', '/places/3/', [3], {}), ('places', '/places/3/', ['3'], {}), ('places', NoReverseMatch, ['a'], {}), ('places', NoReverseMatch, [], {}), ('places?', '/place/', [], {}), ('places+', '/places/', [], {}), ('places*', '/place/', [], {}), ('places2?', '/', [], {}), ('places2+', '/places/', [], {}), ('places2*', '/', [], {}), ('places3', '/places/4/', [4], {}), ('places3', '/places/harlem/', ['harlem'], {}), ('places3', NoReverseMatch, ['harlem64'], {}), ('places4', '/places/3/', [], {'id': 3}), 
('people', NoReverseMatch, [], {}), ('people', '/people/adrian/', ['adrian'], {}), ('people', '/people/adrian/', [], {'name': 'adrian'}), ('people', NoReverseMatch, ['name with spaces'], {}), ('people', NoReverseMatch, [], {'name': 'name with spaces'}), ('people2', '/people/name/', [], {}), ('people2a', '/people/name/fred/', ['fred'], {}), ('people_backref', '/people/nate-nate/', ['nate'], {}), ('people_backref', '/people/nate-nate/', [], {'name': 'nate'}), ('optional', '/optional/fred/', [], {'name': 'fred'}), ('optional', '/optional/fred/', ['fred'], {}), ('named_optional', '/optional/1/', [1], {}), ('named_optional', '/optional/1/', [], {'arg1': 1}), ('named_optional', '/optional/1/2/', [1, 2], {}), ('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}), ('named_optional_terminated', '/optional/1/', [1], {}), ('named_optional_terminated', '/optional/1/', [], {'arg1': 1}), ('named_optional_terminated', '/optional/1/2/', [1, 2], {}), ('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}), ('hardcoded', '/hardcoded/', [], {}), ('hardcoded2', '/hardcoded/doc.pdf', [], {}), ('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}), ('people3', NoReverseMatch, [], {'state': 'il'}), ('people3', NoReverseMatch, [], {'name': 'adrian'}), ('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}), ('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}), ('people6', '/people//adrian/', ['adrian'], {}), ('range', '/character_set/a/', [], {}), ('range2', '/character_set/x/', [], {}), ('price', '/price/$10/', ['10'], {}), ('price2', '/price/$10/', ['10'], {}), ('price3', '/price/$10/', ['10'], {}), ('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}), ('headlines', '/headlines/2007.5.21/', [], {'year': 2007, 'month': 5, 'day': 21}), ( 'windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [], {'drive_name': 'C', 'path': r'Documents and Settings\spam'} ), ('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}), ('special', r'/special_chars/some%20resource/', [r'some resource'], {}), ('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}), ('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}), ('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}), ('special', NoReverseMatch, [''], {}), ('mixed', '/john/0/', [], {'name': 'john'}), ('repeats', '/repeats/a/', [], {}), ('repeats2', '/repeats/aa/', [], {}), ('repeats3', '/repeats/aa/', [], {}), ('test', '/test/1', [], {}), ('inner-nothing', '/outer/42/', [], {'outer': '42'}), ('inner-nothing', '/outer/42/', ['42'], {}), ('inner-nothing', NoReverseMatch, ['foo'], {}), ('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}), ('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}), ('inner-extra', NoReverseMatch, ['fred', 'inner'], {}), ('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}), ('disjunction', NoReverseMatch, ['foo'], {}), ('inner-disjunction', NoReverseMatch, ['10', '11'], {}), ('extra-places', '/e-places/10/', ['10'], {}), ('extra-people', '/e-people/fred/', ['fred'], {}), ('extra-people', '/e-people/fred/', [], {'name': 'fred'}), ('part', '/part/one/', [], {'value': 'one'}), ('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}), ('part2', '/part2/one/', [], {'value': 'one'}), ('part2', '/part2/', [], {}), ('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}), 
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}), # Tests for nested groups. Nested capturing groups will only work if you # *only* supply the correct outer group. ('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}), ('nested-capture', '/nested/capture/opt/', ['opt/'], {}), ('nested-capture', NoReverseMatch, [], {'p': 'opt'}), ('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}), ('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}), ('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}), ('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}), ('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}), ('non_path_include', '/includes/non_path_include/', [], {}), # Tests for #13154 ('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}), ('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}), ('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}), ('defaults', NoReverseMatch, [], {'arg2': 1}), # Security tests ('security', '/%2Fexample.com/security/', ['/example.com'], {}), ) @override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls') class NoURLPatternsTests(SimpleTestCase): def test_no_urls_exception(self): """ URLResolver should raise an exception when no urlpatterns exist. """ resolver = URLResolver(RegexPattern(r'^$'), settings.ROOT_URLCONF) with self.assertRaisesMessage( ImproperlyConfigured, "The included URLconf 'urlpatterns_reverse.no_urls' does not " "appear to have any patterns in it. If you see the 'urlpatterns' " "variable with valid patterns in the file then the issue is " "probably caused by a circular import." ): getattr(resolver, 'url_patterns') @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls') class URLPatternReverse(SimpleTestCase): def test_urlpattern_reverse(self): for name, expected, args, kwargs in test_data: with self.subTest(name=name, args=args, kwargs=kwargs): try: got = reverse(name, args=args, kwargs=kwargs) except NoReverseMatch: self.assertEqual(NoReverseMatch, expected) else: self.assertEqual(got, expected) def test_reverse_none(self): # Reversing None should raise an error, not return the last un-named view. with self.assertRaises(NoReverseMatch): reverse(None) def test_mixing_args_and_kwargs(self): msg = "Don't mix *args and **kwargs in call to reverse()!" with self.assertRaisesMessage(ValueError, msg): reverse('name', args=['a'], kwargs={'b': 'c'}) @override_script_prefix('/{{invalid}}/') def test_prefix_braces(self): self.assertEqual( '/%7B%7Binvalid%7D%7D/includes/non_path_include/', reverse('non_path_include') ) def test_prefix_parenthesis(self): # Parentheses are allowed and should not cause errors or be escaped with override_script_prefix('/bogus)/'): self.assertEqual( '/bogus)/includes/non_path_include/', reverse('non_path_include') ) with override_script_prefix('/(bogus)/'): self.assertEqual( '/(bogus)/includes/non_path_include/', reverse('non_path_include') ) @override_script_prefix('/bump%20map/') def test_prefix_format_char(self): self.assertEqual( '/bump%2520map/includes/non_path_include/', reverse('non_path_include') ) @override_script_prefix('/%7Eme/') def test_non_urlsafe_prefix_with_args(self): # Regression for #20022, adjusted for #24013 because ~ is an unreserved # character. Tests whether % is escaped. 
self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1])) def test_patterns_reported(self): # Regression for #17076 with self.assertRaisesMessage(NoReverseMatch, r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"): # this url exists, but requires an argument reverse("people", args=[]) @override_script_prefix('/script:name/') def test_script_name_escaping(self): self.assertEqual( reverse('optional', args=['foo:bar']), '/script:name/optional/foo:bar/' ) def test_view_not_found_message(self): msg = ( "Reverse for 'nonexistent-view' not found. 'nonexistent-view' " "is not a valid view function or pattern name." ) with self.assertRaisesMessage(NoReverseMatch, msg): reverse('nonexistent-view') def test_no_args_message(self): msg = "Reverse for 'places' with no arguments not found. 1 pattern(s) tried:" with self.assertRaisesMessage(NoReverseMatch, msg): reverse('places') def test_illegal_args_message(self): msg = "Reverse for 'places' with arguments '(1, 2)' not found. 1 pattern(s) tried:" with self.assertRaisesMessage(NoReverseMatch, msg): reverse('places', args=(1, 2)) def test_illegal_kwargs_message(self): msg = "Reverse for 'places' with keyword arguments '{'arg1': 2}' not found. 1 pattern(s) tried:" with self.assertRaisesMessage(NoReverseMatch, msg): reverse('places', kwargs={'arg1': 2}) class ResolverTests(SimpleTestCase): def test_resolver_repr(self): """ Test repr of URLResolver, especially when urlconf_name is a list (#17892). """ # Pick a resolver from a namespaced URLconf resolver = get_resolver('urlpatterns_reverse.namespace_urls') sub_resolver = resolver.namespace_dict['test-ns1'][1] self.assertIn('<URLPattern list>', repr(sub_resolver)) def test_reverse_lazy_object_coercion_by_resolve(self): """ Verifies lazy object returned by reverse_lazy is coerced to text by resolve(). Previous to #21043, this would raise a TypeError. """ urls = 'urlpatterns_reverse.named_urls' proxy_url = reverse_lazy('named-url1', urlconf=urls) resolver = get_resolver(urls) resolver.resolve(proxy_url) def test_resolver_reverse(self): resolver = get_resolver('urlpatterns_reverse.named_urls') test_urls = [ # (name, args, kwargs, expected) ('named-url1', (), {}, ''), ('named-url2', ('arg',), {}, 'extra/arg/'), ('named-url2', (), {'extra': 'arg'}, 'extra/arg/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(resolver.reverse(name, *args, **kwargs), expected) def test_resolver_reverse_conflict(self): """ URL pattern name arguments don't need to be unique. The last registered pattern takes precedence for conflicting names. """ resolver = get_resolver('urlpatterns_reverse.named_urls_conflict') test_urls = [ # (name, args, kwargs, expected) # Without arguments, the last URL in urlpatterns has precedence. ('name-conflict', (), {}, 'conflict/'), # With an arg, the last URL in urlpatterns has precedence. ('name-conflict', ('arg',), {}, 'conflict-last/arg/'), # With a kwarg, other URL patterns can be reversed. ('name-conflict', (), {'first': 'arg'}, 'conflict-first/arg/'), ('name-conflict', (), {'middle': 'arg'}, 'conflict-middle/arg/'), ('name-conflict', (), {'last': 'arg'}, 'conflict-last/arg/'), # The number and order of the arguments don't interfere with reversing. 
('name-conflict', ('arg', 'arg'), {}, 'conflict/arg/arg/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(resolver.reverse(name, *args, **kwargs), expected) def test_non_regex(self): """ A Resolver404 is raised if resolving doesn't meet the basic requirements of a path to match - i.e., at the very least, it matches the root pattern '^/'. Never return None from resolve() to prevent a TypeError from occurring later (#10834). """ test_urls = ['', 'a', '\\', '.'] for path_ in test_urls: with self.subTest(path=path_): with self.assertRaises(Resolver404): resolve(path_) def test_404_tried_urls_have_names(self): """ The list of URLs that come back from a Resolver404 exception contains a list in the right format for printing out in the DEBUG 404 page with both the patterns and URL names, if available. """ urls = 'urlpatterns_reverse.named_urls' # this list matches the expected URL types and names returned when # you try to resolve a nonexistent URL in the first level of included # URLs in named_urls.py (e.g., '/included/nonexistent-url') url_types_names = [ [{'type': URLPattern, 'name': 'named-url1'}], [{'type': URLPattern, 'name': 'named-url2'}], [{'type': URLPattern, 'name': None}], [{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url3'}], [{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url4'}], [{'type': URLResolver}, {'type': URLPattern, 'name': None}], [{'type': URLResolver}, {'type': URLResolver}], ] with self.assertRaisesMessage(Resolver404, 'tried') as cm: resolve('/included/nonexistent-url', urlconf=urls) e = cm.exception # make sure we at least matched the root ('/') url resolver: self.assertIn('tried', e.args[0]) self.assertEqual( len(e.args[0]['tried']), len(url_types_names), 'Wrong number of tried URLs returned. Expected %s, got %s.' % ( len(url_types_names), len(e.args[0]['tried']) ) ) for tried, expected in zip(e.args[0]['tried'], url_types_names): for t, e in zip(tried, expected): with self.subTest(t): self.assertIsInstance(t, e['type']), '%s is not an instance of %s' % (t, e['type']) if 'name' in e: if not e['name']: self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name) else: self.assertEqual( t.name, e['name'], 'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name) ) def test_namespaced_view_detail(self): resolver = get_resolver('urlpatterns_reverse.nested_urls') self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view1')) self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view2')) self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.View3')) self.assertFalse(resolver._is_callback('urlpatterns_reverse.nested_urls.blub')) def test_view_detail_as_method(self): # Views which have a class name as part of their path. resolver = get_resolver('urlpatterns_reverse.method_view_urls') self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.method_view')) self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view')) def test_populate_concurrency(self): """ URLResolver._populate() can be called concurrently, but not more than once per thread (#26888). 
""" resolver = URLResolver(RegexPattern(r'^/'), 'urlpatterns_reverse.urls') resolver._local.populating = True thread = threading.Thread(target=resolver._populate) thread.start() thread.join() self.assertNotEqual(resolver._reverse_dict, {}) @override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls') class ReverseLazyTest(TestCase): def test_redirect_with_lazy_reverse(self): response = self.client.get('/redirect/') self.assertRedirects(response, "/redirected_to/", status_code=302) def test_user_permission_with_lazy_reverse(self): alfred = User.objects.create_user('alfred', '[email protected]', password='testpw') response = self.client.get('/login_required_view/') self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302) self.client.force_login(alfred) response = self.client.get('/login_required_view/') self.assertEqual(response.status_code, 200) def test_inserting_reverse_lazy_into_string(self): self.assertEqual( 'Some URL: %s' % reverse_lazy('some-login-page'), 'Some URL: /login/' ) def test_build_absolute_uri(self): factory = RequestFactory() request = factory.get('/') self.assertEqual( request.build_absolute_uri(reverse_lazy('some-login-page')), 'http://testserver/login/', ) class ReverseLazySettingsTest(AdminScriptTestCase): """ reverse_lazy can be used in settings without causing a circular import error. """ def setUp(self): super().setUp() self.write_settings( 'settings.py', extra="from django.urls import reverse_lazy\nLOGIN_URL = reverse_lazy('login')", ) def test_lazy_in_settings(self): out, err = self.run_manage(['check']) self.assertNoOutput(err) @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls') class ReverseShortcutTests(SimpleTestCase): def test_redirect_to_object(self): # We don't really need a model; just something with a get_absolute_url class FakeObj: def get_absolute_url(self): return "/hi-there/" res = redirect(FakeObj()) self.assertIsInstance(res, HttpResponseRedirect) self.assertEqual(res.url, '/hi-there/') res = redirect(FakeObj(), permanent=True) self.assertIsInstance(res, HttpResponsePermanentRedirect) self.assertEqual(res.url, '/hi-there/') def test_redirect_to_view_name(self): res = redirect('hardcoded2') self.assertEqual(res.url, '/hardcoded/doc.pdf') res = redirect('places', 1) self.assertEqual(res.url, '/places/1/') res = redirect('headlines', year='2008', month='02', day='17') self.assertEqual(res.url, '/headlines/2008.02.17/') with self.assertRaises(NoReverseMatch): redirect('not-a-view') def test_redirect_to_url(self): res = redirect('/foo/') self.assertEqual(res.url, '/foo/') res = redirect('http://example.com/') self.assertEqual(res.url, 'http://example.com/') # Assert that we can redirect using UTF-8 strings res = redirect('/æøå/abc/') self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/') # Assert that no imports are attempted when dealing with a relative path # (previously, the below would resolve in a UnicodeEncodeError from __import__ ) res = redirect('/æøå.abc/') self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/') res = redirect('os.path') self.assertEqual(res.url, 'os.path') def test_no_illegal_imports(self): # modules that are not listed in urlpatterns should not be importable redirect("urlpatterns_reverse.nonimported_module.view") self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules) def test_reverse_by_path_nested(self): # Views added to urlpatterns using include() should be reversible. 
from .views import nested_view self.assertEqual(reverse(nested_view), '/includes/nested_path/') def test_redirect_view_object(self): from .views import absolute_kwargs_view res = redirect(absolute_kwargs_view) self.assertEqual(res.url, '/absolute_arg_view/') with self.assertRaises(NoReverseMatch): redirect(absolute_kwargs_view, wrong_argument=None) @override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls') class NamespaceTests(SimpleTestCase): def test_ambiguous_object(self): """ Names deployed via dynamic URL objects that require namespaces can't be resolved. """ test_urls = [ ('urlobject-view', [], {}), ('urlobject-view', [37, 42], {}), ('urlobject-view', [], {'arg1': 42, 'arg2': 37}), ] for name, args, kwargs in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): with self.assertRaises(NoReverseMatch): reverse(name, args=args, kwargs=kwargs) def test_ambiguous_urlpattern(self): """ Names deployed via dynamic URL objects that require namespaces can't be resolved. """ test_urls = [ ('inner-nothing', [], {}), ('inner-nothing', [37, 42], {}), ('inner-nothing', [], {'arg1': 42, 'arg2': 37}), ] for name, args, kwargs in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): with self.assertRaises(NoReverseMatch): reverse(name, args=args, kwargs=kwargs) def test_non_existent_namespace(self): """Nonexistent namespaces raise errors.""" test_urls = [ 'blahblah:urlobject-view', 'test-ns1:blahblah:urlobject-view', ] for name in test_urls: with self.subTest(name=name): with self.assertRaises(NoReverseMatch): reverse(name) def test_normal_name(self): """Normal lookups work as expected.""" test_urls = [ ('normal-view', [], {}, '/normal/'), ('normal-view', [37, 42], {}, '/normal/37/42/'), ('normal-view', [], {'arg1': 42, 'arg2': 37}, '/normal/42/37/'), ('special-view', [], {}, '/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_simple_included_name(self): """Normal lookups work on names included from other patterns.""" test_urls = [ ('included_namespace_urls:inc-normal-view', [], {}, '/included/normal/'), ('included_namespace_urls:inc-normal-view', [37, 42], {}, '/included/normal/37/42/'), ('included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/included/normal/42/37/'), ('included_namespace_urls:inc-special-view', [], {}, '/included/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_namespace_object(self): """Dynamic URL objects can be found using a namespace.""" test_urls = [ ('test-ns1:urlobject-view', [], {}, '/test1/inner/'), ('test-ns1:urlobject-view', [37, 42], {}, '/test1/inner/37/42/'), ('test-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/test1/inner/42/37/'), ('test-ns1:urlobject-special-view', [], {}, '/test1/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_app_object(self): """ Dynamic URL objects can return a (pattern, app_name) 2-tuple, and include() can set the namespace. 
""" test_urls = [ ('new-ns1:urlobject-view', [], {}, '/newapp1/inner/'), ('new-ns1:urlobject-view', [37, 42], {}, '/newapp1/inner/37/42/'), ('new-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/newapp1/inner/42/37/'), ('new-ns1:urlobject-special-view', [], {}, '/newapp1/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_app_object_default_namespace(self): """ Namespace defaults to app_name when including a (pattern, app_name) 2-tuple. """ test_urls = [ ('newapp:urlobject-view', [], {}, '/new-default/inner/'), ('newapp:urlobject-view', [37, 42], {}, '/new-default/inner/37/42/'), ('newapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/new-default/inner/42/37/'), ('newapp:urlobject-special-view', [], {}, '/new-default/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_embedded_namespace_object(self): """Namespaces can be installed anywhere in the URL pattern tree.""" test_urls = [ ('included_namespace_urls:test-ns3:urlobject-view', [], {}, '/included/test3/inner/'), ('included_namespace_urls:test-ns3:urlobject-view', [37, 42], {}, '/included/test3/inner/37/42/'), ( 'included_namespace_urls:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/included/test3/inner/42/37/', ), ('included_namespace_urls:test-ns3:urlobject-special-view', [], {}, '/included/test3/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_namespace_pattern(self): """Namespaces can be applied to include()'d urlpatterns.""" test_urls = [ ('inc-ns1:inc-normal-view', [], {}, '/ns-included1/normal/'), ('inc-ns1:inc-normal-view', [37, 42], {}, '/ns-included1/normal/37/42/'), ('inc-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/ns-included1/normal/42/37/'), ('inc-ns1:inc-special-view', [], {}, '/ns-included1/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_app_name_pattern(self): """ Namespaces can be applied to include()'d urlpatterns that set an app_name attribute. """ test_urls = [ ('app-ns1:inc-normal-view', [], {}, '/app-included1/normal/'), ('app-ns1:inc-normal-view', [37, 42], {}, '/app-included1/normal/37/42/'), ('app-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/app-included1/normal/42/37/'), ('app-ns1:inc-special-view', [], {}, '/app-included1/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_namespace_pattern_with_variable_prefix(self): """ Using include() with namespaces when there is a regex variable in front of it. 
""" test_urls = [ ('inc-outer:inc-normal-view', [], {'outer': 42}, '/ns-outer/42/normal/'), ('inc-outer:inc-normal-view', [42], {}, '/ns-outer/42/normal/'), ('inc-outer:inc-normal-view', [], {'arg1': 37, 'arg2': 4, 'outer': 42}, '/ns-outer/42/normal/37/4/'), ('inc-outer:inc-normal-view', [42, 37, 4], {}, '/ns-outer/42/normal/37/4/'), ('inc-outer:inc-special-view', [], {'outer': 42}, '/ns-outer/42/+%5C$*/'), ('inc-outer:inc-special-view', [42], {}, '/ns-outer/42/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_multiple_namespace_pattern(self): """Namespaces can be embedded.""" test_urls = [ ('inc-ns1:test-ns3:urlobject-view', [], {}, '/ns-included1/test3/inner/'), ('inc-ns1:test-ns3:urlobject-view', [37, 42], {}, '/ns-included1/test3/inner/37/42/'), ( 'inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/ns-included1/test3/inner/42/37/', ), ('inc-ns1:test-ns3:urlobject-special-view', [], {}, '/ns-included1/test3/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_nested_namespace_pattern(self): """Namespaces can be nested.""" test_urls = [ ( 'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {}, '/ns-included1/ns-included4/ns-included1/test3/inner/', ), ( 'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [37, 42], {}, '/ns-included1/ns-included4/ns-included1/test3/inner/37/42/', ), ( 'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/ns-included1/ns-included4/ns-included1/test3/inner/42/37/', ), ( 'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view', [], {}, '/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/', ), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_app_lookup_object(self): """A default application namespace can be used for lookup.""" test_urls = [ ('testapp:urlobject-view', [], {}, '/default/inner/'), ('testapp:urlobject-view', [37, 42], {}, '/default/inner/37/42/'), ('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/default/inner/42/37/'), ('testapp:urlobject-special-view', [], {}, '/default/inner/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_app_lookup_object_with_default(self): """A default application namespace is sensitive to the current app.""" test_urls = [ ('testapp:urlobject-view', [], {}, 'test-ns3', '/default/inner/'), ('testapp:urlobject-view', [37, 42], {}, 'test-ns3', '/default/inner/37/42/'), ('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'test-ns3', '/default/inner/42/37/'), ('testapp:urlobject-special-view', [], {}, 'test-ns3', '/default/inner/+%5C$*/'), ] for name, args, kwargs, current_app, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app): self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected) def test_app_lookup_object_without_default(self): """ An application namespace without a default is sensitive to the current app. 
""" test_urls = [ ('nodefault:urlobject-view', [], {}, None, '/other2/inner/'), ('nodefault:urlobject-view', [37, 42], {}, None, '/other2/inner/37/42/'), ('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/other2/inner/42/37/'), ('nodefault:urlobject-special-view', [], {}, None, '/other2/inner/+%5C$*/'), ('nodefault:urlobject-view', [], {}, 'other-ns1', '/other1/inner/'), ('nodefault:urlobject-view', [37, 42], {}, 'other-ns1', '/other1/inner/37/42/'), ('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'other-ns1', '/other1/inner/42/37/'), ('nodefault:urlobject-special-view', [], {}, 'other-ns1', '/other1/inner/+%5C$*/'), ] for name, args, kwargs, current_app, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app): self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected) def test_special_chars_namespace(self): test_urls = [ ('special:included_namespace_urls:inc-normal-view', [], {}, '/+%5C$*/included/normal/'), ('special:included_namespace_urls:inc-normal-view', [37, 42], {}, '/+%5C$*/included/normal/37/42/'), ( 'special:included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/+%5C$*/included/normal/42/37/', ), ('special:included_namespace_urls:inc-special-view', [], {}, '/+%5C$*/included/+%5C$*/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_namespaces_with_variables(self): """Namespace prefixes can capture variables.""" test_urls = [ ('inc-ns5:inner-nothing', [], {'outer': '70'}, '/inc70/'), ('inc-ns5:inner-extra', [], {'extra': 'foobar', 'outer': '78'}, '/inc78/extra/foobar/'), ('inc-ns5:inner-nothing', ['70'], {}, '/inc70/'), ('inc-ns5:inner-extra', ['78', 'foobar'], {}, '/inc78/extra/foobar/'), ] for name, args, kwargs, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs): self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected) def test_nested_app_lookup(self): """ A nested current_app should be split in individual namespaces (#24904). 
""" test_urls = [ ('inc-ns1:testapp:urlobject-view', [], {}, None, '/ns-included1/test4/inner/'), ('inc-ns1:testapp:urlobject-view', [37, 42], {}, None, '/ns-included1/test4/inner/37/42/'), ('inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/ns-included1/test4/inner/42/37/'), ('inc-ns1:testapp:urlobject-special-view', [], {}, None, '/ns-included1/test4/inner/+%5C$*/'), ('inc-ns1:testapp:urlobject-view', [], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/'), ('inc-ns1:testapp:urlobject-view', [37, 42], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/37/42/'), ( 'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/42/37/', ), ( 'inc-ns1:testapp:urlobject-special-view', [], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/+%5C$*/', ), ] for name, args, kwargs, current_app, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app): self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected) def test_current_app_no_partial_match(self): """current_app shouldn't be used unless it matches the whole path.""" test_urls = [ ('inc-ns1:testapp:urlobject-view', [], {}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/'), ( 'inc-ns1:testapp:urlobject-view', [37, 42], {}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/37/42/', ), ( 'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/42/37/', ), ( 'inc-ns1:testapp:urlobject-special-view', [], {}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/+%5C$*/', ), ] for name, args, kwargs, current_app, expected in test_urls: with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app): self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected) @override_settings(ROOT_URLCONF=urlconf_outer.__name__) class RequestURLconfTests(SimpleTestCase): def test_urlconf(self): response = self.client.get('/test/me/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/') response = self.client.get('/inner_urlconf/second_test/') self.assertEqual(response.status_code, 200) response = self.client.get('/second_test/') self.assertEqual(response.status_code, 404) @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, ] ) def test_urlconf_overridden(self): response = self.client.get('/test/me/') self.assertEqual(response.status_code, 404) response = self.client.get('/inner_urlconf/second_test/') self.assertEqual(response.status_code, 404) response = self.client.get('/second_test/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'outer:,inner:/second_test/') @override_settings( MIDDLEWARE=[ '%s.NullChangeURLconfMiddleware' % middleware.__name__, ] ) def test_urlconf_overridden_with_null(self): """ Overriding request.urlconf with None will fall back to the default URLconf. 
""" response = self.client.get('/test/me/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/') response = self.client.get('/inner_urlconf/second_test/') self.assertEqual(response.status_code, 200) response = self.client.get('/second_test/') self.assertEqual(response.status_code, 404) @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseInnerInResponseMiddleware' % middleware.__name__, ] ) def test_reverse_inner_in_response_middleware(self): """ Test reversing an URL from the *overridden* URLconf from inside a response middleware. """ response = self.client.get('/second_test/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseOuterInResponseMiddleware' % middleware.__name__, ] ) def test_reverse_outer_in_response_middleware(self): """ Test reversing an URL from the *default* URLconf from inside a response middleware. """ msg = ( "Reverse for 'outer' not found. 'outer' is not a valid view " "function or pattern name." ) with self.assertRaisesMessage(NoReverseMatch, msg): self.client.get('/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseInnerInStreaming' % middleware.__name__, ] ) def test_reverse_inner_in_streaming(self): """ Test reversing an URL from the *overridden* URLconf from inside a streaming response. """ response = self.client.get('/second_test/') self.assertEqual(response.status_code, 200) self.assertEqual(b''.join(response), b'/second_test/') @override_settings( MIDDLEWARE=[ '%s.ChangeURLconfMiddleware' % middleware.__name__, '%s.ReverseOuterInStreaming' % middleware.__name__, ] ) def test_reverse_outer_in_streaming(self): """ Test reversing an URL from the *default* URLconf from inside a streaming response. """ message = "Reverse for 'outer' not found." 
with self.assertRaisesMessage(NoReverseMatch, message): self.client.get('/second_test/') b''.join(self.client.get('/second_test/')) def test_urlconf_is_reset_after_request(self): """The URLconf is reset after each request.""" self.assertIsNone(get_urlconf()) with override_settings(MIDDLEWARE=['%s.ChangeURLconfMiddleware' % middleware.__name__]): self.client.get(reverse('inner')) self.assertIsNone(get_urlconf()) class ErrorHandlerResolutionTests(SimpleTestCase): """Tests for handler400, handler404 and handler500""" def setUp(self): urlconf = 'urlpatterns_reverse.urls_error_handlers' urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables' self.resolver = URLResolver(RegexPattern(r'^$'), urlconf) self.callable_resolver = URLResolver(RegexPattern(r'^$'), urlconf_callables) def test_named_handlers(self): for code in [400, 404, 500]: with self.subTest(code=code): self.assertEqual(self.resolver.resolve_error_handler(code), empty_view) def test_callable_handlers(self): for code in [400, 404, 500]: with self.subTest(code=code): self.assertEqual(self.callable_resolver.resolve_error_handler(code), empty_view) @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_handlers') class DefaultErrorHandlerTests(SimpleTestCase): def test_default_handler(self): "If the urls.py doesn't specify handlers, the defaults are used" response = self.client.get('/test/') self.assertEqual(response.status_code, 404) msg = "I don't think I'm getting good value for this view" with self.assertRaisesMessage(ValueError, msg): self.client.get('/bad_view/') @override_settings(ROOT_URLCONF=None) class NoRootUrlConfTests(SimpleTestCase): """Tests for handler404 and handler500 if ROOT_URLCONF is None""" def test_no_handler_exception(self): msg = ( "The included URLconf 'None' does not appear to have any patterns " "in it. If you see the 'urlpatterns' variable with valid patterns " "in the file then the issue is probably caused by a circular " "import." ) with self.assertRaisesMessage(ImproperlyConfigured, msg): self.client.get('/test/me/') @override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls') class ResolverMatchTests(SimpleTestCase): def test_urlpattern_resolve(self): for path_, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data: with self.subTest(path=path_): # Legacy support for extracting "function, args, kwargs". match_func, match_args, match_kwargs = resolve(path_) self.assertEqual(match_func, func) self.assertEqual(match_args, args) self.assertEqual(match_kwargs, kwargs) # ResolverMatch capabilities. 
match = resolve(path_) self.assertEqual(match.__class__, ResolverMatch) self.assertEqual(match.url_name, url_name) self.assertEqual(match.app_name, app_name) self.assertEqual(match.namespace, namespace) self.assertEqual(match.view_name, view_name) self.assertEqual(match.func, func) self.assertEqual(match.args, args) self.assertEqual(match.kwargs, kwargs) # and for legacy purposes: self.assertEqual(match[0], func) self.assertEqual(match[1], args) self.assertEqual(match[2], kwargs) def test_resolver_match_on_request(self): response = self.client.get('/resolver_match/') resolver_match = response.resolver_match self.assertEqual(resolver_match.url_name, 'test-resolver-match') def test_resolver_match_on_request_before_resolution(self): request = HttpRequest() self.assertIsNone(request.resolver_match) def test_repr(self): self.assertEqual( repr(resolve('/no_kwargs/42/37/')), "ResolverMatch(func=urlpatterns_reverse.views.empty_view, " "args=('42', '37'), kwargs={}, url_name='no-kwargs', app_names=[], " "namespaces=[], route='^no_kwargs/([0-9]+)/([0-9]+)/$')", ) @override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls') def test_classbased_repr(self): self.assertEqual( repr(resolve('/redirect/')), "ResolverMatch(func=urlpatterns_reverse.views.LazyRedirectView, " "args=(), kwargs={}, url_name=None, app_names=[], " "namespaces=[], route='redirect/')", ) @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls') def test_repr_functools_partial(self): tests = [ ('partial', 'template.html'), ('partial_nested', 'nested_partial.html'), ('partial_wrapped', 'template.html'), ] for name, template_name in tests: with self.subTest(name=name): func = ( f"functools.partial({views.empty_view!r}, " f"template_name='{template_name}')" ) self.assertEqual( repr(resolve(f'/{name}/')), f"ResolverMatch(func={func}, args=(), kwargs={{}}, " f"url_name='{name}', app_names=[], namespaces=[], " f"route='{name}/')", ) @override_settings(ROOT_URLCONF='urlpatterns.path_urls') def test_pickling(self): msg = 'Cannot pickle ResolverMatch.' with self.assertRaisesMessage(pickle.PicklingError, msg): pickle.dumps(resolve('/users/')) @override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls') class ErroneousViewTests(SimpleTestCase): def test_noncallable_view(self): # View is not a callable (explicit import; arbitrary Python object) with self.assertRaisesMessage(TypeError, 'view must be a callable'): path('uncallable-object/', views.uncallable) def test_invalid_regex(self): # Regex contains an error (refs #6170) msg = '(regex_error/$" is not a valid regular expression' with self.assertRaisesMessage(ImproperlyConfigured, msg): reverse(views.empty_view) class ViewLoadingTests(SimpleTestCase): def test_view_loading(self): self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view) self.assertEqual(get_callable(empty_view), empty_view) def test_view_does_not_exist(self): msg = "View does not exist in module urlpatterns_reverse.views." with self.assertRaisesMessage(ViewDoesNotExist, msg): get_callable('urlpatterns_reverse.views.i_should_not_exist') def test_attributeerror_not_hidden(self): msg = 'I am here to confuse django.urls.get_callable' with self.assertRaisesMessage(AttributeError, msg): get_callable('urlpatterns_reverse.views_broken.i_am_broken') def test_non_string_value(self): msg = "'1' is not a callable or a dot-notation path" with self.assertRaisesMessage(ViewDoesNotExist, msg): get_callable(1) def test_string_without_dot(self): msg = "Could not import 'test'. 
The path must be fully qualified." with self.assertRaisesMessage(ImportError, msg): get_callable('test') def test_module_does_not_exist(self): with self.assertRaisesMessage(ImportError, "No module named 'foo'"): get_callable('foo.bar') def test_parent_module_does_not_exist(self): msg = 'Parent module urlpatterns_reverse.foo does not exist.' with self.assertRaisesMessage(ViewDoesNotExist, msg): get_callable('urlpatterns_reverse.foo.bar') def test_not_callable(self): msg = ( "Could not import 'urlpatterns_reverse.tests.resolve_test_data'. " "View is not callable." ) with self.assertRaisesMessage(ViewDoesNotExist, msg): get_callable('urlpatterns_reverse.tests.resolve_test_data') class IncludeTests(SimpleTestCase): url_patterns = [ path('inner/', views.empty_view, name='urlobject-view'), re_path(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'), re_path(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'), ] app_urls = URLObject('inc-app') def test_include_urls(self): self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None)) def test_include_namespace(self): msg = ( 'Specifying a namespace in include() without providing an ' 'app_name is not supported.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): include(self.url_patterns, 'namespace') def test_include_4_tuple(self): msg = 'Passing a 4-tuple to include() is not supported.' with self.assertRaisesMessage(ImproperlyConfigured, msg): include((self.url_patterns, 'app_name', 'namespace', 'blah')) def test_include_3_tuple(self): msg = 'Passing a 3-tuple to include() is not supported.' with self.assertRaisesMessage(ImproperlyConfigured, msg): include((self.url_patterns, 'app_name', 'namespace')) def test_include_3_tuple_namespace(self): msg = 'Cannot override the namespace for a dynamic module that provides a namespace.' 
with self.assertRaisesMessage(ImproperlyConfigured, msg): include((self.url_patterns, 'app_name', 'namespace'), 'namespace') def test_include_2_tuple(self): self.assertEqual( include((self.url_patterns, 'app_name')), (self.url_patterns, 'app_name', 'app_name') ) def test_include_2_tuple_namespace(self): self.assertEqual( include((self.url_patterns, 'app_name'), namespace='namespace'), (self.url_patterns, 'app_name', 'namespace') ) def test_include_app_name(self): self.assertEqual( include(self.app_urls), (self.app_urls, 'inc-app', 'inc-app') ) def test_include_app_name_namespace(self): self.assertEqual( include(self.app_urls, 'namespace'), (self.app_urls, 'inc-app', 'namespace') ) @override_settings(ROOT_URLCONF='urlpatterns_reverse.urls') class LookaheadTests(SimpleTestCase): def test_valid_resolve(self): test_urls = [ '/lookahead-/a-city/', '/lookbehind-/a-city/', '/lookahead+/a-city/', '/lookbehind+/a-city/', ] for test_url in test_urls: with self.subTest(url=test_url): self.assertEqual(resolve(test_url).kwargs, {'city': 'a-city'}) def test_invalid_resolve(self): test_urls = [ '/lookahead-/not-a-city/', '/lookbehind-/not-a-city/', '/lookahead+/other-city/', '/lookbehind+/other-city/', ] for test_url in test_urls: with self.subTest(url=test_url): with self.assertRaises(Resolver404): resolve(test_url) def test_valid_reverse(self): test_urls = [ ('lookahead-positive', {'city': 'a-city'}, '/lookahead+/a-city/'), ('lookahead-negative', {'city': 'a-city'}, '/lookahead-/a-city/'), ('lookbehind-positive', {'city': 'a-city'}, '/lookbehind+/a-city/'), ('lookbehind-negative', {'city': 'a-city'}, '/lookbehind-/a-city/'), ] for name, kwargs, expected in test_urls: with self.subTest(name=name, kwargs=kwargs): self.assertEqual(reverse(name, kwargs=kwargs), expected) def test_invalid_reverse(self): test_urls = [ ('lookahead-positive', {'city': 'other-city'}), ('lookahead-negative', {'city': 'not-a-city'}), ('lookbehind-positive', {'city': 'other-city'}), ('lookbehind-negative', {'city': 'not-a-city'}), ] for name, kwargs in test_urls: with self.subTest(name=name, kwargs=kwargs): with self.assertRaises(NoReverseMatch): reverse(name, kwargs=kwargs)
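# Illustrative sketch, not part of the original suite: a minimal reverse()/resolve()
# round trip using the 'headlines' pattern that test_data above already exercises.
# It relies on the module-level imports of reverse, resolve, SimpleTestCase and
# override_settings; the expected URL is taken verbatim from test_data.
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseResolveRoundTripSketch(SimpleTestCase):

    def test_round_trip(self):
        # reverse() builds the URL from the pattern name and keyword arguments...
        url = reverse('headlines', kwargs={'year': 2007, 'month': 5, 'day': 21})
        self.assertEqual(url, '/headlines/2007.5.21/')
        # ...and resolve() maps that URL back to a ResolverMatch. Captured
        # values come back as strings because they are read from the path.
        match = resolve(url)
        self.assertEqual(match.url_name, 'headlines')
        self.assertEqual(match.kwargs, {'year': '2007', 'month': '5', 'day': '21'})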
dabd8bd1d31d007b0a05e1727c01605b6701ec7da15c4e485ef7618742ab1347
import datetime import os import shutil import tempfile import unittest from io import StringIO from pathlib import Path from unittest import mock from admin_scripts.tests import AdminScriptTestCase from django.conf import settings from django.contrib.staticfiles import storage from django.contrib.staticfiles.management.commands import ( collectstatic, runserver, ) from django.core.exceptions import ImproperlyConfigured from django.core.management import CommandError, call_command from django.core.management.base import SystemCheckError from django.test import RequestFactory, override_settings from django.test.utils import extend_sys_path from django.utils import timezone from django.utils._os import symlinks_supported from django.utils.functional import empty from .cases import CollectionTestCase, StaticFilesTestCase, TestDefaults from .settings import TEST_ROOT, TEST_SETTINGS from .storage import DummyStorage class TestNoFilesCreated: def test_no_files_created(self): """ Make sure no files were create in the destination directory. """ self.assertEqual(os.listdir(settings.STATIC_ROOT), []) class TestRunserver(StaticFilesTestCase): @override_settings(MIDDLEWARE=['django.middleware.common.CommonMiddleware']) def test_middleware_loaded_only_once(self): command = runserver.Command() with mock.patch('django.middleware.common.CommonMiddleware') as mocked: command.get_handler(use_static_handler=True, insecure_serving=True) self.assertEqual(mocked.call_count, 1) def test_404_response(self): command = runserver.Command() handler = command.get_handler(use_static_handler=True, insecure_serving=True) missing_static_file = os.path.join(settings.STATIC_URL, 'unknown.css') req = RequestFactory().get(missing_static_file) with override_settings(DEBUG=False): response = handler.get_response(req) self.assertEqual(response.status_code, 404) with override_settings(DEBUG=True): response = handler.get_response(req) self.assertEqual(response.status_code, 404) class TestFindStatic(TestDefaults, CollectionTestCase): """ Test ``findstatic`` management command. """ def _get_file(self, filepath): path = call_command('findstatic', filepath, all=False, verbosity=0, stdout=StringIO()) with open(path, encoding='utf-8') as f: return f.read() def test_all_files(self): """ findstatic returns all candidate files if run without --first and -v1. """ result = call_command('findstatic', 'test/file.txt', verbosity=1, stdout=StringIO()) lines = [line.strip() for line in result.split('\n')] self.assertEqual(len(lines), 3) # three because there is also the "Found <file> here" line self.assertIn('project', lines[1]) self.assertIn('apps', lines[2]) def test_all_files_less_verbose(self): """ findstatic returns all candidate files if run without --first and -v0. """ result = call_command('findstatic', 'test/file.txt', verbosity=0, stdout=StringIO()) lines = [line.strip() for line in result.split('\n')] self.assertEqual(len(lines), 2) self.assertIn('project', lines[0]) self.assertIn('apps', lines[1]) def test_all_files_more_verbose(self): """ findstatic returns all candidate files if run without --first and -v2. Also, test that findstatic returns the searched locations with -v2. 
""" result = call_command('findstatic', 'test/file.txt', verbosity=2, stdout=StringIO()) lines = [line.strip() for line in result.split('\n')] self.assertIn('project', lines[1]) self.assertIn('apps', lines[2]) self.assertIn("Looking in the following locations:", lines[3]) searched_locations = ', '.join(lines[4:]) # AppDirectoriesFinder searched locations self.assertIn(os.path.join('staticfiles_tests', 'apps', 'test', 'static'), searched_locations) self.assertIn(os.path.join('staticfiles_tests', 'apps', 'no_label', 'static'), searched_locations) # FileSystemFinder searched locations self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][1][1], searched_locations) self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][0], searched_locations) self.assertIn(str(TEST_SETTINGS['STATICFILES_DIRS'][2]), searched_locations) # DefaultStorageFinder searched locations self.assertIn( os.path.join('staticfiles_tests', 'project', 'site_media', 'media'), searched_locations ) class TestConfiguration(StaticFilesTestCase): def test_location_empty(self): msg = 'without having set the STATIC_ROOT setting to a filesystem path' err = StringIO() for root in ['', None]: with override_settings(STATIC_ROOT=root): with self.assertRaisesMessage(ImproperlyConfigured, msg): call_command('collectstatic', interactive=False, verbosity=0, stderr=err) def test_local_storage_detection_helper(self): staticfiles_storage = storage.staticfiles_storage try: storage.staticfiles_storage._wrapped = empty with self.settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage'): command = collectstatic.Command() self.assertTrue(command.is_local_storage()) storage.staticfiles_storage._wrapped = empty with self.settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage'): command = collectstatic.Command() self.assertFalse(command.is_local_storage()) collectstatic.staticfiles_storage = storage.FileSystemStorage() command = collectstatic.Command() self.assertTrue(command.is_local_storage()) collectstatic.staticfiles_storage = DummyStorage() command = collectstatic.Command() self.assertFalse(command.is_local_storage()) finally: staticfiles_storage._wrapped = empty collectstatic.staticfiles_storage = staticfiles_storage storage.staticfiles_storage = staticfiles_storage @override_settings(STATICFILES_DIRS=('test')) def test_collectstatis_check(self): msg = 'The STATICFILES_DIRS setting is not a tuple or list.' with self.assertRaisesMessage(SystemCheckError, msg): call_command('collectstatic', skip_checks=False) class TestCollectionHelpSubcommand(AdminScriptTestCase): @override_settings(STATIC_ROOT=None) def test_missing_settings_dont_prevent_help(self): """ Even if the STATIC_ROOT setting is not set, one can still call the `manage.py help collectstatic` command. """ self.write_settings('settings.py', apps=['django.contrib.staticfiles']) out, err = self.run_manage(['help', 'collectstatic']) self.assertNoOutput(err) class TestCollection(TestDefaults, CollectionTestCase): """ Test ``collectstatic`` management command. """ def test_ignore(self): """ -i patterns are ignored. """ self.assertFileNotFound('test/test.ignoreme') def test_common_ignore_patterns(self): """ Common ignore patterns (*~, .*, CVS) are ignored. 
""" self.assertFileNotFound('test/.hidden') self.assertFileNotFound('test/backup~') self.assertFileNotFound('test/CVS') def test_pathlib(self): self.assertFileContains('pathlib.txt', 'pathlib') class TestCollectionPathLib(TestCollection): def mkdtemp(self): tmp_dir = super().mkdtemp() return Path(tmp_dir) class TestCollectionVerbosity(CollectionTestCase): copying_msg = 'Copying ' run_collectstatic_in_setUp = False post_process_msg = 'Post-processed' staticfiles_copied_msg = 'static files copied to' def test_verbosity_0(self): stdout = StringIO() self.run_collectstatic(verbosity=0, stdout=stdout) self.assertEqual(stdout.getvalue(), '') def test_verbosity_1(self): stdout = StringIO() self.run_collectstatic(verbosity=1, stdout=stdout) output = stdout.getvalue() self.assertIn(self.staticfiles_copied_msg, output) self.assertNotIn(self.copying_msg, output) def test_verbosity_2(self): stdout = StringIO() self.run_collectstatic(verbosity=2, stdout=stdout) output = stdout.getvalue() self.assertIn(self.staticfiles_copied_msg, output) self.assertIn(self.copying_msg, output) @override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage') def test_verbosity_1_with_post_process(self): stdout = StringIO() self.run_collectstatic(verbosity=1, stdout=stdout, post_process=True) self.assertNotIn(self.post_process_msg, stdout.getvalue()) @override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage') def test_verbosity_2_with_post_process(self): stdout = StringIO() self.run_collectstatic(verbosity=2, stdout=stdout, post_process=True) self.assertIn(self.post_process_msg, stdout.getvalue()) class TestCollectionClear(CollectionTestCase): """ Test the ``--clear`` option of the ``collectstatic`` management command. """ def run_collectstatic(self, **kwargs): clear_filepath = os.path.join(settings.STATIC_ROOT, 'cleared.txt') with open(clear_filepath, 'w') as f: f.write('should be cleared') super().run_collectstatic(clear=True) def test_cleared_not_found(self): self.assertFileNotFound('cleared.txt') def test_dir_not_exists(self, **kwargs): shutil.rmtree(settings.STATIC_ROOT) super().run_collectstatic(clear=True) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.PathNotImplementedStorage') def test_handle_path_notimplemented(self): self.run_collectstatic() self.assertFileNotFound('cleared.txt') class TestInteractiveMessages(CollectionTestCase): overwrite_warning_msg = "This will overwrite existing files!" delete_warning_msg = "This will DELETE ALL FILES in this location!" 
files_copied_msg = "static files copied" @staticmethod def mock_input(stdout): def _input(msg): stdout.write(msg) return 'yes' return _input def test_warning_when_clearing_staticdir(self): stdout = StringIO() self.run_collectstatic() with mock.patch('builtins.input', side_effect=self.mock_input(stdout)): call_command('collectstatic', interactive=True, clear=True, stdout=stdout) output = stdout.getvalue() self.assertNotIn(self.overwrite_warning_msg, output) self.assertIn(self.delete_warning_msg, output) def test_warning_when_overwriting_files_in_staticdir(self): stdout = StringIO() self.run_collectstatic() with mock.patch('builtins.input', side_effect=self.mock_input(stdout)): call_command('collectstatic', interactive=True, stdout=stdout) output = stdout.getvalue() self.assertIn(self.overwrite_warning_msg, output) self.assertNotIn(self.delete_warning_msg, output) def test_no_warning_when_staticdir_does_not_exist(self): stdout = StringIO() shutil.rmtree(settings.STATIC_ROOT) call_command('collectstatic', interactive=True, stdout=stdout) output = stdout.getvalue() self.assertNotIn(self.overwrite_warning_msg, output) self.assertNotIn(self.delete_warning_msg, output) self.assertIn(self.files_copied_msg, output) def test_no_warning_for_empty_staticdir(self): stdout = StringIO() with tempfile.TemporaryDirectory(prefix='collectstatic_empty_staticdir_test') as static_dir: with override_settings(STATIC_ROOT=static_dir): call_command('collectstatic', interactive=True, stdout=stdout) output = stdout.getvalue() self.assertNotIn(self.overwrite_warning_msg, output) self.assertNotIn(self.delete_warning_msg, output) self.assertIn(self.files_copied_msg, output) def test_cancelled(self): self.run_collectstatic() with mock.patch('builtins.input', side_effect=lambda _: 'no'): with self.assertRaisesMessage(CommandError, 'Collecting static files cancelled'): call_command('collectstatic', interactive=True) class TestCollectionNoDefaultIgnore(TestDefaults, CollectionTestCase): """ The ``--no-default-ignore`` option of the ``collectstatic`` management command. """ def run_collectstatic(self): super().run_collectstatic(use_default_ignore_patterns=False) def test_no_common_ignore_patterns(self): """ With --no-default-ignore, common ignore patterns (*~, .*, CVS) are not ignored. """ self.assertFileContains('test/.hidden', 'should be ignored') self.assertFileContains('test/backup~', 'should be ignored') self.assertFileContains('test/CVS', 'should be ignored') @override_settings(INSTALLED_APPS=[ 'staticfiles_tests.apps.staticfiles_config.IgnorePatternsAppConfig', 'staticfiles_tests.apps.test', ]) class TestCollectionCustomIgnorePatterns(CollectionTestCase): def test_custom_ignore_patterns(self): """ A custom ignore_patterns list, ['*.css', '*/vendor/*.js'] in this case, can be specified in an AppConfig definition. """ self.assertFileNotFound('test/nonascii.css') self.assertFileContains('test/.hidden', 'should be ignored') self.assertFileNotFound(os.path.join('test', 'vendor', 'module.js')) class TestCollectionDryRun(TestNoFilesCreated, CollectionTestCase): """ Test ``--dry-run`` option for ``collectstatic`` management command. """ def run_collectstatic(self): super().run_collectstatic(dry_run=True) @override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage') class TestCollectionDryRunManifestStaticFilesStorage(TestCollectionDryRun): pass class TestCollectionFilesOverride(CollectionTestCase): """ Test overriding duplicated files by ``collectstatic`` management command. 
Check for proper handling of apps order in installed apps even if file modification dates are in different order: 'staticfiles_test_app', 'staticfiles_tests.apps.no_label', """ def setUp(self): self.temp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.temp_dir) # get modification and access times for no_label/static/file2.txt self.orig_path = os.path.join(TEST_ROOT, 'apps', 'no_label', 'static', 'file2.txt') self.orig_mtime = os.path.getmtime(self.orig_path) self.orig_atime = os.path.getatime(self.orig_path) # prepare duplicate of file2.txt from a temporary app # this file will have modification time older than no_label/static/file2.txt # anyway it should be taken to STATIC_ROOT because the temporary app is before # 'no_label' app in installed apps self.temp_app_path = os.path.join(self.temp_dir, 'staticfiles_test_app') self.testfile_path = os.path.join(self.temp_app_path, 'static', 'file2.txt') os.makedirs(self.temp_app_path) with open(os.path.join(self.temp_app_path, '__init__.py'), 'w+'): pass os.makedirs(os.path.dirname(self.testfile_path)) with open(self.testfile_path, 'w+') as f: f.write('duplicate of file2.txt') os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1)) self.settings_with_test_app = self.modify_settings( INSTALLED_APPS={'prepend': 'staticfiles_test_app'}, ) with extend_sys_path(self.temp_dir): self.settings_with_test_app.enable() super().setUp() def tearDown(self): super().tearDown() self.settings_with_test_app.disable() def test_ordering_override(self): """ Test if collectstatic takes files in proper order """ self.assertFileContains('file2.txt', 'duplicate of file2.txt') # run collectstatic again self.run_collectstatic() self.assertFileContains('file2.txt', 'duplicate of file2.txt') # The collectstatic test suite already has conflicting files since both # project/test/file.txt and apps/test/static/test/file.txt are collected. To # properly test for the warning not happening unless we tell it to explicitly, # we remove the project directory and will add back a conflicting file later. @override_settings(STATICFILES_DIRS=[]) class TestCollectionOverwriteWarning(CollectionTestCase): """ Test warning in ``collectstatic`` output when a file is skipped because a previous file was already written to the same path. """ # If this string is in the collectstatic output, it means the warning we're # looking for was emitted. warning_string = 'Found another file' def _collectstatic_output(self, **kwargs): """ Run collectstatic, and capture and return the output. We want to run the command at highest verbosity, which is why we can't just call e.g. BaseCollectionTestCase.run_collectstatic() """ out = StringIO() call_command('collectstatic', interactive=False, verbosity=3, stdout=out, **kwargs) return out.getvalue() def test_no_warning(self): """ There isn't a warning if there isn't a duplicate destination. """ output = self._collectstatic_output(clear=True) self.assertNotIn(self.warning_string, output) def test_warning(self): """ There is a warning when there are duplicate destinations. """ with tempfile.TemporaryDirectory() as static_dir: duplicate = os.path.join(static_dir, 'test', 'file.txt') os.mkdir(os.path.dirname(duplicate)) with open(duplicate, 'w+') as f: f.write('duplicate of file.txt') with self.settings(STATICFILES_DIRS=[static_dir]): output = self._collectstatic_output(clear=True) self.assertIn(self.warning_string, output) os.remove(duplicate) # Make sure the warning went away again. 
with self.settings(STATICFILES_DIRS=[static_dir]): output = self._collectstatic_output(clear=True) self.assertNotIn(self.warning_string, output) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage') class TestCollectionNonLocalStorage(TestNoFilesCreated, CollectionTestCase): """ Tests for a Storage that implements get_modified_time() but not path() (#15035). """ def test_storage_properties(self): # Properties of the Storage as described in the ticket. storage = DummyStorage() self.assertEqual(storage.get_modified_time('name'), datetime.datetime(1970, 1, 1, tzinfo=timezone.utc)) with self.assertRaisesMessage(NotImplementedError, "This backend doesn't support absolute paths."): storage.path('name') class TestCollectionNeverCopyStorage(CollectionTestCase): @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NeverCopyRemoteStorage') def test_skips_newer_files_in_remote_storage(self): """ collectstatic skips newer files in a remote storage. run_collectstatic() in setUp() copies the static files, then files are always skipped after NeverCopyRemoteStorage is activated since NeverCopyRemoteStorage.get_modified_time() returns a datetime in the future to simulate an unmodified file. """ stdout = StringIO() self.run_collectstatic(stdout=stdout, verbosity=2) output = stdout.getvalue() self.assertIn("Skipping 'test.txt' (not modified)", output) @unittest.skipUnless(symlinks_supported(), "Must be able to symlink to run this test.") class TestCollectionLinks(TestDefaults, CollectionTestCase): """ Test ``--link`` option for ``collectstatic`` management command. Note that by inheriting ``TestDefaults`` we repeat all the standard file resolving tests here, to make sure using ``--link`` does not change the file-selection semantics. """ def run_collectstatic(self, clear=False, link=True, **kwargs): super().run_collectstatic(link=link, clear=clear, **kwargs) def test_links_created(self): """ With ``--link``, symbolic links are created. """ self.assertTrue(os.path.islink(os.path.join(settings.STATIC_ROOT, 'test.txt'))) def test_broken_symlink(self): """ Test broken symlink gets deleted. """ path = os.path.join(settings.STATIC_ROOT, 'test.txt') os.unlink(path) self.run_collectstatic() self.assertTrue(os.path.islink(path)) def test_symlinks_and_files_replaced(self): """ Running collectstatic in non-symlink mode replaces symlinks with files, while symlink mode replaces files with symlinks. """ path = os.path.join(settings.STATIC_ROOT, 'test.txt') self.assertTrue(os.path.islink(path)) self.run_collectstatic(link=False) self.assertFalse(os.path.islink(path)) self.run_collectstatic(link=True) self.assertTrue(os.path.islink(path)) def test_clear_broken_symlink(self): """ With ``--clear``, broken symbolic links are deleted. """ nonexistent_file_path = os.path.join(settings.STATIC_ROOT, 'nonexistent.txt') broken_symlink_path = os.path.join(settings.STATIC_ROOT, 'symlink.txt') os.symlink(nonexistent_file_path, broken_symlink_path) self.run_collectstatic(clear=True) self.assertFalse(os.path.lexists(broken_symlink_path)) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.PathNotImplementedStorage') def test_no_remote_link(self): with self.assertRaisesMessage(CommandError, "Can't symlink to a remote destination."): self.run_collectstatic()
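

# Editor's note: the storage backends exercised above (DummyStorage,
# NeverCopyRemoteStorage, PathNotImplementedStorage) are defined in
# staticfiles_tests/storage.py, which is not shown here. The class below is an
# illustrative stand-in with the behaviour the docstrings describe -- a
# hypothetical sketch, not the suite's actual implementation.
from django.core.files.storage import FileSystemStorage


class SketchRemoteStorage(FileSystemStorage):
    """
    A far-future get_modified_time() makes collectstatic consider every
    destination file up to date ("Skipping ... (not modified)"), while a
    path() that raises NotImplementedError marks the storage as remote, which
    is why --link aborts with "Can't symlink to a remote destination."
    """
    def get_modified_time(self, name):
        import datetime
        return datetime.datetime(3000, 1, 1, tzinfo=datetime.timezone.utc)

    def path(self, name):
        raise NotImplementedError("This backend doesn't support absolute paths.")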
""" A subset of the tests in tests/servers/tests exercising django.contrib.staticfiles.testing.StaticLiveServerTestCase instead of django.test.LiveServerTestCase. """ import os from urllib.request import urlopen from django.contrib.staticfiles.testing import StaticLiveServerTestCase from django.core.exceptions import ImproperlyConfigured from django.test import modify_settings, override_settings TEST_ROOT = os.path.dirname(__file__) TEST_SETTINGS = { 'MEDIA_URL': 'media/', 'STATIC_URL': 'static/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'project', 'site_media', 'media'), 'STATIC_ROOT': os.path.join(TEST_ROOT, 'project', 'site_media', 'static'), } class LiveServerBase(StaticLiveServerTestCase): available_apps = [] @classmethod def setUpClass(cls): # Override settings cls.settings_override = override_settings(**TEST_SETTINGS) cls.settings_override.enable() cls.addClassCleanup(cls.settings_override.disable) super().setUpClass() class StaticLiveServerChecks(LiveServerBase): @classmethod def setUpClass(cls): # If contrib.staticfiles isn't configured properly, the exception # should bubble up to the main thread. old_STATIC_URL = TEST_SETTINGS['STATIC_URL'] TEST_SETTINGS['STATIC_URL'] = None try: cls.raises_exception() finally: TEST_SETTINGS['STATIC_URL'] = old_STATIC_URL @classmethod def tearDownClass(cls): # skip it, as setUpClass doesn't call its parent either pass @classmethod def raises_exception(cls): try: super().setUpClass() except ImproperlyConfigured: # This raises ImproperlyConfigured("You're using the staticfiles # app without having set the required STATIC_URL setting.") pass else: raise Exception('setUpClass() should have raised an exception.') def test_test_test(self): # Intentionally empty method so that the test is picked up by the # test runner and the overridden setUpClass() method is executed. pass class StaticLiveServerView(LiveServerBase): def urlopen(self, url): return urlopen(self.live_server_url + url) # The test is going to access a static file stored in this application. @modify_settings(INSTALLED_APPS={'append': 'staticfiles_tests.apps.test'}) def test_collectstatic_emulation(self): """ StaticLiveServerTestCase use of staticfiles' serve() allows it to discover app's static assets without having to collectstatic first. """ with self.urlopen('/static/test/file.txt') as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'In static directory.')
from urllib.parse import urljoin from django.contrib.staticfiles import storage from django.forms import Media from django.templatetags.static import static from django.test import SimpleTestCase, override_settings class StaticTestStorage(storage.StaticFilesStorage): def url(self, name): return urljoin('https://example.com/assets/', name) @override_settings( STATIC_URL='http://media.example.com/static/', INSTALLED_APPS=('django.contrib.staticfiles',), STATICFILES_STORAGE='staticfiles_tests.test_forms.StaticTestStorage', ) class StaticFilesFormsMediaTestCase(SimpleTestCase): def test_absolute_url(self): m = Media( css={'all': ('path/to/css1', '/path/to/css2')}, js=( '/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3', static('relative/path/to/js4'), ), ) self.assertEqual( str(m), """<link href="https://example.com/assets/path/to/css1" media="all" rel="stylesheet"> <link href="/path/to/css2" media="all" rel="stylesheet"> <script src="/path/to/js1"></script> <script src="http://media.other.com/path/to/js2"></script> <script src="https://secure.other.com/path/to/js3"></script> <script src="https://example.com/assets/relative/path/to/js4"></script>""" )
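

# Editor's sketch (illustrative, not part of the original module): the rule the
# assertion above exercises is that Media routes *relative* asset paths through
# the configured staticfiles storage (here StaticTestStorage.url(), i.e. the
# https://example.com/assets/ prefix), while paths starting with '/' or a
# scheme are emitted verbatim. Under the same override_settings, a minimal
# check might look like the plain helper below (not a test).
def sketch_media_url_resolution():
    media = Media(js=['path/to/app.js', '/already/absolute.js'])
    rendered = str(media)
    # The relative path is rewritten by the storage backend ...
    assert 'src="https://example.com/assets/path/to/app.js"' in rendered
    # ... while the absolute path passes through unchanged.
    assert 'src="/already/absolute.js"' in rendered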
import json import os import shutil import sys import tempfile import unittest from io import StringIO from pathlib import Path from unittest import mock from django.conf import settings from django.contrib.staticfiles import finders, storage from django.contrib.staticfiles.management.commands.collectstatic import ( Command as CollectstaticCommand, ) from django.core.management import call_command from django.test import SimpleTestCase, override_settings from .cases import CollectionTestCase from .settings import TEST_ROOT def hashed_file_path(test, path): fullpath = test.render_template(test.static_template_snippet(path)) return fullpath.replace(settings.STATIC_URL, '') class TestHashedFiles: hashed_file_path = hashed_file_path def tearDown(self): # Clear hashed files to avoid side effects among tests. storage.staticfiles_storage.hashed_files.clear() def assertPostCondition(self): """ Assert post conditions for a test are met. Must be manually called at the end of each test. """ pass def test_template_tag_return(self): self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png") self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt") self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True) self.assertStaticRenders("cached/styles.css", "/static/cached/styles.5e0040571e1a.css") self.assertStaticRenders("path/", "/static/path/") self.assertStaticRenders("path/?query", "/static/path/?query") self.assertPostCondition() def test_template_tag_simple_content(self): relpath = self.hashed_file_path("cached/styles.css") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_ignored_completely(self): relpath = self.hashed_file_path("cached/css/ignored.css") self.assertEqual(relpath, "cached/css/ignored.554da52152af.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'#foobar', content) self.assertIn(b'http:foobar', content) self.assertIn(b'https:foobar', content) self.assertIn(b'data:foobar', content) self.assertIn(b'chrome:foobar', content) self.assertIn(b'//foobar', content) self.assertPostCondition() def test_path_with_querystring(self): relpath = self.hashed_file_path("cached/styles.css?spam=eggs") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs") with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_with_fragment(self): relpath = self.hashed_file_path("cached/styles.css#eggs") self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs") with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_path_with_querystring_and_fragment(self): relpath = self.hashed_file_path("cached/css/fragments.css") self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'fonts/font.b9b105392eb8.eot?#iefix', content) 
self.assertIn(b'fonts/font.b8d603e42714.svg#webfontIyfZbseF', content) self.assertIn(b'fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg', content) self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content) self.assertIn(b'#default#VML', content) self.assertPostCondition() def test_template_tag_absolute(self): relpath = self.hashed_file_path("cached/absolute.css") self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"/static/cached/styles.css", content) self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content) self.assertNotIn(b"/static/styles_root.css", content) self.assertIn(b"/static/styles_root.401f2509a628.css", content) self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content) self.assertPostCondition() def test_template_tag_absolute_root(self): """ Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249). """ relpath = self.hashed_file_path("absolute_root.css") self.assertEqual(relpath, "absolute_root.f821df1b64f7.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"/static/styles_root.css", content) self.assertIn(b"/static/styles_root.401f2509a628.css", content) self.assertPostCondition() def test_template_tag_relative(self): relpath = self.hashed_file_path("cached/relative.css") self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"../cached/styles.css", content) self.assertNotIn(b'@import "styles.css"', content) self.assertNotIn(b'url(img/relative.png)', content) self.assertIn(b'url("img/relative.acae32e4532b.png")', content) self.assertIn(b"../cached/styles.5e0040571e1a.css", content) self.assertPostCondition() def test_import_replacement(self): "See #18050" relpath = self.hashed_file_path("cached/import.css") self.assertEqual(relpath, "cached/import.f53576679e5a.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read()) self.assertPostCondition() def test_template_tag_deep_relative(self): relpath = self.hashed_file_path("cached/css/window.css") self.assertEqual(relpath, "cached/css/window.5d5c10836967.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b'url(img/window.png)', content) self.assertIn(b'url("img/window.acae32e4532b.png")', content) self.assertPostCondition() def test_template_tag_url(self): relpath = self.hashed_file_path("cached/url.css") self.assertEqual(relpath, "cached/url.902310b73412.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b"https://", relfile.read()) self.assertPostCondition() @override_settings( STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'loop')], STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'], ) def test_import_loop(self): finders.get_finder.cache_clear() err = StringIO() with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'): call_command('collectstatic', interactive=False, verbosity=0, stderr=err) self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue()) self.assertPostCondition() def test_post_processing(self): """ post_processing behaves correctly. Files that are alterable should always be post-processed; files that aren't should be skipped. 
collectstatic has already been called once in setUp() for this testcase, therefore we check by verifying behavior on a second run. """ collectstatic_args = { 'interactive': False, 'verbosity': 0, 'link': False, 'clear': False, 'dry_run': False, 'post_process': True, 'use_default_ignore_patterns': True, 'ignore_patterns': ['*.ignoreme'], } collectstatic_cmd = CollectstaticCommand() collectstatic_cmd.set_options(**collectstatic_args) stats = collectstatic_cmd.collect() self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed']) self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified']) self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed']) # No file should be yielded twice. self.assertCountEqual(stats['post_processed'], set(stats['post_processed'])) self.assertPostCondition() def test_css_import_case_insensitive(self): relpath = self.hashed_file_path("cached/styles_insensitive.css") self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.d41d8cd98f00.css", content) self.assertPostCondition() def test_css_source_map(self): relpath = self.hashed_file_path('cached/source_map.css') self.assertEqual(relpath, 'cached/source_map.b2fceaf426aa.css') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b'/*# sourceMappingURL=source_map.css.map */', content) self.assertIn( b'/*# sourceMappingURL=source_map.css.99914b932bd3.map */', content, ) self.assertPostCondition() def test_css_source_map_sensitive(self): relpath = self.hashed_file_path('cached/source_map_sensitive.css') self.assertEqual(relpath, 'cached/source_map_sensitive.456683f2106f.css') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'/*# sOuRcEMaPpInGURL=source_map.css.map */', content) self.assertNotIn( b'/*# sourceMappingURL=source_map.css.99914b932bd3.map */', content, ) self.assertPostCondition() def test_js_source_map(self): relpath = self.hashed_file_path('cached/source_map.js') self.assertEqual(relpath, 'cached/source_map.cd45b8534a87.js') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b'//# sourceMappingURL=source_map.js.map', content) self.assertIn( b'//# sourceMappingURL=source_map.js.99914b932bd3.map', content, ) self.assertPostCondition() def test_js_source_map_sensitive(self): relpath = self.hashed_file_path('cached/source_map_sensitive.js') self.assertEqual(relpath, 'cached/source_map_sensitive.5da96fdd3cb3.js') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'//# sOuRcEMaPpInGURL=source_map.js.map', content) self.assertNotIn( b'//# sourceMappingURL=source_map.js.99914b932bd3.map', content, ) self.assertPostCondition() @override_settings( STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')], STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'], ) def test_post_processing_failure(self): """ post_processing indicates the origin of the error when it fails. 
""" finders.get_finder.cache_clear() err = StringIO() with self.assertRaises(Exception): call_command('collectstatic', interactive=False, verbosity=0, stderr=err) self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue()) self.assertPostCondition() @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.ExtraPatternsStorage') class TestExtraPatternsStorage(CollectionTestCase): def setUp(self): storage.staticfiles_storage.hashed_files.clear() # avoid cache interference super().setUp() def cached_file_path(self, path): fullpath = self.render_template(self.static_template_snippet(path)) return fullpath.replace(settings.STATIC_URL, '') def test_multi_extension_patterns(self): """ With storage classes having several file extension patterns, only the files matching a specific file pattern should be affected by the substitution (#19670). """ # CSS files shouldn't be touched by JS patterns. relpath = self.cached_file_path("cached/import.css") self.assertEqual(relpath, "cached/import.f53576679e5a.css") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read()) # Confirm JS patterns have been applied to JS files. relpath = self.cached_file_path("cached/test.js") self.assertEqual(relpath, "cached/test.388d7a790d46.js") with storage.staticfiles_storage.open(relpath) as relfile: self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read()) @override_settings( STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage', ) class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase): """ Tests for the Cache busting storage """ def setUp(self): super().setUp() temp_dir = tempfile.mkdtemp() os.makedirs(os.path.join(temp_dir, 'test')) self._clear_filename = os.path.join(temp_dir, 'test', 'cleared.txt') with open(self._clear_filename, 'w') as f: f.write('to be deleted in one test') self.patched_settings = self.settings( STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir], ) self.patched_settings.enable() self.addCleanup(shutil.rmtree, temp_dir) self._manifest_strict = storage.staticfiles_storage.manifest_strict def tearDown(self): self.patched_settings.disable() if os.path.exists(self._clear_filename): os.unlink(self._clear_filename) storage.staticfiles_storage.manifest_strict = self._manifest_strict super().tearDown() def assertPostCondition(self): hashed_files = storage.staticfiles_storage.hashed_files # The in-memory version of the manifest matches the one on disk # since a properly created manifest should cover all filenames. 
        if hashed_files:
            manifest = storage.staticfiles_storage.load_manifest()
            self.assertEqual(hashed_files, manifest)

    def test_manifest_exists(self):
        filename = storage.staticfiles_storage.manifest_name
        path = storage.staticfiles_storage.path(filename)
        self.assertTrue(os.path.exists(path))

    def test_manifest_does_not_exist(self):
        storage.staticfiles_storage.manifest_name = 'does.not.exist.json'
        self.assertIsNone(storage.staticfiles_storage.read_manifest())

    def test_manifest_does_not_ignore_permission_error(self):
        with mock.patch('builtins.open', side_effect=PermissionError):
            with self.assertRaises(PermissionError):
                storage.staticfiles_storage.read_manifest()

    def test_loaded_cache(self):
        self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
        manifest_content = storage.staticfiles_storage.read_manifest()
        self.assertIn(
            '"version": "%s"' % storage.staticfiles_storage.manifest_version,
            manifest_content
        )

    def test_parse_cache(self):
        hashed_files = storage.staticfiles_storage.hashed_files
        manifest = storage.staticfiles_storage.load_manifest()
        self.assertEqual(hashed_files, manifest)

    def test_clear_empties_manifest(self):
        cleared_file_name = storage.staticfiles_storage.clean_name(os.path.join('test', 'cleared.txt'))
        # collect the additional file
        self.run_collectstatic()

        hashed_files = storage.staticfiles_storage.hashed_files
        self.assertIn(cleared_file_name, hashed_files)
        manifest_content = storage.staticfiles_storage.load_manifest()
        self.assertIn(cleared_file_name, manifest_content)

        original_path = storage.staticfiles_storage.path(cleared_file_name)
        self.assertTrue(os.path.exists(original_path))

        # delete the original file from the app, collect with clear
        os.unlink(self._clear_filename)
        self.run_collectstatic(clear=True)
        self.assertFileNotFound(original_path)

        hashed_files = storage.staticfiles_storage.hashed_files
        self.assertNotIn(cleared_file_name, hashed_files)
        manifest_content = storage.staticfiles_storage.load_manifest()
        self.assertNotIn(cleared_file_name, manifest_content)

    def test_missing_entry(self):
        missing_file_name = 'cached/missing.css'
        configured_storage = storage.staticfiles_storage
        self.assertNotIn(missing_file_name, configured_storage.hashed_files)

        # File name not found in manifest
        with self.assertRaisesMessage(ValueError, "Missing staticfiles manifest entry for '%s'" % missing_file_name):
            self.hashed_file_path(missing_file_name)

        configured_storage.manifest_strict = False
        # File doesn't exist on disk
        err_msg = "The file '%s' could not be found with %r." % (missing_file_name, configured_storage._wrapped)
        with self.assertRaisesMessage(ValueError, err_msg):
            self.hashed_file_path(missing_file_name)

        content = StringIO()
        content.write('Found')
        configured_storage.save(missing_file_name, content)
        # File exists on disk
        self.hashed_file_path(missing_file_name)

    def test_intermediate_files(self):
        cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, 'cached'))
        # Intermediate files shouldn't be created for reference.
self.assertEqual( len([ cached_file for cached_file in cached_files if cached_file.startswith('relative.') ]), 2, ) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NoneHashStorage') class TestCollectionNoneHashStorage(CollectionTestCase): hashed_file_path = hashed_file_path def test_hashed_name(self): relpath = self.hashed_file_path('cached/styles.css') self.assertEqual(relpath, 'cached/styles.css') @override_settings( STATICFILES_STORAGE='staticfiles_tests.storage.NoPostProcessReplacedPathStorage' ) class TestCollectionNoPostProcessReplacedPaths(CollectionTestCase): run_collectstatic_in_setUp = False def test_collectstatistic_no_post_process_replaced_paths(self): stdout = StringIO() self.run_collectstatic(verbosity=1, stdout=stdout) self.assertIn('post-processed', stdout.getvalue()) @override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.SimpleStorage') class TestCollectionSimpleStorage(CollectionTestCase): hashed_file_path = hashed_file_path def setUp(self): storage.staticfiles_storage.hashed_files.clear() # avoid cache interference super().setUp() def test_template_tag_return(self): self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png") self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt") self.assertStaticRenders("cached/styles.css", "/static/cached/styles.deploy12345.css") self.assertStaticRenders("path/", "/static/path/") self.assertStaticRenders("path/?query", "/static/path/?query") def test_template_tag_simple_content(self): relpath = self.hashed_file_path("cached/styles.css") self.assertEqual(relpath, "cached/styles.deploy12345.css") with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertNotIn(b"cached/other.css", content) self.assertIn(b"other.deploy12345.css", content) class CustomManifestStorage(storage.ManifestStaticFilesStorage): def __init__(self, *args, manifest_storage=None, **kwargs): manifest_storage = storage.StaticFilesStorage( location=kwargs.pop('manifest_location'), ) super().__init__(*args, manifest_storage=manifest_storage, **kwargs) class TestCustomManifestStorage(SimpleTestCase): def setUp(self): self.manifest_path = Path(tempfile.mkdtemp()) self.addCleanup(shutil.rmtree, self.manifest_path) self.staticfiles_storage = CustomManifestStorage( manifest_location=self.manifest_path, ) self.manifest_file = self.manifest_path / self.staticfiles_storage.manifest_name # Manifest without paths. 
self.manifest = {'version': self.staticfiles_storage.manifest_version} with self.manifest_file.open('w') as manifest_file: json.dump(self.manifest, manifest_file) def test_read_manifest(self): self.assertEqual( self.staticfiles_storage.read_manifest(), json.dumps(self.manifest), ) def test_read_manifest_nonexistent(self): os.remove(self.manifest_file) self.assertIsNone(self.staticfiles_storage.read_manifest()) def test_save_manifest_override(self): self.assertIs(self.manifest_file.exists(), True) self.staticfiles_storage.save_manifest() self.assertIs(self.manifest_file.exists(), True) new_manifest = json.loads(self.staticfiles_storage.read_manifest()) self.assertIn('paths', new_manifest) self.assertNotEqual(new_manifest, self.manifest) def test_save_manifest_create(self): os.remove(self.manifest_file) self.staticfiles_storage.save_manifest() self.assertIs(self.manifest_file.exists(), True) new_manifest = json.loads(self.staticfiles_storage.read_manifest()) self.assertIn('paths', new_manifest) self.assertNotEqual(new_manifest, self.manifest) class CustomStaticFilesStorage(storage.StaticFilesStorage): """ Used in TestStaticFilePermissions """ def __init__(self, *args, **kwargs): kwargs['file_permissions_mode'] = 0o640 kwargs['directory_permissions_mode'] = 0o740 super().__init__(*args, **kwargs) @unittest.skipIf(sys.platform == 'win32', "Windows only partially supports chmod.") class TestStaticFilePermissions(CollectionTestCase): command_params = { 'interactive': False, 'verbosity': 0, 'ignore_patterns': ['*.ignoreme'], } def setUp(self): self.umask = 0o027 self.old_umask = os.umask(self.umask) super().setUp() def tearDown(self): os.umask(self.old_umask) super().tearDown() # Don't run collectstatic command in this test class. def run_collectstatic(self, **kwargs): pass @override_settings( FILE_UPLOAD_PERMISSIONS=0o655, FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, ) def test_collect_static_files_permissions(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o655) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o765) @override_settings( FILE_UPLOAD_PERMISSIONS=None, FILE_UPLOAD_DIRECTORY_PERMISSIONS=None, ) def test_collect_static_files_default_permissions(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o666 & ~self.umask) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o777 & ~self.umask) @override_settings( FILE_UPLOAD_PERMISSIONS=0o655, FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765, STATICFILES_STORAGE='staticfiles_tests.test_storage.CustomStaticFilesStorage', ) def test_collect_static_files_subclass_of_static_storage(self): call_command('collectstatic', **self.command_params) static_root = Path(settings.STATIC_ROOT) test_file = static_root / 'test.txt' file_mode = test_file.stat().st_mode & 0o777 self.assertEqual(file_mode, 0o640) tests = [ static_root / 'subdir', static_root / 'nested', static_root / 'nested' / 'css', ] for 
directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o740) @override_settings( STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage', ) class TestCollectionHashedFilesCache(CollectionTestCase): """ Files referenced from CSS use the correct final hashed name regardless of the order in which the files are post-processed. """ hashed_file_path = hashed_file_path def setUp(self): super().setUp() self._temp_dir = temp_dir = tempfile.mkdtemp() os.makedirs(os.path.join(temp_dir, 'test')) self.addCleanup(shutil.rmtree, temp_dir) def _get_filename_path(self, filename): return os.path.join(self._temp_dir, 'test', filename) def test_file_change_after_collectstatic(self): # Create initial static files. file_contents = ( ('foo.png', 'foo'), ('bar.css', 'url("foo.png")\nurl("xyz.png")'), ('xyz.png', 'xyz'), ) for filename, content in file_contents: with open(self._get_filename_path(filename), 'w') as f: f.write(content) with self.modify_settings(STATICFILES_DIRS={'append': self._temp_dir}): finders.get_finder.cache_clear() err = StringIO() # First collectstatic run. call_command('collectstatic', interactive=False, verbosity=0, stderr=err) relpath = self.hashed_file_path('test/bar.css') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'foo.acbd18db4cc2.png', content) self.assertIn(b'xyz.d16fb36f0911.png', content) # Change the contents of the png files. for filename in ('foo.png', 'xyz.png'): with open(self._get_filename_path(filename), 'w+b') as f: f.write(b"new content of file to change its hash") # The hashes of the png files in the CSS file are updated after # a second collectstatic. call_command('collectstatic', interactive=False, verbosity=0, stderr=err) relpath = self.hashed_file_path('test/bar.css') with storage.staticfiles_storage.open(relpath) as relfile: content = relfile.read() self.assertIn(b'foo.57a5cb9ba68d.png', content) self.assertIn(b'xyz.57a5cb9ba68d.png', content)
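

# Editor's sketch (illustrative): the behaviour these tests rely on is that
# ManifestStaticFilesStorage writes an "original path -> hashed path" mapping
# into its manifest at collectstatic time, and that url() resolves names
# through that mapping. After a successful collectstatic run the mapping can
# be inspected like this (a plain helper, not part of the original suite; the
# example entry is hypothetical).
def sketch_inspect_manifest():
    from django.contrib.staticfiles.storage import staticfiles_storage
    # load_manifest() returns a dict such as
    # {'cached/styles.css': 'cached/styles.5e0040571e1a.css', ...}.
    mapping = staticfiles_storage.load_manifest()
    for original, hashed in sorted(mapping.items()):
        print('%s -> %s' % (original, hashed))
    # url() consults the same mapping, so for a collected file the returned
    # URL embeds the content hash, e.g.
    #   staticfiles_storage.url('cached/styles.css')
    #   == settings.STATIC_URL + 'cached/styles.5e0040571e1a.css'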
import unittest from datetime import date, datetime, time, timedelta from decimal import Decimal from operator import attrgetter, itemgetter from uuid import UUID from django.core.exceptions import FieldError from django.db import connection from django.db.models import ( BinaryField, BooleanField, Case, Count, DecimalField, F, GenericIPAddressField, IntegerField, Max, Min, Q, Sum, TextField, Value, When, ) from django.test import SimpleTestCase, TestCase from .models import CaseTestModel, Client, FKCaseTestModel, O2OCaseTestModel try: from PIL import Image except ImportError: Image = None class CaseExpressionTests(TestCase): @classmethod def setUpTestData(cls): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') O2OCaseTestModel.objects.create(o2o=o, integer=1) FKCaseTestModel.objects.create(fk=o, integer=1) o = CaseTestModel.objects.create(integer=2, integer2=3, string='2') O2OCaseTestModel.objects.create(o2o=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=3) o = CaseTestModel.objects.create(integer=3, integer2=4, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=2, integer2=2, string='2') O2OCaseTestModel.objects.create(o2o=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=2) FKCaseTestModel.objects.create(fk=o, integer=3) o = CaseTestModel.objects.create(integer=3, integer2=4, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=3, integer2=3, string='3') O2OCaseTestModel.objects.create(o2o=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=3) FKCaseTestModel.objects.create(fk=o, integer=4) o = CaseTestModel.objects.create(integer=4, integer2=5, string='4') O2OCaseTestModel.objects.create(o2o=o, integer=1) FKCaseTestModel.objects.create(fk=o, integer=5) cls.group_by_fields = [ f.name for f in CaseTestModel._meta.get_fields() if not (f.is_relation and f.auto_created) and ( connection.features.allows_group_by_lob or not isinstance(f, (BinaryField, TextField)) ) ] def test_annotate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), )).order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'other'), (2, 'two'), (3, 'other'), (3, 'other'), (4, 'other')], transform=attrgetter('integer', 'test') ) def test_annotate_without_default(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=1), When(integer=2, then=2), )).order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'test') ) def test_annotate_with_expression_as_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(f_test=Case( When(integer=1, then=F('integer') + 1), When(integer=2, then=F('integer') + 3), default='integer', )).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 4)], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_expression_as_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(f_test=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('integer') + 1, then=Value('+1')), )).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), 
(3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_join_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(integer=1, then=F('o2o_rel__integer') + 1), When(integer=2, then=F('o2o_rel__integer') + 3), default='o2o_rel__integer', )).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 1)], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_in_clause(self): fk_rels = FKCaseTestModel.objects.filter(integer__in=[5]) self.assertQuerysetEqual( CaseTestModel.objects.only('pk', 'integer').annotate(in_test=Sum(Case( When(fk_rel__in=fk_rels, then=F('fk_rel__integer')), default=Value(0), ))).order_by('pk'), [(1, 0), (2, 0), (3, 0), (2, 0), (3, 0), (3, 0), (4, 5)], transform=attrgetter('integer', 'in_test') ) def test_annotate_with_join_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(integer2=F('o2o_rel__integer'), then=Value('equal')), When(integer2=F('o2o_rel__integer') + 1, then=Value('+1')), default=Value('other'), )).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, 'other')], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_join_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(join_test=Case( When(o2o_rel__integer=1, then=Value('one')), When(o2o_rel__integer=2, then=Value('two')), When(o2o_rel__integer=3, then=Value('three')), default=Value('other'), )).order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'three'), (2, 'two'), (3, 'three'), (3, 'three'), (4, 'one')], transform=attrgetter('integer', 'join_test') ) def test_annotate_with_annotation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, f_plus_3=F('integer') + 3, ).annotate( f_test=Case( When(integer=1, then='f_plus_1'), When(integer=2, then='f_plus_3'), default='integer', ), ).order_by('pk'), [(1, 2), (2, 5), (3, 3), (2, 5), (3, 3), (3, 3), (4, 4)], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_annotation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).annotate( f_test=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('f_plus_1'), then=Value('+1')), ), ).order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'f_test') ) def test_annotate_with_annotation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_minus_2=F('integer') - 2, ).annotate( test=Case( When(f_minus_2=-1, then=Value('negative one')), When(f_minus_2=0, then=Value('zero')), When(f_minus_2=1, then=Value('one')), default=Value('other'), ), ).order_by('pk'), [(1, 'negative one'), (2, 'zero'), (3, 'one'), (2, 'zero'), (3, 'one'), (3, 'one'), (4, 'other')], transform=attrgetter('integer', 'test') ) def test_annotate_with_aggregation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.group_by_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).annotate( test=Case( When(integer=2, then='min'), When(integer=3, then='max'), ), ).order_by('pk'), [(1, None, 1, 1), (2, 2, 2, 3), (3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4), (3, 4, 3, 4), (4, None, 5, 5)], transform=itemgetter('integer', 'test', 'min', 'max') ) def test_annotate_with_aggregation_in_condition(self): self.assertQuerysetEqual( 
CaseTestModel.objects.values(*self.group_by_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).annotate( test=Case( When(integer2=F('min'), then=Value('min')), When(integer2=F('max'), then=Value('max')), ), ).order_by('pk'), [(1, 1, 'min'), (2, 3, 'max'), (3, 4, 'max'), (2, 2, 'min'), (3, 4, 'max'), (3, 3, 'min'), (4, 5, 'min')], transform=itemgetter('integer', 'integer2', 'test') ) def test_annotate_with_aggregation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.group_by_fields).annotate( max=Max('fk_rel__integer'), ).annotate( test=Case( When(max=3, then=Value('max = 3')), When(max=4, then=Value('max = 4')), default=Value(''), ), ).order_by('pk'), [(1, 1, ''), (2, 3, 'max = 3'), (3, 4, 'max = 4'), (2, 3, 'max = 3'), (3, 4, 'max = 4'), (3, 4, 'max = 4'), (4, 5, '')], transform=itemgetter('integer', 'max', 'test') ) def test_annotate_exclude(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), )).exclude(test='other').order_by('pk'), [(1, 'one'), (2, 'two'), (2, 'two')], transform=attrgetter('integer', 'test') ) def test_annotate_filter_decimal(self): obj = CaseTestModel.objects.create(integer=0, decimal=Decimal('1')) qs = CaseTestModel.objects.annotate( x=Case(When(integer=0, then=F('decimal'))), y=Case(When(integer=0, then=Value(Decimal('1')))), ) self.assertSequenceEqual(qs.filter(Q(x=1) & Q(x=Decimal('1'))), [obj]) self.assertSequenceEqual(qs.filter(Q(y=1) & Q(y=Decimal('1'))), [obj]) def test_annotate_values_not_in_order_by(self): self.assertEqual( list(CaseTestModel.objects.annotate(test=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), When(integer=3, then=Value('three')), default=Value('other'), )).order_by('test').values_list('integer', flat=True)), [1, 4, 3, 3, 3, 2, 2] ) def test_annotate_with_empty_when(self): objects = CaseTestModel.objects.annotate( selected=Case( When(pk__in=[], then=Value('selected')), default=Value('not selected'), ) ) self.assertEqual(len(objects), CaseTestModel.objects.count()) self.assertTrue(all(obj.selected == 'not selected' for obj in objects)) def test_combined_expression(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, ) + 1, ).order_by('pk'), [(1, 3), (2, 2), (3, 4), (2, 2), (3, 4), (3, 4), (4, 4)], transform=attrgetter('integer', 'test') ) def test_in_subquery(self): self.assertQuerysetEqual( CaseTestModel.objects.filter( pk__in=CaseTestModel.objects.annotate( test=Case( When(integer=F('integer2'), then='pk'), When(integer=4, then='pk'), ), ).values('test')).order_by('pk'), [(1, 1), (2, 2), (3, 3), (4, 5)], transform=attrgetter('integer', 'integer2') ) def test_condition_with_lookups(self): qs = CaseTestModel.objects.annotate( test=Case( When(Q(integer2=1), string='2', then=Value(False)), When(Q(integer2=1), string='1', then=Value(True)), default=Value(False), output_field=BooleanField(), ), ) self.assertIs(qs.get(integer=1).test, True) def test_case_reuse(self): SOME_CASE = Case( When(pk=0, then=Value('0')), default=Value('1'), ) self.assertQuerysetEqual( CaseTestModel.objects.annotate(somecase=SOME_CASE).order_by('pk'), CaseTestModel.objects.annotate(somecase=SOME_CASE).order_by('pk').values_list('pk', 'somecase'), lambda x: (x.pk, x.somecase) ) def test_aggregate(self): self.assertEqual( CaseTestModel.objects.aggregate( one=Sum(Case( When(integer=1, 
then=1), )), two=Sum(Case( When(integer=2, then=1), )), three=Sum(Case( When(integer=3, then=1), )), four=Sum(Case( When(integer=4, then=1), )), ), {'one': 1, 'two': 2, 'three': 3, 'four': 1} ) def test_aggregate_with_expression_as_value(self): self.assertEqual( CaseTestModel.objects.aggregate( one=Sum(Case(When(integer=1, then='integer'))), two=Sum(Case(When(integer=2, then=F('integer') - 1))), three=Sum(Case(When(integer=3, then=F('integer') + 1))), ), {'one': 1, 'two': 2, 'three': 12} ) def test_aggregate_with_expression_as_condition(self): self.assertEqual( CaseTestModel.objects.aggregate( equal=Sum(Case( When(integer2=F('integer'), then=1), )), plus_one=Sum(Case( When(integer2=F('integer') + 1, then=1), )), ), {'equal': 3, 'plus_one': 4} ) def test_filter(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=3), When(integer=3, then=4), default=1, )).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_without_default(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=3), When(integer=3, then=4), )).order_by('pk'), [(2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_expression_as_value(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=F('integer') + 1), When(integer=3, then=F('integer')), default='integer', )).order_by('pk'), [(1, 1), (2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_expression_as_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(string=Case( When(integer2=F('integer'), then=Value('2')), When(integer2=F('integer') + 1, then=Value('3')), )).order_by('pk'), [(3, 4, '3'), (2, 2, '2'), (3, 4, '3')], transform=attrgetter('integer', 'integer2', 'string') ) def test_filter_with_join_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(integer=2, then=F('o2o_rel__integer') + 1), When(integer=3, then=F('o2o_rel__integer')), default='o2o_rel__integer', )).order_by('pk'), [(1, 1), (2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_join_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer=Case( When(integer2=F('o2o_rel__integer') + 1, then=2), When(integer2=F('o2o_rel__integer'), then=3), )).order_by('pk'), [(2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_join_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer2=Case( When(o2o_rel__integer=1, then=1), When(o2o_rel__integer=2, then=3), When(o2o_rel__integer=3, then=4), )).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f=F('integer'), f_plus_1=F('integer') + 1, ).filter( integer2=Case( When(integer=2, then='f_plus_1'), When(integer=3, then='f'), ), ).order_by('pk'), [(2, 3), (3, 3)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).filter( integer=Case( When(integer2=F('integer'), then=2), When(integer2=F('f_plus_1'), then=3), ), ).order_by('pk'), [(3, 4), (2, 2), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_annotation_in_predicate(self): 
self.assertQuerysetEqual( CaseTestModel.objects.annotate( f_plus_1=F('integer') + 1, ).filter( integer2=Case( When(f_plus_1=3, then=3), When(f_plus_1=4, then=4), default=1, ), ).order_by('pk'), [(1, 1), (2, 3), (3, 4), (3, 4)], transform=attrgetter('integer', 'integer2') ) def test_filter_with_aggregation_in_value(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.group_by_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).filter( integer2=Case( When(integer=2, then='min'), When(integer=3, then='max'), ), ).order_by('pk'), [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)], transform=itemgetter('integer', 'integer2', 'min', 'max') ) def test_filter_with_aggregation_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.group_by_fields).annotate( min=Min('fk_rel__integer'), max=Max('fk_rel__integer'), ).filter( integer=Case( When(integer2=F('min'), then=2), When(integer2=F('max'), then=3), ), ).order_by('pk'), [(3, 4, 3, 4), (2, 2, 2, 3), (3, 4, 3, 4)], transform=itemgetter('integer', 'integer2', 'min', 'max') ) def test_filter_with_aggregation_in_predicate(self): self.assertQuerysetEqual( CaseTestModel.objects.values(*self.group_by_fields).annotate( max=Max('fk_rel__integer'), ).filter( integer=Case( When(max=3, then=2), When(max=4, then=3), ), ).order_by('pk'), [(2, 3, 3), (3, 4, 4), (2, 2, 3), (3, 4, 4), (3, 3, 4)], transform=itemgetter('integer', 'integer2', 'max') ) def test_update(self): CaseTestModel.objects.update( string=Case( When(integer=1, then=Value('one')), When(integer=2, then=Value('two')), default=Value('other'), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 'one'), (2, 'two'), (3, 'other'), (2, 'two'), (3, 'other'), (3, 'other'), (4, 'other')], transform=attrgetter('integer', 'string') ) def test_update_without_default(self): CaseTestModel.objects.update( integer2=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'integer2') ) def test_update_with_expression_as_value(self): CaseTestModel.objects.update( integer=Case( When(integer=1, then=F('integer') + 1), When(integer=2, then=F('integer') + 3), default='integer', ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [('1', 2), ('2', 5), ('3', 3), ('2', 5), ('3', 3), ('3', 3), ('4', 4)], transform=attrgetter('string', 'integer') ) def test_update_with_expression_as_condition(self): CaseTestModel.objects.update( string=Case( When(integer2=F('integer'), then=Value('equal')), When(integer2=F('integer') + 1, then=Value('+1')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 'equal'), (2, '+1'), (3, '+1'), (2, 'equal'), (3, '+1'), (3, 'equal'), (4, '+1')], transform=attrgetter('integer', 'string') ) def test_update_with_join_in_condition_raise_field_error(self): with self.assertRaisesMessage(FieldError, 'Joined field references are not permitted in this query'): CaseTestModel.objects.update( integer=Case( When(integer2=F('o2o_rel__integer') + 1, then=2), When(integer2=F('o2o_rel__integer'), then=3), ), ) def test_update_with_join_in_predicate_raise_field_error(self): with self.assertRaisesMessage(FieldError, 'Joined field references are not permitted in this query'): CaseTestModel.objects.update( string=Case( When(o2o_rel__integer=1, then=Value('one')), When(o2o_rel__integer=2, then=Value('two')), 
When(o2o_rel__integer=3, then=Value('three')), default=Value('other'), ), ) def test_update_big_integer(self): CaseTestModel.objects.update( big_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'big_integer') ) def test_update_binary(self): CaseTestModel.objects.update( binary=Case( When(integer=1, then=b'one'), When(integer=2, then=b'two'), default=b'', ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, b'one'), (2, b'two'), (3, b''), (2, b'two'), (3, b''), (3, b''), (4, b'')], transform=lambda o: (o.integer, bytes(o.binary)) ) def test_update_boolean(self): CaseTestModel.objects.update( boolean=Case( When(integer=1, then=True), When(integer=2, then=True), default=False, ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, True), (2, True), (3, False), (2, True), (3, False), (3, False), (4, False)], transform=attrgetter('integer', 'boolean') ) def test_update_date(self): CaseTestModel.objects.update( date=Case( When(integer=1, then=date(2015, 1, 1)), When(integer=2, then=date(2015, 1, 2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, date(2015, 1, 1)), (2, date(2015, 1, 2)), (3, None), (2, date(2015, 1, 2)), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'date') ) def test_update_date_time(self): CaseTestModel.objects.update( date_time=Case( When(integer=1, then=datetime(2015, 1, 1)), When(integer=2, then=datetime(2015, 1, 2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, datetime(2015, 1, 1)), (2, datetime(2015, 1, 2)), (3, None), (2, datetime(2015, 1, 2)), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'date_time') ) def test_update_decimal(self): CaseTestModel.objects.update( decimal=Case( When(integer=1, then=Decimal('1.1')), When(integer=2, then=Value(Decimal('2.2'), output_field=DecimalField())), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, Decimal('1.1')), (2, Decimal('2.2')), (3, None), (2, Decimal('2.2')), (3, None), (3, None), (4, None) ], transform=attrgetter('integer', 'decimal') ) def test_update_duration(self): CaseTestModel.objects.update( duration=Case( When(integer=1, then=timedelta(1)), When(integer=2, then=timedelta(2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, timedelta(1)), (2, timedelta(2)), (3, None), (2, timedelta(2)), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'duration') ) def test_update_email(self): CaseTestModel.objects.update( email=Case( When(integer=1, then=Value('[email protected]')), When(integer=2, then=Value('[email protected]')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '[email protected]'), (2, '[email protected]'), (3, ''), (2, '[email protected]'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'email') ) def test_update_file(self): CaseTestModel.objects.update( file=Case( When(integer=1, then=Value('~/1')), When(integer=2, then=Value('~/2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=lambda o: (o.integer, str(o.file)) ) def test_update_file_path(self): CaseTestModel.objects.update( file_path=Case( When(integer=1, 
then=Value('~/1')), When(integer=2, then=Value('~/2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'file_path') ) def test_update_float(self): CaseTestModel.objects.update( float=Case( When(integer=1, then=1.1), When(integer=2, then=2.2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1.1), (2, 2.2), (3, None), (2, 2.2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'float') ) @unittest.skipUnless(Image, "Pillow not installed") def test_update_image(self): CaseTestModel.objects.update( image=Case( When(integer=1, then=Value('~/1')), When(integer=2, then=Value('~/2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '~/1'), (2, '~/2'), (3, ''), (2, '~/2'), (3, ''), (3, ''), (4, '')], transform=lambda o: (o.integer, str(o.image)) ) def test_update_generic_ip_address(self): CaseTestModel.objects.update( generic_ip_address=Case( When(integer=1, then=Value('1.1.1.1')), When(integer=2, then=Value('2.2.2.2')), output_field=GenericIPAddressField(), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1.1.1.1'), (2, '2.2.2.2'), (3, None), (2, '2.2.2.2'), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'generic_ip_address') ) def test_update_null_boolean(self): CaseTestModel.objects.update( null_boolean=Case( When(integer=1, then=True), When(integer=2, then=False), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, True), (2, False), (3, None), (2, False), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'null_boolean') ) def test_update_positive_big_integer(self): CaseTestModel.objects.update( positive_big_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_big_integer') ) def test_update_positive_integer(self): CaseTestModel.objects.update( positive_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_integer') ) def test_update_positive_small_integer(self): CaseTestModel.objects.update( positive_small_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'positive_small_integer') ) def test_update_slug(self): CaseTestModel.objects.update( slug=Case( When(integer=1, then=Value('1')), When(integer=2, then=Value('2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1'), (2, '2'), (3, ''), (2, '2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'slug') ) def test_update_small_integer(self): CaseTestModel.objects.update( small_integer=Case( When(integer=1, then=1), When(integer=2, then=2), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, 1), (2, 2), (3, None), (2, 2), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'small_integer') ) def test_update_string(self): 
CaseTestModel.objects.filter(string__in=['1', '2']).update( string=Case( When(integer=1, then=Value('1')), When(integer=2, then=Value('2')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.filter(string__in=['1', '2']).order_by('pk'), [(1, '1'), (2, '2'), (2, '2')], transform=attrgetter('integer', 'string') ) def test_update_text(self): CaseTestModel.objects.update( text=Case( When(integer=1, then=Value('1')), When(integer=2, then=Value('2')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, '1'), (2, '2'), (3, ''), (2, '2'), (3, ''), (3, ''), (4, '')], transform=attrgetter('integer', 'text') ) def test_update_time(self): CaseTestModel.objects.update( time=Case( When(integer=1, then=time(1)), When(integer=2, then=time(2)), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, time(1)), (2, time(2)), (3, None), (2, time(2)), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'time') ) def test_update_url(self): CaseTestModel.objects.update( url=Case( When(integer=1, then=Value('http://1.example.com/')), When(integer=2, then=Value('http://2.example.com/')), default=Value(''), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, 'http://1.example.com/'), (2, 'http://2.example.com/'), (3, ''), (2, 'http://2.example.com/'), (3, ''), (3, ''), (4, '') ], transform=attrgetter('integer', 'url') ) def test_update_uuid(self): CaseTestModel.objects.update( uuid=Case( When(integer=1, then=UUID('11111111111111111111111111111111')), When(integer=2, then=UUID('22222222222222222222222222222222')), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [ (1, UUID('11111111111111111111111111111111')), (2, UUID('22222222222222222222222222222222')), (3, None), (2, UUID('22222222222222222222222222222222')), (3, None), (3, None), (4, None), ], transform=attrgetter('integer', 'uuid') ) def test_update_fk(self): obj1, obj2 = CaseTestModel.objects.all()[:2] CaseTestModel.objects.update( fk=Case( When(integer=1, then=obj1.pk), When(integer=2, then=obj2.pk), ), ) self.assertQuerysetEqual( CaseTestModel.objects.all().order_by('pk'), [(1, obj1.pk), (2, obj2.pk), (3, None), (2, obj2.pk), (3, None), (3, None), (4, None)], transform=attrgetter('integer', 'fk_id') ) def test_lookup_in_condition(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer__lt=2, then=Value('less than 2')), When(integer__gt=2, then=Value('greater than 2')), default=Value('equal to 2'), ), ).order_by('pk'), [ (1, 'less than 2'), (2, 'equal to 2'), (3, 'greater than 2'), (2, 'equal to 2'), (3, 'greater than 2'), (3, 'greater than 2'), (4, 'greater than 2') ], transform=attrgetter('integer', 'test') ) def test_lookup_different_fields(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(integer=2, integer2=3, then=Value('when')), default=Value('default'), ), ).order_by('pk'), [ (1, 1, 'default'), (2, 3, 'when'), (3, 4, 'default'), (2, 2, 'default'), (3, 4, 'default'), (3, 3, 'default'), (4, 5, 'default') ], transform=attrgetter('integer', 'integer2', 'test') ) def test_combined_q_object(self): self.assertQuerysetEqual( CaseTestModel.objects.annotate( test=Case( When(Q(integer=2) | Q(integer2=3), then=Value('when')), default=Value('default'), ), ).order_by('pk'), [ (1, 1, 'default'), (2, 3, 'when'), (3, 4, 'default'), (2, 2, 'when'), (3, 4, 'default'), (3, 3, 'when'), (4, 5, 'default') ], transform=attrgetter('integer', 'integer2', 'test') ) def 
test_order_by_conditional_implicit(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer__lte=2).annotate(test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, )).order_by('test', 'pk'), [(2, 1), (2, 1), (1, 2)], transform=attrgetter('integer', 'test') ) def test_order_by_conditional_explicit(self): self.assertQuerysetEqual( CaseTestModel.objects.filter(integer__lte=2).annotate(test=Case( When(integer=1, then=2), When(integer=2, then=1), default=3, )).order_by(F('test').asc(), 'pk'), [(2, 1), (2, 1), (1, 2)], transform=attrgetter('integer', 'test') ) def test_join_promotion(self): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') # Testing that: # 1. There isn't any object on the remote side of the fk_rel # relation. If the query used inner joins, then the join to fk_rel # would remove o from the results. So, in effect we are testing that # we are promoting the fk_rel join to a left outer join here. # 2. The default value of 3 is generated for the case expression. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__pk=1, then=2), default=3, ), ), [(o, 3)], lambda x: (x, x.foo) ) # Now 2 should be generated, as the fk_rel is null. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__isnull=True, then=2), default=3, ), ), [(o, 2)], lambda x: (x, x.foo) ) def test_join_promotion_multiple_annotations(self): o = CaseTestModel.objects.create(integer=1, integer2=1, string='1') # Testing that: # 1. There isn't any object on the remote side of the fk_rel # relation. If the query used inner joins, then the join to fk_rel # would remove o from the results. So, in effect we are testing that # we are promoting the fk_rel join to a left outer join here. # 2. The default value of 3 is generated for the case expression. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__pk=1, then=2), default=3, ), bar=Case( When(fk_rel__pk=1, then=4), default=5, ), ), [(o, 3, 5)], lambda x: (x, x.foo, x.bar) ) # Now 2 should be generated, as the fk_rel is null. self.assertQuerysetEqual( CaseTestModel.objects.filter(pk=o.pk).annotate( foo=Case( When(fk_rel__isnull=True, then=2), default=3, ), bar=Case( When(fk_rel__isnull=True, then=4), default=5, ), ), [(o, 2, 4)], lambda x: (x, x.foo, x.bar) ) def test_m2m_exclude(self): CaseTestModel.objects.create(integer=10, integer2=1, string='1') qs = CaseTestModel.objects.values_list('id', 'integer').annotate( cnt=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), ), ).order_by('integer') # The first o has 2 as its fk_rel__integer=1, thus it hits the # default=2 case. The other ones have 2 as the result as they have 2 # fk_rel objects, except for integer=4 and integer=10 (created above). # The integer=4 case has one integer, thus the result is 1, and # integer=10 doesn't have any and this too generates 1 (instead of 0) # as ~Q() also matches nulls. self.assertQuerysetEqual( qs, [(1, 2), (2, 2), (2, 2), (3, 2), (3, 2), (3, 2), (4, 1), (10, 1)], lambda x: x[1:] ) def test_m2m_reuse(self): CaseTestModel.objects.create(integer=10, integer2=1, string='1') # Need to use values before annotate so that Oracle will not group # by fields it isn't capable of grouping by. 
qs = CaseTestModel.objects.values_list('id', 'integer').annotate( cnt=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), ), ).annotate( cnt2=Sum( Case(When(~Q(fk_rel__integer=1), then=1), default=2), ), ).order_by('integer') self.assertEqual(str(qs.query).count(' JOIN '), 1) self.assertQuerysetEqual( qs, [(1, 2, 2), (2, 2, 2), (2, 2, 2), (3, 2, 2), (3, 2, 2), (3, 2, 2), (4, 1, 1), (10, 1, 1)], lambda x: x[1:] ) def test_aggregation_empty_cases(self): tests = [ # Empty cases and default. (Case(output_field=IntegerField()), None), # Empty cases and a constant default. (Case(default=Value('empty')), 'empty'), # Empty cases and column in the default. (Case(default=F('url')), ''), ] for case, value in tests: with self.subTest(case=case): self.assertQuerysetEqual( CaseTestModel.objects.values('string').annotate( case=case, integer_sum=Sum('integer'), ).order_by('string'), [ ('1', value, 1), ('2', value, 4), ('3', value, 9), ('4', value, 4), ], transform=itemgetter('string', 'case', 'integer_sum'), ) class CaseDocumentationExamples(TestCase): @classmethod def setUpTestData(cls): Client.objects.create( name='Jane Doe', account_type=Client.REGULAR, registered_on=date.today() - timedelta(days=36), ) Client.objects.create( name='James Smith', account_type=Client.GOLD, registered_on=date.today() - timedelta(days=5), ) Client.objects.create( name='Jack Black', account_type=Client.PLATINUM, registered_on=date.today() - timedelta(days=10 * 365), ) def test_simple_example(self): self.assertQuerysetEqual( Client.objects.annotate( discount=Case( When(account_type=Client.GOLD, then=Value('5%')), When(account_type=Client.PLATINUM, then=Value('10%')), default=Value('0%'), ), ).order_by('pk'), [('Jane Doe', '0%'), ('James Smith', '5%'), ('Jack Black', '10%')], transform=attrgetter('name', 'discount') ) def test_lookup_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) self.assertQuerysetEqual( Client.objects.annotate( discount=Case( When(registered_on__lte=a_year_ago, then=Value('10%')), When(registered_on__lte=a_month_ago, then=Value('5%')), default=Value('0%'), ), ).order_by('pk'), [('Jane Doe', '5%'), ('James Smith', '0%'), ('Jack Black', '10%')], transform=attrgetter('name', 'discount') ) def test_conditional_update_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) Client.objects.update( account_type=Case( When(registered_on__lte=a_year_ago, then=Value(Client.PLATINUM)), When(registered_on__lte=a_month_ago, then=Value(Client.GOLD)), default=Value(Client.REGULAR), ), ) self.assertQuerysetEqual( Client.objects.all().order_by('pk'), [('Jane Doe', 'G'), ('James Smith', 'R'), ('Jack Black', 'P')], transform=attrgetter('name', 'account_type') ) def test_conditional_aggregation_example(self): Client.objects.create( name='Jean Grey', account_type=Client.REGULAR, registered_on=date.today(), ) Client.objects.create( name='James Bond', account_type=Client.PLATINUM, registered_on=date.today(), ) Client.objects.create( name='Jane Porter', account_type=Client.PLATINUM, registered_on=date.today(), ) self.assertEqual( Client.objects.aggregate( regular=Count('pk', filter=Q(account_type=Client.REGULAR)), gold=Count('pk', filter=Q(account_type=Client.GOLD)), platinum=Count('pk', filter=Q(account_type=Client.PLATINUM)), ), {'regular': 2, 'gold': 1, 'platinum': 3} ) # This was the example before the filter argument was added. 
self.assertEqual( Client.objects.aggregate( regular=Sum(Case( When(account_type=Client.REGULAR, then=1), )), gold=Sum(Case( When(account_type=Client.GOLD, then=1), )), platinum=Sum(Case( When(account_type=Client.PLATINUM, then=1), )), ), {'regular': 2, 'gold': 1, 'platinum': 3} ) def test_filter_example(self): a_month_ago = date.today() - timedelta(days=30) a_year_ago = date.today() - timedelta(days=365) self.assertQuerysetEqual( Client.objects.filter( registered_on__lte=Case( When(account_type=Client.GOLD, then=a_month_ago), When(account_type=Client.PLATINUM, then=a_year_ago), ), ), [('Jack Black', 'P')], transform=attrgetter('name', 'account_type') ) def test_hash(self): expression_1 = Case( When(account_type__in=[Client.REGULAR, Client.GOLD], then=1), default=2, output_field=IntegerField(), ) expression_2 = Case( When(account_type__in=(Client.REGULAR, Client.GOLD), then=1), default=2, output_field=IntegerField(), ) expression_3 = Case(When(account_type__in=[Client.REGULAR, Client.GOLD], then=1), default=2) expression_4 = Case(When(account_type__in=[Client.PLATINUM, Client.GOLD], then=2), default=1) self.assertEqual(hash(expression_1), hash(expression_2)) self.assertNotEqual(hash(expression_2), hash(expression_3)) self.assertNotEqual(hash(expression_1), hash(expression_4)) self.assertNotEqual(hash(expression_3), hash(expression_4)) class CaseWhenTests(SimpleTestCase): def test_only_when_arguments(self): msg = 'Positional arguments must all be When objects.' with self.assertRaisesMessage(TypeError, msg): Case(When(Q(pk__in=[])), object()) def test_invalid_when_constructor_args(self): msg = ( 'When() supports a Q object, a boolean expression, or lookups as ' 'a condition.' ) with self.assertRaisesMessage(TypeError, msg): When(condition=object()) with self.assertRaisesMessage(TypeError, msg): When(condition=Value(1)) with self.assertRaisesMessage(TypeError, msg): When(Value(1), string='1') with self.assertRaisesMessage(TypeError, msg): When() def test_empty_q_object(self): msg = "An empty Q() can't be used as a When() condition." with self.assertRaisesMessage(ValueError, msg): When(Q(), then=Value(True))
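# Editor's illustration (a hedged sketch, not part of the upstream test file): the
# CaseDocumentationExamples.test_conditional_aggregation_example above exercises two
# equivalent ways of counting rows per account type. The helper below is hypothetical
# and is never called by the tests; it only lays the Count(filter=...) form and the
# older Sum(Case(When(...), then=1)) form side by side, assuming the same Client model
# imported at the top of this file.
def _conditional_aggregation_sketch():
    from django.db.models import Case, Count, Q, Sum, When

    # Form with the aggregate's filter argument (available since Django 2.0).
    with_filter = Client.objects.aggregate(
        regular=Count('pk', filter=Q(account_type=Client.REGULAR)),
        gold=Count('pk', filter=Q(account_type=Client.GOLD)),
        platinum=Count('pk', filter=Q(account_type=Client.PLATINUM)),
    )
    # Older form: emit 1 for matching rows (NULL otherwise) and sum the 1s.
    with_case = Client.objects.aggregate(
        regular=Sum(Case(When(account_type=Client.REGULAR, then=1))),
        gold=Sum(Case(When(account_type=Client.GOLD, then=1))),
        platinum=Sum(Case(When(account_type=Client.PLATINUM, then=1))),
    )
    return with_filter, with_case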
4961e71a2c6bdd92e24fb0f9420f55518c7dd651dae8f022b7241df14482edfc
import io import itertools import os import sys import tempfile from unittest import skipIf from django.core.files.base import ContentFile from django.http import FileResponse from django.test import SimpleTestCase class UnseekableBytesIO(io.BytesIO): def seekable(self): return False class FileResponseTests(SimpleTestCase): def test_content_length_file(self): response = FileResponse(open(__file__, 'rb')) response.close() self.assertEqual(response.headers['Content-Length'], str(os.path.getsize(__file__))) def test_content_length_buffer(self): response = FileResponse(io.BytesIO(b'binary content')) self.assertEqual(response.headers['Content-Length'], '14') def test_content_length_nonzero_starting_position_file(self): file = open(__file__, 'rb') file.seek(10) response = FileResponse(file) response.close() self.assertEqual(response.headers['Content-Length'], str(os.path.getsize(__file__) - 10)) def test_content_length_nonzero_starting_position_buffer(self): test_tuples = ( ('BytesIO', io.BytesIO), ('UnseekableBytesIO', UnseekableBytesIO), ) for buffer_class_name, BufferClass in test_tuples: with self.subTest(buffer_class_name=buffer_class_name): buffer = BufferClass(b'binary content') buffer.seek(10) response = FileResponse(buffer) self.assertEqual(response.headers['Content-Length'], '4') def test_content_length_nonzero_starting_position_file_seekable_no_tell(self): class TestFile: def __init__(self, path, *args, **kwargs): self._file = open(path, *args, **kwargs) def read(self, n_bytes=-1): return self._file.read(n_bytes) def seek(self, offset, whence=io.SEEK_SET): return self._file.seek(offset, whence) def seekable(self): return True @property def name(self): return self._file.name def close(self): if self._file: self._file.close() self._file = None def __enter__(self): return self def __exit__(self, e_type, e_val, e_tb): self.close() file = TestFile(__file__, 'rb') file.seek(10) response = FileResponse(file) response.close() self.assertEqual(response.headers['Content-Length'], str(os.path.getsize(__file__) - 10)) def test_content_type_file(self): response = FileResponse(open(__file__, 'rb')) response.close() self.assertIn(response.headers['Content-Type'], ['text/x-python', 'text/plain']) def test_content_type_buffer(self): response = FileResponse(io.BytesIO(b'binary content')) self.assertEqual(response.headers['Content-Type'], 'application/octet-stream') def test_content_type_buffer_explicit(self): response = FileResponse(io.BytesIO(b'binary content'), content_type='video/webm') self.assertEqual(response.headers['Content-Type'], 'video/webm') def test_content_type_buffer_explicit_default(self): response = FileResponse(io.BytesIO(b'binary content'), content_type='text/html') self.assertEqual(response.headers['Content-Type'], 'text/html') def test_content_type_buffer_named(self): test_tuples = ( (__file__, ['text/x-python', 'text/plain']), (__file__ + 'nosuchfile', ['application/octet-stream']), ('test_fileresponse.py', ['text/x-python', 'text/plain']), ('test_fileresponse.pynosuchfile', ['application/octet-stream']), ) for filename, content_types in test_tuples: with self.subTest(filename=filename): buffer = io.BytesIO(b'binary content') buffer.name = filename response = FileResponse(buffer) self.assertIn(response.headers['Content-Type'], content_types) def test_content_disposition_file(self): filenames = ( ('', 'test_fileresponse.py'), ('custom_name.py', 'custom_name.py'), ) dispositions = ( (False, 'inline'), (True, 'attachment'), ) for (filename, header_filename), (as_attachment, 
header_disposition) in itertools.product( filenames, dispositions ): with self.subTest(filename=filename, disposition=header_disposition): response = FileResponse(open(__file__, 'rb'), filename=filename, as_attachment=as_attachment) response.close() self.assertEqual( response.headers['Content-Disposition'], '%s; filename="%s"' % (header_disposition, header_filename), ) def test_content_disposition_buffer(self): response = FileResponse(io.BytesIO(b'binary content')) self.assertFalse(response.has_header('Content-Disposition')) def test_content_disposition_buffer_attachment(self): response = FileResponse(io.BytesIO(b'binary content'), as_attachment=True) self.assertEqual(response.headers['Content-Disposition'], 'attachment') def test_content_disposition_buffer_explicit_filename(self): dispositions = ( (False, 'inline'), (True, 'attachment'), ) for as_attachment, header_disposition in dispositions: response = FileResponse( io.BytesIO(b'binary content'), as_attachment=as_attachment, filename='custom_name.py', ) self.assertEqual( response.headers['Content-Disposition'], '%s; filename="custom_name.py"' % header_disposition, ) def test_response_buffer(self): response = FileResponse(io.BytesIO(b'binary content')) self.assertEqual(list(response), [b'binary content']) def test_response_nonzero_starting_position(self): test_tuples = ( ('BytesIO', io.BytesIO), ('UnseekableBytesIO', UnseekableBytesIO), ) for buffer_class_name, BufferClass in test_tuples: with self.subTest(buffer_class_name=buffer_class_name): buffer = BufferClass(b'binary content') buffer.seek(10) response = FileResponse(buffer) self.assertEqual(list(response), [b'tent']) def test_buffer_explicit_absolute_filename(self): """ Headers are set correctly with a buffer when an absolute filename is provided. """ response = FileResponse(io.BytesIO(b'binary content'), filename=__file__) self.assertEqual(response.headers['Content-Length'], '14') self.assertEqual(response.headers['Content-Disposition'], 'inline; filename="test_fileresponse.py"') @skipIf(sys.platform == 'win32', "Named pipes are Unix-only.") def test_file_from_named_pipe_response(self): with tempfile.TemporaryDirectory() as temp_dir: pipe_file = os.path.join(temp_dir, 'named_pipe') os.mkfifo(pipe_file) pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK) with open(pipe_file, 'wb') as pipe_for_write: pipe_for_write.write(b'binary content') response = FileResponse(os.fdopen(pipe_for_read, mode='rb')) response_content = list(response) response.close() self.assertEqual(response_content, [b'binary content']) self.assertFalse(response.has_header('Content-Length')) def test_compressed_response(self): """ If compressed responses are served with the uncompressed Content-Type and a compression Content-Encoding, browsers might automatically uncompress the file, which is most probably not wanted. 
""" test_tuples = ( ('.tar.gz', 'application/gzip'), ('.tar.bz2', 'application/x-bzip'), ('.tar.xz', 'application/x-xz'), ) for extension, mimetype in test_tuples: with self.subTest(ext=extension): with tempfile.NamedTemporaryFile(suffix=extension) as tmp: response = FileResponse(tmp) self.assertEqual(response.headers['Content-Type'], mimetype) self.assertFalse(response.has_header('Content-Encoding')) def test_unicode_attachment(self): response = FileResponse( ContentFile(b'binary content', name="祝您平安.odt"), as_attachment=True, content_type='application/vnd.oasis.opendocument.text', ) self.assertEqual( response.headers['Content-Type'], 'application/vnd.oasis.opendocument.text', ) self.assertEqual( response.headers['Content-Disposition'], "attachment; filename*=utf-8''%E7%A5%9D%E6%82%A8%E5%B9%B3%E5%AE%89.odt", ) def test_repr(self): response = FileResponse(io.BytesIO(b'binary content')) self.assertEqual( repr(response), '<FileResponse status_code=200, "application/octet-stream">', )
d05c64a77a46159d50ac0aa662873cf778ca84d4a3fc0d42e245411e8c3edbe2
import json from django.contrib.contenttypes.fields import GenericForeignKey from django.db import models from django.test import TestCase from django.test.utils import isolate_apps from .models import Answer, Post, Question @isolate_apps('contenttypes_tests') class GenericForeignKeyTests(TestCase): def test_str(self): class Model(models.Model): field = GenericForeignKey() self.assertEqual(str(Model.field), 'contenttypes_tests.Model.field') def test_get_content_type_no_arguments(self): with self.assertRaisesMessage(Exception, 'Impossible arguments to GFK.get_content_type!'): Answer.question.get_content_type() def test_incorrect_get_prefetch_queryset_arguments(self): with self.assertRaisesMessage(ValueError, "Custom queryset can't be used for this lookup."): Answer.question.get_prefetch_queryset(Answer.objects.all(), Answer.objects.all()) def test_get_object_cache_respects_deleted_objects(self): question = Question.objects.create(text='Who?') post = Post.objects.create(title='Answer', parent=question) question_pk = question.pk Question.objects.all().delete() post = Post.objects.get(pk=post.pk) with self.assertNumQueries(1): self.assertEqual(post.object_id, question_pk) self.assertIsNone(post.parent) self.assertIsNone(post.parent) class GenericRelationTests(TestCase): def test_value_to_string(self): question = Question.objects.create(text='test') answer1 = Answer.objects.create(question=question) answer2 = Answer.objects.create(question=question) result = json.loads(Question.answer_set.field.value_to_string(question)) self.assertCountEqual(result, [answer1.pk, answer2.pk])
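# Editor's illustration (a hedged sketch, not part of the upstream test file): the tests
# above rely on models wired up with the standard GenericForeignKey triple. The helper
# below shows that layout with a hypothetical TaggedNote model; isolate_apps (already
# imported in this module) keeps the throwaway model out of the real app registry.
def _generic_fk_model_sketch():
    from django.contrib.contenttypes.models import ContentType

    with isolate_apps('contenttypes_tests'):
        class TaggedNote(models.Model):
            # The concrete pair of columns that back the generic relation...
            content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
            object_id = models.PositiveIntegerField()
            # ...and the virtual field that resolves them to a target instance.
            parent = GenericForeignKey('content_type', 'object_id')

    return TaggedNote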
136b9c69d957ca6a24d340c715540af681939a3f55da83d3fe70c6e782a91b72
""" Regression tests for the Test Client, especially the customized assertions. """ import itertools import os from django.contrib.auth.models import User from django.contrib.auth.signals import user_logged_in, user_logged_out from django.http import HttpResponse from django.template import ( Context, RequestContext, TemplateSyntaxError, engines, ) from django.template.response import SimpleTemplateResponse from django.test import ( Client, SimpleTestCase, TestCase, modify_settings, override_settings, ) from django.test.client import RedirectCycleError, RequestFactory, encode_file from django.test.utils import ContextList from django.urls import NoReverseMatch, reverse from django.utils.translation import gettext_lazy from .models import CustomUser from .views import CustomTestException class TestDataMixin: @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password') cls.staff = User.objects.create_user(username='staff', password='password', is_staff=True) @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertContainsTests(SimpleTestCase): def test_contains(self): "Responses can be inspected for content, including counting repeated substrings" response = self.client.get('/no_template_view/') self.assertNotContains(response, 'never') self.assertContains(response, 'never', 0) self.assertContains(response, 'once') self.assertContains(response, 'once', 1) self.assertContains(response, 'twice') self.assertContains(response, 'twice', 2) try: self.assertContains(response, 'text', status_code=999) except AssertionError as e: self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertContains(response, 'text', status_code=999, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'text', status_code=999) except AssertionError as e: self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'text', status_code=999, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'once') except AssertionError as e: self.assertIn("Response should not contain 'once'", str(e)) try: self.assertNotContains(response, 'once', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response should not contain 'once'", str(e)) try: self.assertContains(response, 'never', 1) except AssertionError as e: self.assertIn("Found 0 instances of 'never' in response (expected 1)", str(e)) try: self.assertContains(response, 'never', 1, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 0 instances of 'never' in response (expected 1)", str(e)) try: self.assertContains(response, 'once', 0) except AssertionError as e: self.assertIn("Found 1 instances of 'once' in response (expected 0)", str(e)) try: self.assertContains(response, 'once', 0, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 1 instances of 'once' in response (expected 0)", str(e)) try: self.assertContains(response, 'once', 2) except AssertionError as e: self.assertIn("Found 1 instances of 'once' in response (expected 2)", str(e)) try: self.assertContains(response, 'once', 2, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 1 instances of 'once' in response (expected 2)", 
str(e)) try: self.assertContains(response, 'twice', 1) except AssertionError as e: self.assertIn("Found 2 instances of 'twice' in response (expected 1)", str(e)) try: self.assertContains(response, 'twice', 1, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 2 instances of 'twice' in response (expected 1)", str(e)) try: self.assertContains(response, 'thrice') except AssertionError as e: self.assertIn("Couldn't find 'thrice' in response", str(e)) try: self.assertContains(response, 'thrice', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't find 'thrice' in response", str(e)) try: self.assertContains(response, 'thrice', 3) except AssertionError as e: self.assertIn("Found 0 instances of 'thrice' in response (expected 3)", str(e)) try: self.assertContains(response, 'thrice', 3, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 0 instances of 'thrice' in response (expected 3)", str(e)) def test_unicode_contains(self): "Unicode characters can be found in template context" # Regression test for #10183 r = self.client.get('/check_unicode/') self.assertContains(r, 'さかき') self.assertContains(r, b'\xe5\xb3\xa0'.decode()) def test_unicode_not_contains(self): "Unicode characters can be searched for, and not found in template context" # Regression test for #10183 r = self.client.get('/check_unicode/') self.assertNotContains(r, 'はたけ') self.assertNotContains(r, b'\xe3\x81\xaf\xe3\x81\x9f\xe3\x81\x91'.decode()) def test_binary_contains(self): r = self.client.get('/check_binary/') self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e') with self.assertRaises(AssertionError): self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e', count=2) def test_binary_not_contains(self): r = self.client.get('/check_binary/') self.assertNotContains(r, b'%ODF-1.4\r\n%\x93\x8c\x8b\x9e') with self.assertRaises(AssertionError): self.assertNotContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e') def test_nontext_contains(self): r = self.client.get('/no_template_view/') self.assertContains(r, gettext_lazy('once')) def test_nontext_not_contains(self): r = self.client.get('/no_template_view/') self.assertNotContains(r, gettext_lazy('never')) def test_assert_contains_renders_template_response(self): """ An unrendered SimpleTemplateResponse may be used in assertContains(). """ template = engines['django'].from_string('Hello') response = SimpleTemplateResponse(template) self.assertContains(response, 'Hello') def test_assert_contains_using_non_template_response(self): """ auto-rendering does not affect responses that aren't instances (or subclasses) of SimpleTemplateResponse. Refs #15826. """ response = HttpResponse('Hello') self.assertContains(response, 'Hello') def test_assert_not_contains_renders_template_response(self): """ An unrendered SimpleTemplateResponse may be used in assertNotContains(). """ template = engines['django'].from_string('Hello') response = SimpleTemplateResponse(template) self.assertNotContains(response, 'Bye') def test_assert_not_contains_using_non_template_response(self): """ auto-rendering does not affect responses that aren't instances (or subclasses) of SimpleTemplateResponse. 
""" response = HttpResponse('Hello') self.assertNotContains(response, 'Bye') @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertTemplateUsedTests(TestDataMixin, TestCase): def test_no_context(self): "Template usage assertions work then templates aren't in use" response = self.client.get('/no_template_view/') # The no template case doesn't mess with the template assertions self.assertTemplateNotUsed(response, 'GET Template') try: self.assertTemplateUsed(response, 'GET Template') except AssertionError as e: self.assertIn("No templates used to render the response", str(e)) try: self.assertTemplateUsed(response, 'GET Template', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: No templates used to render the response", str(e)) msg = 'No templates used to render the response' with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateUsed(response, 'GET Template', count=2) def test_single_context(self): "Template assertions work when there is a single context" response = self.client.get('/post_view/', {}) msg = ( ": Template 'Empty GET Template' was used unexpectedly in " "rendering the response" ) with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateNotUsed(response, 'Empty GET Template') with self.assertRaisesMessage(AssertionError, 'abc' + msg): self.assertTemplateNotUsed(response, 'Empty GET Template', msg_prefix='abc') msg = ( ": Template 'Empty POST Template' was not a template used to " "render the response. Actual template(s) used: Empty GET Template" ) with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateUsed(response, 'Empty POST Template') with self.assertRaisesMessage(AssertionError, 'abc' + msg): self.assertTemplateUsed(response, 'Empty POST Template', msg_prefix='abc') msg = ( ": Template 'Empty GET Template' was expected to be rendered 2 " "time(s) but was actually rendered 1 time(s)." ) with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateUsed(response, 'Empty GET Template', count=2) with self.assertRaisesMessage(AssertionError, 'abc' + msg): self.assertTemplateUsed(response, 'Empty GET Template', msg_prefix='abc', count=2) def test_multiple_context(self): "Template assertions work when there are multiple contexts" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data OK') msg = "Template '%s' was used unexpectedly in rendering the response" with self.assertRaisesMessage(AssertionError, msg % 'form_view.html'): self.assertTemplateNotUsed(response, "form_view.html") with self.assertRaisesMessage(AssertionError, msg % 'base.html'): self.assertTemplateNotUsed(response, 'base.html') msg = ( "Template 'Valid POST Template' was not a template used to render " "the response. Actual template(s) used: form_view.html, base.html" ) with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateUsed(response, "Valid POST Template") msg = ( "Template 'base.html' was expected to be rendered 2 time(s) but " "was actually rendered 1 time(s)." 
) with self.assertRaisesMessage(AssertionError, msg): self.assertTemplateUsed(response, 'base.html', count=2) def test_template_rendered_multiple_times(self): """Template assertions work when a template is rendered multiple times.""" response = self.client.get('/render_template_multiple_times/') self.assertTemplateUsed(response, 'base.html', count=2) @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertRedirectsTests(SimpleTestCase): def test_redirect_page(self): "An assertion is raised if the original page couldn't be retrieved as expected" # This page will redirect with code 301, not 302 response = self.client.get('/permanent_redirect_view/') try: self.assertRedirects(response, '/get_view/') except AssertionError as e: self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e)) try: self.assertRedirects(response, '/get_view/', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response didn't redirect as expected: Response code was 301 (expected 302)", str(e)) def test_lost_query(self): "An assertion is raised if the redirect location doesn't preserve GET parameters" response = self.client.get('/redirect_view/', {'var': 'value'}) try: self.assertRedirects(response, '/get_view/') except AssertionError as e: self.assertIn("Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e)) try: self.assertRedirects(response, '/get_view/', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e)) def test_incorrect_target(self): "An assertion is raised if the response redirects to another target" response = self.client.get('/permanent_redirect_view/') try: # Should redirect to get_view self.assertRedirects(response, '/some_view/') except AssertionError as e: self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e)) def test_target_page(self): "An assertion is raised if the response redirect target cannot be retrieved as expected" response = self.client.get('/double_redirect_view/') try: # The redirect target responds with a 301 code, not 200 self.assertRedirects(response, 'http://testserver/permanent_redirect_view/') except AssertionError as e: self.assertIn( "Couldn't retrieve redirection page '/permanent_redirect_view/': " "response code was 301 (expected 200)", str(e) ) try: # The redirect target responds with a 301 code, not 200 self.assertRedirects(response, 'http://testserver/permanent_redirect_view/', msg_prefix='abc') except AssertionError as e: self.assertIn( "abc: Couldn't retrieve redirection page '/permanent_redirect_view/': " "response code was 301 (expected 200)", str(e) ) def test_redirect_chain(self): "You can follow a redirect chain of multiple redirects" response = self.client.get('/redirects/further/more/', {}, follow=True) self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200) self.assertEqual(len(response.redirect_chain), 1) self.assertEqual(response.redirect_chain[0], ('/no_template_view/', 302)) def test_multiple_redirect_chain(self): "You can follow a redirect chain of multiple redirects" response = self.client.get('/redirects/', {}, follow=True) self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200) self.assertEqual(len(response.redirect_chain), 3) self.assertEqual(response.redirect_chain[0], ('/redirects/further/', 302)) self.assertEqual(response.redirect_chain[1], 
('/redirects/further/more/', 302)) self.assertEqual(response.redirect_chain[2], ('/no_template_view/', 302)) def test_redirect_chain_to_non_existent(self): "You can follow a chain to a nonexistent view." response = self.client.get('/redirect_to_non_existent_view2/', {}, follow=True) self.assertRedirects(response, '/non_existent_view/', status_code=302, target_status_code=404) def test_redirect_chain_to_self(self): "Redirections to self are caught and escaped" with self.assertRaises(RedirectCycleError) as context: self.client.get('/redirect_to_self/', {}, follow=True) response = context.exception.last_response # The chain of redirects stops once the cycle is detected. self.assertRedirects(response, '/redirect_to_self/', status_code=302, target_status_code=302) self.assertEqual(len(response.redirect_chain), 2) def test_redirect_to_self_with_changing_query(self): "Redirections don't loop forever even if query is changing" with self.assertRaises(RedirectCycleError): self.client.get('/redirect_to_self_with_changing_query_view/', {'counter': '0'}, follow=True) def test_circular_redirect(self): "Circular redirect chains are caught and escaped" with self.assertRaises(RedirectCycleError) as context: self.client.get('/circular_redirect_1/', {}, follow=True) response = context.exception.last_response # The chain of redirects will get back to the starting point, but stop there. self.assertRedirects(response, '/circular_redirect_2/', status_code=302, target_status_code=302) self.assertEqual(len(response.redirect_chain), 4) def test_redirect_chain_post(self): "A redirect chain will be followed from an initial POST request" response = self.client.post('/redirects/', {'nothing': 'to_send'}, follow=True) self.assertRedirects(response, '/no_template_view/', 302, 200) self.assertEqual(len(response.redirect_chain), 3) def test_redirect_chain_head(self): "A redirect chain will be followed from an initial HEAD request" response = self.client.head('/redirects/', {'nothing': 'to_send'}, follow=True) self.assertRedirects(response, '/no_template_view/', 302, 200) self.assertEqual(len(response.redirect_chain), 3) def test_redirect_chain_options(self): "A redirect chain will be followed from an initial OPTIONS request" response = self.client.options('/redirects/', follow=True) self.assertRedirects(response, '/no_template_view/', 302, 200) self.assertEqual(len(response.redirect_chain), 3) def test_redirect_chain_put(self): "A redirect chain will be followed from an initial PUT request" response = self.client.put('/redirects/', follow=True) self.assertRedirects(response, '/no_template_view/', 302, 200) self.assertEqual(len(response.redirect_chain), 3) def test_redirect_chain_delete(self): "A redirect chain will be followed from an initial DELETE request" response = self.client.delete('/redirects/', follow=True) self.assertRedirects(response, '/no_template_view/', 302, 200) self.assertEqual(len(response.redirect_chain), 3) @modify_settings(ALLOWED_HOSTS={'append': 'otherserver'}) def test_redirect_to_different_host(self): "The test client will preserve scheme, host and port changes" response = self.client.get('/redirect_other_host/', follow=True) self.assertRedirects( response, 'https://otherserver:8443/no_template_view/', status_code=302, target_status_code=200 ) # We can't use is_secure() or get_host() # because response.request is a dictionary, not an HttpRequest self.assertEqual(response.request.get('wsgi.url_scheme'), 'https') self.assertEqual(response.request.get('SERVER_NAME'), 'otherserver')
self.assertEqual(response.request.get('SERVER_PORT'), '8443') # assertRedirects() can follow redirect to 'otherserver' too. response = self.client.get('/redirect_other_host/', follow=False) self.assertRedirects( response, 'https://otherserver:8443/no_template_view/', status_code=302, target_status_code=200 ) def test_redirect_chain_on_non_redirect_page(self): "An assertion is raised if the original page couldn't be retrieved as expected" # This page doesn't redirect at all; it responds with code 200, not 302. response = self.client.get('/get_view/', follow=True) try: self.assertRedirects(response, '/get_view/') except AssertionError as e: self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e)) try: self.assertRedirects(response, '/get_view/', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e)) def test_redirect_on_non_redirect_page(self): "An assertion is raised if the original page couldn't be retrieved as expected" # This page doesn't redirect at all; it responds with code 200, not 302. response = self.client.get('/get_view/') try: self.assertRedirects(response, '/get_view/') except AssertionError as e: self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e)) try: self.assertRedirects(response, '/get_view/', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e)) def test_redirect_scheme(self): "An assertion is raised if the response doesn't have the scheme specified in expected_url" # For all possible True/False combinations of follow and secure for follow, secure in itertools.product([True, False], repeat=2): # always redirects to https response = self.client.get('/https_redirect_view/', follow=follow, secure=secure) # the goal scheme is https self.assertRedirects(response, 'https://testserver/secure_view/', status_code=302) with self.assertRaises(AssertionError): self.assertRedirects(response, 'http://testserver/secure_view/', status_code=302) def test_redirect_fetch_redirect_response(self): """Preserve extra headers of requests made with django.test.Client.""" methods = ( 'get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace', ) for method in methods: with self.subTest(method=method): req_method = getattr(self.client, method) response = req_method( '/redirect_based_on_extra_headers_1/', follow=False, HTTP_REDIRECT='val', ) self.assertRedirects( response, '/redirect_based_on_extra_headers_2/', fetch_redirect_response=True, status_code=302, target_status_code=302, ) @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertFormErrorTests(SimpleTestCase): def test_unknown_form(self): "An assertion is raised if the form name is unknown" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") msg = "The form 'wrong_form' was not used to render the response" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.') with self.assertRaisesMessage(AssertionError, 'abc: ' + msg): self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.', msg_prefix='abc') def test_unknown_field(self): "An assertion is raised if the field name is
unknown" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") msg = "The form 'form' in context 0 does not contain the field 'some_field'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'some_field', 'Some error.') with self.assertRaisesMessage(AssertionError, 'abc: ' + msg): self.assertFormError(response, 'form', 'some_field', 'Some error.', msg_prefix='abc') def test_noerror_field(self): "An assertion is raised if the field doesn't have any errors" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") msg = "The field 'value' on form 'form' in context 0 contains no errors" with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'value', 'Some error.') with self.assertRaisesMessage(AssertionError, 'abc: ' + msg): self.assertFormError(response, 'form', 'value', 'Some error.', msg_prefix='abc') def test_unknown_error(self): "An assertion is raised if the field doesn't contain the provided error" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") msg = ( "The field 'email' on form 'form' in context 0 does not contain " "the error 'Some error.' (actual errors: ['Enter a valid email " "address.'])" ) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', 'email', 'Some error.') with self.assertRaisesMessage(AssertionError, 'abc: ' + msg): self.assertFormError(response, 'form', 'email', 'Some error.', msg_prefix='abc') def test_unknown_nonfield_error(self): """ An assertion is raised if the form's non field errors doesn't contain the provided error. """ post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") msg = ( "The form 'form' in context 0 does not contain the non-field " "error 'Some error.' 
(actual errors: none)" ) with self.assertRaisesMessage(AssertionError, msg): self.assertFormError(response, 'form', None, 'Some error.') with self.assertRaisesMessage(AssertionError, 'abc: ' + msg): self.assertFormError(response, 'form', None, 'Some error.', msg_prefix='abc') @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertFormsetErrorTests(SimpleTestCase): msg_prefixes = [("", {}), ("abc: ", {"msg_prefix": "abc"})] def setUp(self): """Makes response object for testing field and non-field errors""" # For testing field and non-field errors self.response_form_errors = self.getResponse({ 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-0-text': 'Raise non-field error', 'form-0-email': 'not an email address', 'form-0-value': 37, 'form-0-single': 'b', 'form-0-multi': ('b', 'c', 'e'), 'form-1-text': 'Hello World', 'form-1-email': '[email protected]', 'form-1-value': 37, 'form-1-single': 'b', 'form-1-multi': ('b', 'c', 'e'), }) # For testing non-form errors self.response_nonform_errors = self.getResponse({ 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-0-text': 'Hello World', 'form-0-email': '[email protected]', 'form-0-value': 37, 'form-0-single': 'b', 'form-0-multi': ('b', 'c', 'e'), 'form-1-text': 'Hello World', 'form-1-email': '[email protected]', 'form-1-value': 37, 'form-1-single': 'b', 'form-1-multi': ('b', 'c', 'e'), }) def getResponse(self, post_data): response = self.client.post('/formset_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") return response def test_unknown_formset(self): "An assertion is raised if the formset name is unknown" for prefix, kwargs in self.msg_prefixes: msg = prefix + "The formset 'wrong_formset' was not used to render the response" with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError( self.response_form_errors, 'wrong_formset', 0, 'Some_field', 'Some error.', **kwargs ) def test_unknown_field(self): "An assertion is raised if the field name is unknown" for prefix, kwargs in self.msg_prefixes: msg = prefix + "The formset 'my_formset', form 0 in context 0 does not contain the field 'Some_field'" with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError( self.response_form_errors, 'my_formset', 0, 'Some_field', 'Some error.', **kwargs ) def test_no_error_field(self): "An assertion is raised if the field doesn't have any errors" for prefix, kwargs in self.msg_prefixes: msg = prefix + "The field 'value' on formset 'my_formset', form 1 in context 0 contains no errors" with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(self.response_form_errors, 'my_formset', 1, 'value', 'Some error.', **kwargs) def test_unknown_error(self): "An assertion is raised if the field doesn't contain the specified error" for prefix, kwargs in self.msg_prefixes: msg = prefix + ( "The field 'email' on formset 'my_formset', form 0 " "in context 0 does not contain the error 'Some error.' 
" "(actual errors: ['Enter a valid email address.'])" ) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', 'Some error.', **kwargs) def test_field_error(self): "No assertion is raised if the field contains the provided error" error_msg = ['Enter a valid email address.'] for prefix, kwargs in self.msg_prefixes: self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', error_msg, **kwargs) def test_no_nonfield_error(self): "An assertion is raised if the formsets non-field errors doesn't contain any errors." for prefix, kwargs in self.msg_prefixes: msg = prefix + "The formset 'my_formset', form 1 in context 0 does not contain any non-field errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(self.response_form_errors, 'my_formset', 1, None, 'Some error.', **kwargs) def test_unknown_nonfield_error(self): "An assertion is raised if the formsets non-field errors doesn't contain the provided error." for prefix, kwargs in self.msg_prefixes: msg = prefix + ( "The formset 'my_formset', form 0 in context 0 does not " "contain the non-field error 'Some error.' (actual errors: " "['Non-field error.'])" ) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Some error.', **kwargs) def test_nonfield_error(self): "No assertion is raised if the formsets non-field errors contains the provided error." for prefix, kwargs in self.msg_prefixes: self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Non-field error.', **kwargs) def test_no_nonform_error(self): "An assertion is raised if the formsets non-form errors doesn't contain any errors." for prefix, kwargs in self.msg_prefixes: msg = prefix + "The formset 'my_formset' in context 0 does not contain any non-form errors." with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError(self.response_form_errors, 'my_formset', None, None, 'Some error.', **kwargs) def test_unknown_nonform_error(self): "An assertion is raised if the formsets non-form errors doesn't contain the provided error." for prefix, kwargs in self.msg_prefixes: msg = prefix + ( "The formset 'my_formset' in context 0 does not contain the " "non-form error 'Some error.' (actual errors: ['Forms in a set " "must have distinct email addresses.'])" ) with self.assertRaisesMessage(AssertionError, msg): self.assertFormsetError( self.response_nonform_errors, 'my_formset', None, None, 'Some error.', **kwargs ) def test_nonform_error(self): "No assertion is raised if the formsets non-form errors contains the provided error." msg = 'Forms in a set must have distinct email addresses.' for prefix, kwargs in self.msg_prefixes: self.assertFormsetError(self.response_nonform_errors, 'my_formset', None, None, msg, **kwargs) @override_settings(ROOT_URLCONF='test_client_regress.urls') class LoginTests(TestDataMixin, TestCase): def test_login_different_client(self): "Using a different test client doesn't violate authentication" # Create a second client, and log in. c = Client() login = c.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Get a redirection page with the second client. response = c.get("/login_protected_redirect_view/") # At this points, the self.client isn't logged in. # assertRedirects uses the original client, not the default client. 
self.assertRedirects(response, "/get_view/") @override_settings( SESSION_ENGINE='test_client_regress.session', ROOT_URLCONF='test_client_regress.urls', ) class SessionEngineTests(TestDataMixin, TestCase): def test_login(self): "A session engine that modifies the session key can be used to log in" login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Try to access a login protected page. response = self.client.get("/login_protected_view/") self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings(ROOT_URLCONF='test_client_regress.urls',) class URLEscapingTests(SimpleTestCase): def test_simple_argument_get(self): "Get a view that has a simple string argument" response = self.client.get(reverse('arg_view', args=['Slartibartfast'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Howdy, Slartibartfast') def test_argument_with_space_get(self): "Get a view that has a string argument that requires escaping" response = self.client.get(reverse('arg_view', args=['Arthur Dent'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Hi, Arthur') def test_simple_argument_post(self): "Post for a view that has a simple string argument" response = self.client.post(reverse('arg_view', args=['Slartibartfast'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Howdy, Slartibartfast') def test_argument_with_space_post(self): "Post for a view that has a string argument that requires escaping" response = self.client.post(reverse('arg_view', args=['Arthur Dent'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Hi, Arthur') @override_settings(ROOT_URLCONF='test_client_regress.urls') class ExceptionTests(TestDataMixin, TestCase): def test_exception_cleared(self): "#5836 - A stale user exception isn't re-raised by the test client." login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') with self.assertRaises(CustomTestException): self.client.get("/staff_only/") # At this point, an exception has been raised, and should be cleared. # This next operation should be successful; if it isn't we have a problem. login = self.client.login(username='staff', password='password') self.assertTrue(login, 'Could not log in') self.client.get("/staff_only/") @override_settings(ROOT_URLCONF='test_client_regress.urls') class TemplateExceptionTests(SimpleTestCase): @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'bad_templates')], }]) def test_bad_404_template(self): "Errors found when rendering 404 error templates are re-raised" with self.assertRaises(TemplateSyntaxError): self.client.get("/no_such_view/") # We need two different tests to check URLconf substitution - one to check # it was changed, and another one (without self.urls) to check it was reverted on # teardown. This pair of tests relies upon the alphabetical ordering of test execution. 
@override_settings(ROOT_URLCONF='test_client_regress.urls') class UrlconfSubstitutionTests(SimpleTestCase): def test_urlconf_was_changed(self): "TestCase can enforce a custom URLconf on a per-test basis" url = reverse('arg_view', args=['somename']) self.assertEqual(url, '/arg_view/somename/') # This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the # name is to ensure alphabetical ordering. class zzUrlconfSubstitutionTests(SimpleTestCase): def test_urlconf_was_reverted(self): """URLconf is reverted to original value after modification in a TestCase This will not find a match as the default ROOT_URLCONF is empty. """ with self.assertRaises(NoReverseMatch): reverse('arg_view', args=['somename']) @override_settings(ROOT_URLCONF='test_client_regress.urls') class ContextTests(TestDataMixin, TestCase): def test_single_context(self): "Context variables can be retrieved from a single context" response = self.client.get("/request_data/", data={'foo': 'whiz'}) self.assertIsInstance(response.context, RequestContext) self.assertIn('get-foo', response.context) self.assertEqual(response.context['get-foo'], 'whiz') self.assertEqual(response.context['data'], 'sausage') with self.assertRaisesMessage(KeyError, 'does-not-exist'): response.context['does-not-exist'] def test_inherited_context(self): "Context variables can be retrieved from a list of contexts" response = self.client.get("/request_data_extended/", data={'foo': 'whiz'}) self.assertEqual(response.context.__class__, ContextList) self.assertEqual(len(response.context), 2) self.assertIn('get-foo', response.context) self.assertEqual(response.context['get-foo'], 'whiz') self.assertEqual(response.context['data'], 'bacon') with self.assertRaisesMessage(KeyError, 'does-not-exist'): response.context['does-not-exist'] def test_contextlist_keys(self): c1 = Context() c1.update({'hello': 'world', 'goodbye': 'john'}) c1.update({'hello': 'dolly', 'dolly': 'parton'}) c2 = Context() c2.update({'goodbye': 'world', 'python': 'rocks'}) c2.update({'goodbye': 'dolly'}) k = ContextList([c1, c2]) # None, True and False are builtins of BaseContext, and present # in every Context without needing to be added. self.assertEqual({'None', 'True', 'False', 'hello', 'goodbye', 'python', 'dolly'}, k.keys()) def test_contextlist_get(self): c1 = Context({'hello': 'world', 'goodbye': 'john'}) c2 = Context({'goodbye': 'world', 'python': 'rocks'}) k = ContextList([c1, c2]) self.assertEqual(k.get('hello'), 'world') self.assertEqual(k.get('goodbye'), 'john') self.assertEqual(k.get('python'), 'rocks') self.assertEqual(k.get('nonexistent', 'default'), 'default') def test_15368(self): # Need to insert a context processor that assumes certain things about # the request instance. This triggers a bug caused by some ways of # copying RequestContext. with self.settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'test_client_regress.context_processors.special', ], }, }]): response = self.client.get("/request_context_view/") self.assertContains(response, 'Path: /request_context_view/') def test_nested_requests(self): """ response.context is not lost when a view calls another view.
""" response = self.client.get("/nested_view/") self.assertIsInstance(response.context, RequestContext) self.assertEqual(response.context['nested'], 'yes') @override_settings(ROOT_URLCONF='test_client_regress.urls') class SessionTests(TestDataMixin, TestCase): def test_session(self): "The session isn't lost if a user logs in" # The session doesn't exist to start. response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'NO') # This request sets a session variable. response = self.client.get('/set_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'set_session') # The session has been modified response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'YES') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Session should still contain the modified value response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'YES') def test_session_initiated(self): session = self.client.session session['session_var'] = 'foo' session.save() response = self.client.get('/check_session/') self.assertEqual(response.content, b'foo') def test_logout(self): """Logout should work whether the user is logged in or not (#9978).""" self.client.logout() login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') self.client.logout() self.client.logout() def test_logout_with_user(self): """Logout should send user_logged_out signal if user was logged in.""" def listener(*args, **kwargs): listener.executed = True self.assertEqual(kwargs['sender'], User) listener.executed = False user_logged_out.connect(listener) self.client.login(username='testclient', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) @override_settings(AUTH_USER_MODEL='test_client_regress.CustomUser') def test_logout_with_custom_user(self): """Logout should send user_logged_out signal if custom user was logged in.""" def listener(*args, **kwargs): self.assertEqual(kwargs['sender'], CustomUser) listener.executed = True listener.executed = False u = CustomUser.custom_objects.create(email='[email protected]') u.set_password('password') u.save() user_logged_out.connect(listener) self.client.login(username='[email protected]', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) @override_settings(AUTHENTICATION_BACKENDS=( 'django.contrib.auth.backends.ModelBackend', 'test_client_regress.auth_backends.CustomUserBackend')) def test_logout_with_custom_auth_backend(self): "Request a logout after logging in with custom authentication backend" def listener(*args, **kwargs): self.assertEqual(kwargs['sender'], CustomUser) listener.executed = True listener.executed = False u = CustomUser.custom_objects.create(email='[email protected]') u.set_password('password') u.save() user_logged_out.connect(listener) self.client.login(username='[email protected]', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) def test_logout_without_user(self): """Logout should send signal even if user not authenticated.""" def listener(user, *args, **kwargs): listener.user = user listener.executed = True listener.executed = False 
user_logged_out.connect(listener) self.client.login(username='incorrect', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) self.assertIsNone(listener.user) def test_login_with_user(self): """Login should send user_logged_in signal on successful login.""" def listener(*args, **kwargs): listener.executed = True listener.executed = False user_logged_in.connect(listener) self.client.login(username='testclient', password='password') user_logged_out.disconnect(listener) self.assertTrue(listener.executed) def test_login_without_signal(self): """Login shouldn't send signal if user wasn't logged in""" def listener(*args, **kwargs): listener.executed = True listener.executed = False user_logged_in.connect(listener) self.client.login(username='incorrect', password='password') user_logged_in.disconnect(listener) self.assertFalse(listener.executed) @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestMethodTests(SimpleTestCase): def test_get(self): "Request a view via request method GET" response = self.client.get('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: GET') def test_post(self): "Request a view via request method POST" response = self.client.post('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: POST') def test_head(self): "Request a view via request method HEAD" response = self.client.head('/request_methods/') self.assertEqual(response.status_code, 200) # A HEAD request doesn't return any content. self.assertNotEqual(response.content, b'request method: HEAD') self.assertEqual(response.content, b'') def test_options(self): "Request a view via request method OPTIONS" response = self.client.options('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: OPTIONS') def test_put(self): "Request a view via request method PUT" response = self.client.put('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: PUT') def test_delete(self): "Request a view via request method DELETE" response = self.client.delete('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: DELETE') def test_patch(self): "Request a view via request method PATCH" response = self.client.patch('/request_methods/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: PATCH') @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestMethodStringDataTests(SimpleTestCase): def test_post(self): "Request a view with string data via request method POST" # Regression test for #11371 data = '{"test": "json"}' response = self.client.post('/request_methods/', data=data, content_type='application/json') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: POST') def test_put(self): "Request a view with string data via request method PUT" # Regression test for #11371 data = '{"test": "json"}' response = self.client.put('/request_methods/', data=data, content_type='application/json') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: PUT') def test_patch(self): "Request a view with string data via request method PATCH" # Regression test for #17797 data = '{"test": "json"}' response = 
self.client.patch('/request_methods/', data=data, content_type='application/json') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'request method: PATCH') def test_empty_string_data(self): "Request a view with empty string data via request method GET/POST/HEAD" # Regression test for #21740 response = self.client.get('/body/', data='', content_type='application/json') self.assertEqual(response.content, b'') response = self.client.post('/body/', data='', content_type='application/json') self.assertEqual(response.content, b'') response = self.client.head('/body/', data='', content_type='application/json') self.assertEqual(response.content, b'') def test_json_bytes(self): response = self.client.post('/body/', data=b"{'value': 37}", content_type='application/json') self.assertEqual(response.content, b"{'value': 37}") def test_json(self): response = self.client.get('/json_response/') self.assertEqual(response.json(), {'key': 'value'}) def test_json_charset(self): response = self.client.get('/json_response_latin1/') self.assertEqual(response.charset, 'latin1') self.assertEqual(response.json(), {'a': 'Å'}) def test_json_structured_suffixes(self): valid_types = ( 'application/vnd.api+json', 'application/vnd.api.foo+json', 'application/json; charset=utf-8', 'application/activity+json', 'application/activity+json; charset=utf-8', ) for content_type in valid_types: response = self.client.get('/json_response/', {'content_type': content_type}) self.assertEqual(response.headers['Content-Type'], content_type) self.assertEqual(response.json(), {'key': 'value'}) def test_json_multiple_access(self): response = self.client.get('/json_response/') self.assertIs(response.json(), response.json()) def test_json_wrong_header(self): response = self.client.get('/body/') msg = 'Content-Type header is "text/html; charset=utf-8", not "application/json"' with self.assertRaisesMessage(ValueError, msg): self.assertEqual(response.json(), {'key': 'value'}) @override_settings(ROOT_URLCONF='test_client_regress.urls',) class QueryStringTests(SimpleTestCase): def test_get_like_requests(self): for method_name in ('get', 'head'): # A GET-like request can pass a query string as data (#10571) method = getattr(self.client, method_name) response = method("/request_data/", data={'foo': 'whiz'}) self.assertEqual(response.context['get-foo'], 'whiz') # A GET-like request can pass a query string as part of the URL response = method("/request_data/?foo=whiz") self.assertEqual(response.context['get-foo'], 'whiz') # Data provided in the URL to a GET-like request is overridden by actual form data response = method("/request_data/?foo=whiz", data={'foo': 'bang'}) self.assertEqual(response.context['get-foo'], 'bang') response = method("/request_data/?foo=whiz", data={'bar': 'bang'}) self.assertIsNone(response.context['get-foo']) self.assertEqual(response.context['get-bar'], 'bang') def test_post_like_requests(self): # A POST-like request can pass a query string as data response = self.client.post("/request_data/", data={'foo': 'whiz'}) self.assertIsNone(response.context['get-foo']) self.assertEqual(response.context['post-foo'], 'whiz') # A POST-like request can pass a query string as part of the URL response = self.client.post("/request_data/?foo=whiz") self.assertEqual(response.context['get-foo'], 'whiz') self.assertIsNone(response.context['post-foo']) # POST data provided in the URL augments actual form data response = self.client.post("/request_data/?foo=whiz", data={'foo': 'bang'}) 
self.assertEqual(response.context['get-foo'], 'whiz') self.assertEqual(response.context['post-foo'], 'bang') response = self.client.post("/request_data/?foo=whiz", data={'bar': 'bang'}) self.assertEqual(response.context['get-foo'], 'whiz') self.assertIsNone(response.context['get-bar']) self.assertIsNone(response.context['post-foo']) self.assertEqual(response.context['post-bar'], 'bang') @override_settings(ROOT_URLCONF='test_client_regress.urls') class PayloadEncodingTests(SimpleTestCase): """Regression tests for #10571.""" def test_simple_payload(self): """A simple ASCII-only text can be POSTed.""" text = 'English: mountain pass' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain') self.assertEqual(response.content, text.encode()) def test_utf8_payload(self): """Non-ASCII data encoded as UTF-8 can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-8') self.assertEqual(response.content, text.encode()) def test_utf16_payload(self): """Non-ASCII data encoded as UTF-16 can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-16') self.assertEqual(response.content, text.encode('utf-16')) def test_non_utf_payload(self): """Non-ASCII data as a non-UTF based encoding can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=koi8-r') self.assertEqual(response.content, text.encode('koi8-r')) class DummyFile: def __init__(self, filename): self.name = filename def read(self): return b'TEST_FILE_CONTENT' class UploadedFileEncodingTest(SimpleTestCase): def test_file_encoding(self): encoded_file = encode_file('TEST_BOUNDARY', 'TEST_KEY', DummyFile('test_name.bin')) self.assertEqual(b'--TEST_BOUNDARY', encoded_file[0]) self.assertEqual(b'Content-Disposition: form-data; name="TEST_KEY"; filename="test_name.bin"', encoded_file[1]) self.assertEqual(b'TEST_FILE_CONTENT', encoded_file[-1]) def test_guesses_content_type_on_file_encoding(self): self.assertEqual(b'Content-Type: application/octet-stream', encode_file('IGNORE', 'IGNORE', DummyFile("file.bin"))[2]) self.assertEqual(b'Content-Type: text/plain', encode_file('IGNORE', 'IGNORE', DummyFile("file.txt"))[2]) self.assertIn(encode_file('IGNORE', 'IGNORE', DummyFile("file.zip"))[2], ( b'Content-Type: application/x-compress', b'Content-Type: application/x-zip', b'Content-Type: application/x-zip-compressed', b'Content-Type: application/zip',)) self.assertEqual(b'Content-Type: application/octet-stream', encode_file('IGNORE', 'IGNORE', DummyFile("file.unknown"))[2]) @override_settings(ROOT_URLCONF='test_client_regress.urls',) class RequestHeadersTest(SimpleTestCase): def test_client_headers(self): "A test client can receive custom headers" response = self.client.get("/check_headers/", HTTP_X_ARG_CHECK='Testing 123') self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") self.assertEqual(response.status_code, 200) def test_client_headers_redirect(self): "Test client headers are preserved through redirects" response = self.client.get("/check_headers_redirect/", follow=True, HTTP_X_ARG_CHECK='Testing 123') self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") self.assertRedirects(response, '/check_headers/', status_code=302, target_status_code=200) @override_settings(ROOT_URLCONF='test_client_regress.urls') class ReadLimitedStreamTest(SimpleTestCase): """ HttpRequest.body, 
HttpRequest.read(), and HttpRequest.read(BUFFER) have proper LimitedStream behavior. Refs #14753, #15785 """ def test_body_from_empty_request(self): """HttpRequest.body on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/body/").content, b'') def test_read_from_empty_request(self): """HttpRequest.read() on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/read_all/").content, b'') def test_read_numbytes_from_empty_request(self): """HttpRequest.read(LARGE_BUFFER) on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/read_buffer/").content, b'') def test_read_from_nonempty_request(self): """HttpRequest.read() on a test client PUT request with some payload should return that payload.""" payload = b'foobar' self.assertEqual(self.client.put("/read_all/", data=payload, content_type='text/plain').content, payload) def test_read_numbytes_from_nonempty_request(self): """HttpRequest.read(LARGE_BUFFER) on a test client PUT request with some payload should return that payload.""" payload = b'foobar' self.assertEqual(self.client.put("/read_buffer/", data=payload, content_type='text/plain').content, payload) @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestFactoryStateTest(SimpleTestCase): """Regression tests for #15929.""" # These tests are checking that certain middleware don't change certain # global state. Alternatively, from the point of view of a test, they are # ensuring test isolation behavior. So, unusually, it doesn't make sense to # run the tests individually, and if any are failing it is confusing to run # them with any other set of tests. def common_test_that_should_always_pass(self): request = RequestFactory().get('/') request.session = {} self.assertFalse(hasattr(request, 'user')) def test_request(self): self.common_test_that_should_always_pass() def test_request_after_client(self): # apart from the next line the three tests are identical self.client.get('/') self.common_test_that_should_always_pass() def test_request_after_client_2(self): # This test is executed after the previous one self.common_test_that_should_always_pass() @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestFactoryEnvironmentTests(SimpleTestCase): """ Regression tests for #8551 and #17067: ensure that environment variables are set correctly in RequestFactory. """ def test_should_set_correct_env_variables(self): request = RequestFactory().get('/path/') self.assertEqual(request.META.get('REMOTE_ADDR'), '127.0.0.1') self.assertEqual(request.META.get('SERVER_NAME'), 'testserver') self.assertEqual(request.META.get('SERVER_PORT'), '80') self.assertEqual(request.META.get('SERVER_PROTOCOL'), 'HTTP/1.1') self.assertEqual(request.META.get('SCRIPT_NAME') + request.META.get('PATH_INFO'), '/path/') def test_cookies(self): factory = RequestFactory() factory.cookies.load('A="B"; C="D"; Path=/; Version=1') request = factory.get('/') self.assertEqual(request.META['HTTP_COOKIE'], 'A="B"; C="D"')
5c2397278cab9c0e34147e9c511a7c14ef694c38fcac7546ea38de2d59a93262
from django.apps import apps
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_lru_cache


class FieldDeconstructionTests(SimpleTestCase):
    """
    Tests the deconstruct() method on all core fields.
    """

    def test_name(self):
        """
        Tests the outputting of the correct name if assigned one.
        """
        # First try using a "normal" field
        field = models.CharField(max_length=65)
        name, path, args, kwargs = field.deconstruct()
        self.assertIsNone(name)
        field.set_attributes_from_name("is_awesome_test")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(name, "is_awesome_test")
        # Now try with a ForeignKey
        field = models.ForeignKey("some_fake.ModelName", models.CASCADE)
        name, path, args, kwargs = field.deconstruct()
        self.assertIsNone(name)
        field.set_attributes_from_name("author")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(name, "author")

    def test_db_tablespace(self):
        field = models.Field()
        _, _, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        # With a DEFAULT_DB_TABLESPACE.
        with self.settings(DEFAULT_DB_TABLESPACE='foo'):
            _, _, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        # With a db_tablespace.
        field = models.Field(db_tablespace='foo')
        _, _, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {'db_tablespace': 'foo'})
        # With a db_tablespace equal to DEFAULT_DB_TABLESPACE.
        with self.settings(DEFAULT_DB_TABLESPACE='foo'):
            _, _, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {'db_tablespace': 'foo'})

    def test_auto_field(self):
        field = models.AutoField(primary_key=True)
        field.set_attributes_from_name("id")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.AutoField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"primary_key": True})

    def test_big_integer_field(self):
        field = models.BigIntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BigIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_boolean_field(self):
        field = models.BooleanField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BooleanField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.BooleanField(default=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BooleanField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"default": True})

    def test_char_field(self):
        field = models.CharField(max_length=65)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 65})
        field = models.CharField(max_length=65, null=True, blank=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})

    def test_char_field_choices(self):
        field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two")))
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1})

    def test_csi_field(self):
        field = models.CommaSeparatedIntegerField(max_length=100)
        name, path, args, kwargs =
field.deconstruct() self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 100}) def test_date_field(self): field = models.DateField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now": True}) def test_datetime_field(self): field = models.DateTimeField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.DateTimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True}) # Bug #21785 field = models.DateTimeField(auto_now=True, auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DateTimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True}) def test_decimal_field(self): field = models.DecimalField(max_digits=5, decimal_places=2) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2}) def test_decimal_field_0_decimal_places(self): """ A DecimalField with decimal_places=0 should work (#22272). """ field = models.DecimalField(max_digits=5, decimal_places=0) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.DecimalField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0}) def test_email_field(self): field = models.EmailField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 254}) field = models.EmailField(max_length=255) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.EmailField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 255}) def test_file_field(self): field = models.FileField(upload_to="foo/bar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar"}) # Test max_length field = models.FileField(upload_to="foo/bar", max_length=200) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FileField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200}) def test_file_path_field(self): field = models.FilePathField(match=r".*\.txt$") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual(kwargs, {"match": r".*\.txt$"}) field = models.FilePathField(recursive=True, allow_folders=True, max_length=123) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FilePathField") self.assertEqual(args, []) self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123}) def test_float_field(self): field = models.FloatField() 
name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.FloatField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_foreign_key(self): # Test basic pointing from django.contrib.auth.models import Permission field = models.ForeignKey("auth.Permission", models.CASCADE) field.remote_field.model = Permission field.remote_field.field_name = "id" name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertFalse(hasattr(kwargs['to'], "setting_name")) # Test swap detection for swappable model field = models.ForeignKey("auth.User", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Swap detection for lowercase swappable model. field = models.ForeignKey('auth.user', models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'django.db.models.ForeignKey') self.assertEqual(args, []) self.assertEqual(kwargs, {'to': 'auth.user', 'on_delete': models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, 'AUTH_USER_MODEL') # Test nonexistent (for now) model field = models.ForeignKey("something.Else", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE}) # Test on_delete field = models.ForeignKey("auth.User", models.SET_NULL) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL}) # Test to_field preservation field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE}) # Test related_name preservation field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE}) # Test related_query_name field = models.ForeignKey("auth.Permission", models.CASCADE, related_query_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE} ) # Test limit_choices_to field = models.ForeignKey("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'}) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE} ) # Test unique field = models.ForeignKey("auth.Permission", models.CASCADE, unique=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 
"django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "unique": True, "on_delete": models.CASCADE}) @override_settings(AUTH_USER_MODEL="auth.Permission") def test_foreign_key_swapped(self): with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff works. field = models.ForeignKey("auth.Permission", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ForeignKey") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Model names are case-insensitive. with isolate_lru_cache(apps.get_swappable_settings_name): # It doesn't matter that we swapped out user for permission; # there's no validation. We just want to check the setting stuff # works. field = models.ForeignKey('auth.permission', models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'django.db.models.ForeignKey') self.assertEqual(args, []) self.assertEqual(kwargs, {'to': 'auth.permission', 'on_delete': models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, 'AUTH_USER_MODEL') def test_one_to_one(self): # Test basic pointing from django.contrib.auth.models import Permission field = models.OneToOneField("auth.Permission", models.CASCADE) field.remote_field.model = Permission field.remote_field.field_name = "id" name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) self.assertFalse(hasattr(kwargs['to'], "setting_name")) # Test swap detection for swappable model field = models.OneToOneField("auth.User", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Test nonexistent (for now) model field = models.OneToOneField("something.Else", models.CASCADE) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE}) # Test on_delete field = models.OneToOneField("auth.User", models.SET_NULL) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL}) # Test to_field field = models.OneToOneField("auth.Permission", models.CASCADE, to_field="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE}) # Test related_name field = models.OneToOneField("auth.Permission", models.CASCADE, related_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE}) # Test related_query_name field = models.OneToOneField("auth.Permission", 
models.CASCADE, related_query_name="foobar") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE} ) # Test limit_choices_to field = models.OneToOneField("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'}) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual( kwargs, {"to": "auth.permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE} ) # Test unique field = models.OneToOneField("auth.Permission", models.CASCADE, unique=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.OneToOneField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE}) def test_image_field(self): field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ImageField") self.assertEqual(args, []) self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"}) def test_integer_field(self): field = models.IntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_ip_address_field(self): field = models.IPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.IPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_generic_ip_address_field(self): field = models.GenericIPAddressField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.GenericIPAddressField(protocol="IPv6") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.GenericIPAddressField") self.assertEqual(args, []) self.assertEqual(kwargs, {"protocol": "IPv6"}) def test_many_to_many_field(self): # Test normal field = models.ManyToManyField("auth.Permission") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission"}) self.assertFalse(hasattr(kwargs['to'], "setting_name")) # Test swappable field = models.ManyToManyField("auth.User") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.User"}) self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL") # Test through field = models.ManyToManyField("auth.Permission", through="auth.Group") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"}) # Test custom db_table field = models.ManyToManyField("auth.Permission", db_table="custom_table") name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.ManyToManyField") self.assertEqual(args, []) self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"}) # Test related_name field = 
models.ManyToManyField("auth.Permission", related_name="custom_table")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"})
        # Test related_query_name
        field = models.ManyToManyField("auth.Permission", related_query_name="foobar")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "related_query_name": "foobar"})
        # Test limit_choices_to
        field = models.ManyToManyField("auth.Permission", limit_choices_to={'foo': 'bar'})
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "limit_choices_to": {'foo': 'bar'}})

    @override_settings(AUTH_USER_MODEL="auth.Permission")
    def test_many_to_many_field_swapped(self):
        with isolate_lru_cache(apps.get_swappable_settings_name):
            # It doesn't matter that we swapped out user for permission;
            # there's no validation. We just want to check the setting stuff works.
            field = models.ManyToManyField("auth.Permission")
            name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission"})
        self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")

    def test_many_to_many_field_related_name(self):
        class MyModel(models.Model):
            flag = models.BooleanField(default=True)
            m2m = models.ManyToManyField('self')
            m2m_related_name = models.ManyToManyField(
                'self',
                related_query_name='custom_query_name',
                limit_choices_to={'flag': True},
            )

        name, path, args, kwargs = MyModel.m2m.field.deconstruct()
        self.assertEqual(path, 'django.db.models.ManyToManyField')
        self.assertEqual(args, [])
        # deconstruct() should not include attributes which were not passed to
        # the field during initialization.
        self.assertEqual(kwargs, {'to': 'field_deconstruction.MyModel'})
        # Passed attributes.
name, path, args, kwargs = MyModel.m2m_related_name.field.deconstruct() self.assertEqual(path, 'django.db.models.ManyToManyField') self.assertEqual(args, []) self.assertEqual(kwargs, { 'to': 'field_deconstruction.MyModel', 'related_query_name': 'custom_query_name', 'limit_choices_to': {'flag': True}, }) def test_positive_integer_field(self): field = models.PositiveIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_small_integer_field(self): field = models.PositiveSmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.PositiveSmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_positive_big_integer_field(self): field = models.PositiveBigIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'django.db.models.PositiveBigIntegerField') self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_slug_field(self): field = models.SlugField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.SlugField(db_index=False, max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SlugField") self.assertEqual(args, []) self.assertEqual(kwargs, {"db_index": False, "max_length": 231}) def test_small_integer_field(self): field = models.SmallIntegerField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.SmallIntegerField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_text_field(self): field = models.TextField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TextField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) def test_time_field(self): field = models.TimeField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.TimeField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.TimeField(auto_now=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {'auto_now': True}) field = models.TimeField(auto_now_add=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {'auto_now_add': True}) def test_url_field(self): field = models.URLField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.URLField(max_length=231) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.URLField") self.assertEqual(args, []) self.assertEqual(kwargs, {"max_length": 231}) def test_binary_field(self): field = models.BinaryField() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, "django.db.models.BinaryField") self.assertEqual(args, []) self.assertEqual(kwargs, {}) field = models.BinaryField(editable=True) name, path, args, kwargs = field.deconstruct() self.assertEqual(args, []) self.assertEqual(kwargs, {'editable': True})
fd2fe49316f6b5615e33c2117e24d4196fc3d4bb35924f511eaddb0c60904d31
from django.db.models.base import ModelState, ModelStateCacheDescriptor
from django.test import SimpleTestCase


class ModelStateTests(SimpleTestCase):
    def test_fields_cache_descriptor(self):
        self.assertIsInstance(ModelState.fields_cache, ModelStateCacheDescriptor)

    def test_related_managers_descriptor(self):
        self.assertIsInstance(ModelState.related_managers_cache, ModelStateCacheDescriptor)
c2f5cab0e58b8b2302625ccb1d13c82f3646a0ad3793284b3ac96cd4055df314
import os
from functools import partial

from django.conf.urls.i18n import i18n_patterns
from django.urls import include, path, re_path
from django.utils.translation import gettext_lazy as _
from django.views import defaults, i18n, static

from . import views

base_dir = os.path.dirname(os.path.abspath(__file__))
media_dir = os.path.join(base_dir, 'media')
locale_dir = os.path.join(base_dir, 'locale')

urlpatterns = [
    path('', views.index_page),

    # Default views
    path('nonexistent_url/', partial(defaults.page_not_found, exception=None)),
    path('server_error/', defaults.server_error),

    # a view that raises an exception for the debug view
    path('raises/', views.raises),
    path('raises400/', views.raises400),
    path('raises400_bad_request/', views.raises400_bad_request),
    path('raises403/', views.raises403),
    path('raises404/', views.raises404),
    path('raises500/', views.raises500),
    path('custom_reporter_class_view/', views.custom_reporter_class_view),

    path('technical404/', views.technical404, name='my404'),
    path('classbased404/', views.Http404View.as_view()),
    path('classbased500/', views.Raises500View.as_view()),

    # i18n views
    path('i18n/', include('django.conf.urls.i18n')),
    path('jsi18n/', i18n.JavaScriptCatalog.as_view(packages=['view_tests'])),
    path('jsi18n/app1/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app1'])),
    path('jsi18n/app2/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app2'])),
    path('jsi18n/app5/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app5'])),
    path('jsi18n_english_translation/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app0'])),
    path('jsi18n_multi_packages1/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app1', 'view_tests.app2'])),
    path('jsi18n_multi_packages2/', i18n.JavaScriptCatalog.as_view(packages=['view_tests.app3', 'view_tests.app4'])),
    path('jsi18n_admin/', i18n.JavaScriptCatalog.as_view(packages=['django.contrib.admin', 'view_tests'])),
    path('jsi18n_template/', views.jsi18n),
    path('jsi18n_multi_catalogs/', views.jsi18n_multi_catalogs),
    path('jsoni18n/', i18n.JSONCatalog.as_view(packages=['view_tests'])),

    # Static views
    re_path(r'^site_media/(?P<path>.*)$', static.serve, {'document_root': media_dir, 'show_indexes': True}),
]

urlpatterns += i18n_patterns(
    re_path(_(r'^translated/$'), views.index_page, name='i18n_prefixed'),
)

urlpatterns += [
    path('template_exception/', views.template_exception, name='template_exception'),
    path(
        'raises_template_does_not_exist/<path:path>',
        views.raises_template_does_not_exist,
        name='raises_template_does_not_exist'
    ),
    path('render_no_template/', views.render_no_template, name='render_no_template'),
    re_path(r'^test-setlang/(?P<parameter>[^/]+)/$', views.with_parameter, name='with_parameter'),

    # Patterns to test the technical 404.
    re_path(r'^regex-post/(?P<pk>[0-9]+)/$', views.index_page, name='regex-post'),
    path('path-post/<int:pk>/', views.index_page, name='path-post'),
]
1390e6d53e63eae909fa089c58e9d2398c30da6569b4e9641c062a51648b6ab3
import datetime
import decimal
import logging
import sys
from pathlib import Path

from django.core.exceptions import (
    BadRequest, PermissionDenied, SuspiciousOperation,
)
from django.http import Http404, HttpResponse, JsonResponse
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.urls import get_resolver
from django.views import View
from django.views.debug import (
    ExceptionReporter, SafeExceptionReporterFilter, technical_500_response,
)
from django.views.decorators.debug import (
    sensitive_post_parameters, sensitive_variables,
)

TEMPLATES_PATH = Path(__file__).resolve().parent / 'templates'


def index_page(request):
    """Dummy index page"""
    return HttpResponse('<html><body>Dummy page</body></html>')


def with_parameter(request, parameter):
    return HttpResponse('ok')


def raises(request):
    # Make sure that a callable that raises an exception in the stack frame's
    # local vars won't hijack the technical 500 response (#15025).
    def callable():
        raise Exception
    try:
        raise Exception
    except Exception:
        return technical_500_response(request, *sys.exc_info())


def raises500(request):
    # We need to inspect the HTML generated by the fancy 500 debug view but
    # the test client ignores it, so we send it explicitly.
    try:
        raise Exception
    except Exception:
        return technical_500_response(request, *sys.exc_info())


class Raises500View(View):
    def get(self, request):
        try:
            raise Exception
        except Exception:
            return technical_500_response(request, *sys.exc_info())


def raises400(request):
    raise SuspiciousOperation


def raises400_bad_request(request):
    raise BadRequest('Malformed request syntax')


def raises403(request):
    raise PermissionDenied("Insufficient Permissions")


def raises404(request):
    resolver = get_resolver(None)
    resolver.resolve('/not-in-urls')


def technical404(request):
    raise Http404("Testing technical 404.")


class Http404View(View):
    def get(self, request):
        raise Http404("Testing class-based technical 404.")


def template_exception(request):
    return render(request, 'debug/template_exception.html')


def jsi18n(request):
    return render(request, 'jsi18n.html')


def jsi18n_multi_catalogs(request):
    return render(request, 'jsi18n-multi-catalogs.html')


def raises_template_does_not_exist(request, path='i_dont_exist.html'):
    # We need to inspect the HTML generated by the fancy 500 debug view but
    # the test client ignores it, so we send it explicitly.
    try:
        return render(request, path)
    except TemplateDoesNotExist:
        return technical_500_response(request, *sys.exc_info())


def render_no_template(request):
    # If we do not specify a template, we need to make sure the debug
    # view doesn't blow up.
    return render(request, [], {})


def send_log(request, exc_info):
    logger = logging.getLogger('django')
    # The default logging config has a logging filter to ensure admin emails are
    # only sent with DEBUG=False, but since someone might choose to remove that
    # filter, we still want to be able to test the behavior of error emails
    # with DEBUG=True. So we need to remove the filter temporarily.
admin_email_handler = [ h for h in logger.handlers if h.__class__.__name__ == "AdminEmailHandler" ][0] orig_filters = admin_email_handler.filters admin_email_handler.filters = [] admin_email_handler.include_html = True logger.error( 'Internal Server Error: %s', request.path, exc_info=exc_info, extra={'status_code': 500, 'request': request}, ) admin_email_handler.filters = orig_filters def non_sensitive_view(request): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA try: raise Exception except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) @sensitive_variables('sauce') @sensitive_post_parameters('bacon-key', 'sausage-key') def sensitive_view(request): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA try: raise Exception except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) @sensitive_variables() @sensitive_post_parameters() def paranoid_view(request): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA try: raise Exception except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) def sensitive_args_function_caller(request): try: sensitive_args_function(''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e'])) except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) @sensitive_variables('sauce') def sensitive_args_function(sauce): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA raise Exception def sensitive_kwargs_function_caller(request): try: sensitive_kwargs_function(''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e'])) except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) @sensitive_variables('sauce') def sensitive_kwargs_function(sauce=None): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA raise Exception class UnsafeExceptionReporterFilter(SafeExceptionReporterFilter): """ Ignores all the filtering done by its parent class. 
""" def get_post_parameters(self, request): return request.POST def get_traceback_frame_variables(self, request, tb_frame): return tb_frame.f_locals.items() @sensitive_variables() @sensitive_post_parameters() def custom_exception_reporter_filter_view(request): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's source # is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA request.exception_reporter_filter = UnsafeExceptionReporterFilter() try: raise Exception except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) class CustomExceptionReporter(ExceptionReporter): custom_traceback_text = 'custom traceback text' def get_traceback_html(self): return self.custom_traceback_text class TemplateOverrideExceptionReporter(ExceptionReporter): html_template_path = TEMPLATES_PATH / 'my_technical_500.html' text_template_path = TEMPLATES_PATH / 'my_technical_500.txt' def custom_reporter_class_view(request): request.exception_reporter_class = CustomExceptionReporter try: raise Exception except Exception: exc_info = sys.exc_info() return technical_500_response(request, *exc_info) class Klass: @sensitive_variables('sauce') def method(self, request): # Do not just use plain strings for the variables' values in the code # so that the tests don't return false positives when the function's # source is displayed in the exception report. cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA try: raise Exception except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) def sensitive_method_view(request): return Klass().method(request) @sensitive_variables('sauce') @sensitive_post_parameters('bacon-key', 'sausage-key') def multivalue_dict_key_error(request): cooked_eggs = ''.join(['s', 'c', 'r', 'a', 'm', 'b', 'l', 'e', 'd']) # NOQA sauce = ''.join(['w', 'o', 'r', 'c', 'e', 's', 't', 'e', 'r', 's', 'h', 'i', 'r', 'e']) # NOQA try: request.POST['bar'] except Exception: exc_info = sys.exc_info() send_log(request, exc_info) return technical_500_response(request, *exc_info) def json_response_view(request): return JsonResponse({ 'a': [1, 2, 3], 'foo': {'bar': 'baz'}, # Make sure datetime and Decimal objects would be serialized properly 'timestamp': datetime.datetime(2013, 5, 19, 20), 'value': decimal.Decimal('3.14'), })
f04ec192726947cb4e10e5ebaa4b77bdc25ab4ecaa8bb2b534dbc9acc2fd447c
import sys
import threading
import time
from unittest import skipIf, skipUnless

from django.db import (
    DatabaseError, Error, IntegrityError, OperationalError, connection,
    transaction,
)
from django.test import (
    TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)

from .models import Reporter


@skipUnlessDBFeature('uses_savepoints')
class AtomicTests(TransactionTestCase):
    """
    Tests for the atomic decorator and context manager.

    The tests make assertions on internal attributes because there isn't a
    robust way to ask the database for its current transaction state.

    Since the decorator syntax is converted into a context manager (see the
    implementation), there are only a few basic tests with the decorator
    syntax and the bulk of the tests use the context manager syntax.
    """

    available_apps = ['transactions']

    def test_decorator_syntax_commit(self):
        @transaction.atomic
        def make_reporter():
            return Reporter.objects.create(first_name="Tintin")
        reporter = make_reporter()
        self.assertSequenceEqual(Reporter.objects.all(), [reporter])

    def test_decorator_syntax_rollback(self):
        @transaction.atomic
        def make_reporter():
            Reporter.objects.create(first_name="Haddock")
            raise Exception("Oops, that's his last name")
        with self.assertRaisesMessage(Exception, "Oops"):
            make_reporter()
        self.assertSequenceEqual(Reporter.objects.all(), [])

    def test_alternate_decorator_syntax_commit(self):
        @transaction.atomic()
        def make_reporter():
            return Reporter.objects.create(first_name="Tintin")
        reporter = make_reporter()
        self.assertSequenceEqual(Reporter.objects.all(), [reporter])

    def test_alternate_decorator_syntax_rollback(self):
        @transaction.atomic()
        def make_reporter():
            Reporter.objects.create(first_name="Haddock")
            raise Exception("Oops, that's his last name")
        with self.assertRaisesMessage(Exception, "Oops"):
            make_reporter()
        self.assertSequenceEqual(Reporter.objects.all(), [])

    def test_commit(self):
        with transaction.atomic():
            reporter = Reporter.objects.create(first_name="Tintin")
        self.assertSequenceEqual(Reporter.objects.all(), [reporter])

    def test_rollback(self):
        with self.assertRaisesMessage(Exception, "Oops"):
            with transaction.atomic():
                Reporter.objects.create(first_name="Haddock")
                raise Exception("Oops, that's his last name")
        self.assertSequenceEqual(Reporter.objects.all(), [])

    def test_nested_commit_commit(self):
        with transaction.atomic():
            reporter1 = Reporter.objects.create(first_name="Tintin")
            with transaction.atomic():
                reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
        self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])

    def test_nested_commit_rollback(self):
        with transaction.atomic():
            reporter = Reporter.objects.create(first_name="Tintin")
            with self.assertRaisesMessage(Exception, "Oops"):
                with transaction.atomic():
                    Reporter.objects.create(first_name="Haddock")
                    raise Exception("Oops, that's his last name")
        self.assertSequenceEqual(Reporter.objects.all(), [reporter])

    def test_nested_rollback_commit(self):
        with self.assertRaisesMessage(Exception, "Oops"):
            with transaction.atomic():
                Reporter.objects.create(last_name="Tintin")
                with transaction.atomic():
                    Reporter.objects.create(last_name="Haddock")
                raise Exception("Oops, that's his first name")
        self.assertSequenceEqual(Reporter.objects.all(), [])

    def test_nested_rollback_rollback(self):
        with self.assertRaisesMessage(Exception, "Oops"):
            with transaction.atomic():
                Reporter.objects.create(last_name="Tintin")
                with self.assertRaisesMessage(Exception, "Oops"):
                    with transaction.atomic():
Reporter.objects.create(first_name="Haddock") raise Exception("Oops, that's his last name") raise Exception("Oops, that's his first name") self.assertSequenceEqual(Reporter.objects.all(), []) def test_merged_commit_commit(self): with transaction.atomic(): reporter1 = Reporter.objects.create(first_name="Tintin") with transaction.atomic(savepoint=False): reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) def test_merged_commit_rollback(self): with transaction.atomic(): Reporter.objects.create(first_name="Tintin") with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(savepoint=False): Reporter.objects.create(first_name="Haddock") raise Exception("Oops, that's his last name") # Writes in the outer block are rolled back too. self.assertSequenceEqual(Reporter.objects.all(), []) def test_merged_rollback_commit(self): with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(): Reporter.objects.create(last_name="Tintin") with transaction.atomic(savepoint=False): Reporter.objects.create(last_name="Haddock") raise Exception("Oops, that's his first name") self.assertSequenceEqual(Reporter.objects.all(), []) def test_merged_rollback_rollback(self): with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(): Reporter.objects.create(last_name="Tintin") with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(savepoint=False): Reporter.objects.create(first_name="Haddock") raise Exception("Oops, that's his last name") raise Exception("Oops, that's his first name") self.assertSequenceEqual(Reporter.objects.all(), []) def test_reuse_commit_commit(self): atomic = transaction.atomic() with atomic: reporter1 = Reporter.objects.create(first_name="Tintin") with atomic: reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) def test_reuse_commit_rollback(self): atomic = transaction.atomic() with atomic: reporter = Reporter.objects.create(first_name="Tintin") with self.assertRaisesMessage(Exception, "Oops"): with atomic: Reporter.objects.create(first_name="Haddock") raise Exception("Oops, that's his last name") self.assertSequenceEqual(Reporter.objects.all(), [reporter]) def test_reuse_rollback_commit(self): atomic = transaction.atomic() with self.assertRaisesMessage(Exception, "Oops"): with atomic: Reporter.objects.create(last_name="Tintin") with atomic: Reporter.objects.create(last_name="Haddock") raise Exception("Oops, that's his first name") self.assertSequenceEqual(Reporter.objects.all(), []) def test_reuse_rollback_rollback(self): atomic = transaction.atomic() with self.assertRaisesMessage(Exception, "Oops"): with atomic: Reporter.objects.create(last_name="Tintin") with self.assertRaisesMessage(Exception, "Oops"): with atomic: Reporter.objects.create(first_name="Haddock") raise Exception("Oops, that's his last name") raise Exception("Oops, that's his first name") self.assertSequenceEqual(Reporter.objects.all(), []) def test_force_rollback(self): with transaction.atomic(): Reporter.objects.create(first_name="Tintin") # atomic block shouldn't rollback, but force it. 
self.assertFalse(transaction.get_rollback()) transaction.set_rollback(True) self.assertSequenceEqual(Reporter.objects.all(), []) def test_prevent_rollback(self): with transaction.atomic(): reporter = Reporter.objects.create(first_name="Tintin") sid = transaction.savepoint() # trigger a database error inside an inner atomic without savepoint with self.assertRaises(DatabaseError): with transaction.atomic(savepoint=False): with connection.cursor() as cursor: cursor.execute( "SELECT no_such_col FROM transactions_reporter") # prevent atomic from rolling back since we're recovering manually self.assertTrue(transaction.get_rollback()) transaction.set_rollback(False) transaction.savepoint_rollback(sid) self.assertSequenceEqual(Reporter.objects.all(), [reporter]) class AtomicInsideTransactionTests(AtomicTests): """All basic tests for atomic should also pass within an existing transaction.""" def setUp(self): self.atomic = transaction.atomic() self.atomic.__enter__() def tearDown(self): self.atomic.__exit__(*sys.exc_info()) class AtomicWithoutAutocommitTests(AtomicTests): """All basic tests for atomic should also pass when autocommit is turned off.""" def setUp(self): transaction.set_autocommit(False) def tearDown(self): # The tests access the database after exercising 'atomic', initiating # a transaction ; a rollback is required before restoring autocommit. transaction.rollback() transaction.set_autocommit(True) @skipUnlessDBFeature('uses_savepoints') class AtomicMergeTests(TransactionTestCase): """Test merging transactions with savepoint=False.""" available_apps = ['transactions'] def test_merged_outer_rollback(self): with transaction.atomic(): Reporter.objects.create(first_name="Tintin") with transaction.atomic(savepoint=False): Reporter.objects.create(first_name="Archibald", last_name="Haddock") with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(savepoint=False): Reporter.objects.create(first_name="Calculus") raise Exception("Oops, that's his last name") # The third insert couldn't be roll back. Temporarily mark the # connection as not needing rollback to check it. self.assertTrue(transaction.get_rollback()) transaction.set_rollback(False) self.assertEqual(Reporter.objects.count(), 3) transaction.set_rollback(True) # The second insert couldn't be roll back. Temporarily mark the # connection as not needing rollback to check it. self.assertTrue(transaction.get_rollback()) transaction.set_rollback(False) self.assertEqual(Reporter.objects.count(), 3) transaction.set_rollback(True) # The first block has a savepoint and must roll back. self.assertSequenceEqual(Reporter.objects.all(), []) def test_merged_inner_savepoint_rollback(self): with transaction.atomic(): reporter = Reporter.objects.create(first_name="Tintin") with transaction.atomic(): Reporter.objects.create(first_name="Archibald", last_name="Haddock") with self.assertRaisesMessage(Exception, "Oops"): with transaction.atomic(savepoint=False): Reporter.objects.create(first_name="Calculus") raise Exception("Oops, that's his last name") # The third insert couldn't be roll back. Temporarily mark the # connection as not needing rollback to check it. self.assertTrue(transaction.get_rollback()) transaction.set_rollback(False) self.assertEqual(Reporter.objects.count(), 3) transaction.set_rollback(True) # The second block has a savepoint and must roll back. 
self.assertEqual(Reporter.objects.count(), 1) self.assertSequenceEqual(Reporter.objects.all(), [reporter]) @skipUnlessDBFeature('uses_savepoints') class AtomicErrorsTests(TransactionTestCase): available_apps = ['transactions'] forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active." def test_atomic_prevents_setting_autocommit(self): autocommit = transaction.get_autocommit() with transaction.atomic(): with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg): transaction.set_autocommit(not autocommit) # Make sure autocommit wasn't changed. self.assertEqual(connection.autocommit, autocommit) def test_atomic_prevents_calling_transaction_methods(self): with transaction.atomic(): with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg): transaction.commit() with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg): transaction.rollback() def test_atomic_prevents_queries_in_broken_transaction(self): r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") with transaction.atomic(): r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id) with self.assertRaises(IntegrityError): r2.save(force_insert=True) # The transaction is marked as needing rollback. msg = ( "An error occurred in the current transaction. You can't " "execute queries until the end of the 'atomic' block." ) with self.assertRaisesMessage(transaction.TransactionManagementError, msg): r2.save(force_update=True) self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock") @skipIfDBFeature('atomic_transactions') def test_atomic_allows_queries_after_fixing_transaction(self): r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock") with transaction.atomic(): r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id) with self.assertRaises(IntegrityError): r2.save(force_insert=True) # Mark the transaction as no longer needing rollback. transaction.set_rollback(False) r2.save(force_update=True) self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus") @skipUnlessDBFeature('test_db_allows_multiple_connections') def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self): with transaction.atomic(): Reporter.objects.create(first_name="Archibald", last_name="Haddock") connection.close() # The connection is closed and the transaction is marked as # needing rollback. This will raise an InterfaceError on databases # that refuse to create cursors on closed connections (PostgreSQL) # and a TransactionManagementError on other databases. with self.assertRaises(Error): Reporter.objects.create(first_name="Cuthbert", last_name="Calculus") # The connection is usable again . self.assertEqual(Reporter.objects.count(), 0) @skipUnless(connection.vendor == 'mysql', "MySQL-specific behaviors") class AtomicMySQLTests(TransactionTestCase): available_apps = ['transactions'] @skipIf(threading is None, "Test requires threading") def test_implicit_savepoint_rollback(self): """MySQL implicitly rolls back savepoints when it deadlocks (#22291).""" Reporter.objects.create(id=1) Reporter.objects.create(id=2) main_thread_ready = threading.Event() def other_thread(): try: with transaction.atomic(): Reporter.objects.select_for_update().get(id=1) main_thread_ready.wait() # 1) This line locks... (see below for 2) Reporter.objects.exclude(id=1).update(id=2) finally: # This is the thread-local connection, not the main connection. 
connection.close() other_thread = threading.Thread(target=other_thread) other_thread.start() with self.assertRaisesMessage(OperationalError, 'Deadlock found'): # Double atomic to enter a transaction and create a savepoint. with transaction.atomic(): with transaction.atomic(): Reporter.objects.select_for_update().get(id=2) main_thread_ready.set() # The two threads can't be synchronized with an event here # because the other thread locks. Sleep for a little while. time.sleep(1) # 2) ... and this line deadlocks. (see above for 1) Reporter.objects.exclude(id=2).update(id=1) other_thread.join() class AtomicMiscTests(TransactionTestCase): available_apps = ['transactions'] def test_wrap_callable_instance(self): """#20028 -- Atomic must support wrapping callable instances.""" class Callable: def __call__(self): pass # Must not raise an exception transaction.atomic(Callable()) @skipUnlessDBFeature('can_release_savepoints') def test_atomic_does_not_leak_savepoints_on_failure(self): """#23074 -- Savepoints must be released after rollback.""" # Expect an error when rolling back a savepoint that doesn't exist. # Done outside of the transaction block to ensure proper recovery. with self.assertRaises(Error): # Start a plain transaction. with transaction.atomic(): # Swallow the intentional error raised in the sub-transaction. with self.assertRaisesMessage(Exception, "Oops"): # Start a sub-transaction with a savepoint. with transaction.atomic(): sid = connection.savepoint_ids[-1] raise Exception("Oops") # This is expected to fail because the savepoint no longer exists. connection.savepoint_rollback(sid) def test_mark_for_rollback_on_error_in_transaction(self): with transaction.atomic(savepoint=False): # Swallow the intentional error raised. with self.assertRaisesMessage(Exception, "Oops"): # Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken. with transaction.mark_for_rollback_on_error(): # Ensure that we are still in a good state. self.assertFalse(transaction.get_rollback()) raise Exception("Oops") # Ensure that `mark_for_rollback_on_error` marked the transaction as broken … self.assertTrue(transaction.get_rollback()) # … and further queries fail. msg = "You can't execute queries until the end of the 'atomic' block." with self.assertRaisesMessage(transaction.TransactionManagementError, msg): Reporter.objects.create() # Transaction errors are reset at the end of a transaction, so this should just work. Reporter.objects.create() def test_mark_for_rollback_on_error_in_autocommit(self): self.assertTrue(transaction.get_autocommit()) # Swallow the intentional error raised. with self.assertRaisesMessage(Exception, "Oops"): # Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken. with transaction.mark_for_rollback_on_error(): # Ensure that we are still in a good state. self.assertFalse(transaction.get_connection().needs_rollback) raise Exception("Oops") # Ensure that `mark_for_rollback_on_error` did not mark the transaction # as broken, since we are in autocommit mode … self.assertFalse(transaction.get_connection().needs_rollback) # … and further queries work nicely. Reporter.objects.create() class NonAutocommitTests(TransactionTestCase): available_apps = [] def setUp(self): transaction.set_autocommit(False) def tearDown(self): transaction.rollback() transaction.set_autocommit(True) def test_orm_query_after_error_and_rollback(self): """ ORM queries are allowed after an error and a rollback in non-autocommit mode (#27504).
""" r1 = Reporter.objects.create(first_name='Archibald', last_name='Haddock') r2 = Reporter(first_name='Cuthbert', last_name='Calculus', id=r1.id) with self.assertRaises(IntegrityError): r2.save(force_insert=True) transaction.rollback() Reporter.objects.last() def test_orm_query_without_autocommit(self): """#24921 -- ORM queries must be possible after set_autocommit(False).""" Reporter.objects.create(first_name="Tintin") class DurableTestsBase: available_apps = ['transactions'] def test_commit(self): with transaction.atomic(durable=True): reporter = Reporter.objects.create(first_name='Tintin') self.assertEqual(Reporter.objects.get(), reporter) def test_nested_outer_durable(self): with transaction.atomic(durable=True): reporter1 = Reporter.objects.create(first_name='Tintin') with transaction.atomic(): reporter2 = Reporter.objects.create( first_name='Archibald', last_name='Haddock', ) self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1]) def test_nested_both_durable(self): msg = 'A durable atomic block cannot be nested within another atomic block.' with transaction.atomic(durable=True): with self.assertRaisesMessage(RuntimeError, msg): with transaction.atomic(durable=True): pass def test_nested_inner_durable(self): msg = 'A durable atomic block cannot be nested within another atomic block.' with transaction.atomic(): with self.assertRaisesMessage(RuntimeError, msg): with transaction.atomic(durable=True): pass def test_sequence_of_durables(self): with transaction.atomic(durable=True): reporter = Reporter.objects.create(first_name='Tintin 1') self.assertEqual(Reporter.objects.get(first_name='Tintin 1'), reporter) with transaction.atomic(durable=True): reporter = Reporter.objects.create(first_name='Tintin 2') self.assertEqual(Reporter.objects.get(first_name='Tintin 2'), reporter) class DurableTransactionTests(DurableTestsBase, TransactionTestCase): pass class DurableTests(DurableTestsBase, TestCase): pass
3eb90f5da55ed830cbe1b885db326db3e4b3bddd35e3d0fe74bbfb16868ac674
# Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. import copy import io import os import pickle import re import shutil import sys import tempfile import threading import time import unittest from pathlib import Path from unittest import mock, skipIf from django.conf import settings from django.core import management, signals from django.core.cache import ( DEFAULT_CACHE_ALIAS, CacheHandler, CacheKeyWarning, InvalidCacheKey, cache, caches, ) from django.core.cache.backends.base import InvalidCacheBackendError from django.core.cache.backends.redis import RedisCacheClient from django.core.cache.utils import make_template_fragment_key from django.db import close_old_connections, connection, connections from django.db.backends.utils import CursorWrapper from django.http import ( HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse, ) from django.middleware.cache import ( CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.middleware.csrf import CsrfViewMiddleware from django.template import engines from django.template.context_processors import csrf from django.template.response import TemplateResponse from django.test import ( RequestFactory, SimpleTestCase, TestCase, TransactionTestCase, override_settings, ) from django.test.signals import setting_changed from django.test.utils import CaptureQueriesContext from django.utils import timezone, translation from django.utils.cache import ( get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers, ) from django.views.decorators.cache import cache_control, cache_page from .models import Poll, expensive_calculation # functions/classes for complex data type tests def f(): return 42 class C: def m(n): return 24 class Unpicklable: def __getstate__(self): raise pickle.PickleError() def empty_response(request): return HttpResponse() KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' ) @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class DummyCacheTests(SimpleTestCase): # The Dummy cache backend doesn't really behave like a test backend, # so it has its own test case. 
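# The dummy backend implements the whole cache interface without storing
# anything, which is mainly useful in development settings where code expects
# a cache to be configured but nothing should actually be cached, e.g.:
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
#         }
#     }
#
# The tests below assert exactly that behavior: set() and add() appear to
# succeed, while get() and has_key() behave as if the key was never stored.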
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), True) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Nonexistent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_get_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.get_many(['key with spaces']) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertIsNone(cache.get("key1")) self.assertIs(cache.delete("key1"), False) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), False) self.assertIs(cache.has_key("goodbye1"), False) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr('answer') with self.assertRaises(ValueError): cache.incr('does_not_exist') with self.assertRaises(ValueError): cache.incr('does_not_exist', -1) def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr('answer') with self.assertRaises(ValueError): cache.decr('does_not_exist') with self.assertRaises(ValueError): cache.decr('does_not_exist', -1) def test_touch(self): """Dummy cache can't do touch().""" self.assertIs(cache.touch('whatever'), False) def test_data_types(self): "All data types are ignored equally by the dummy cache" tests = { 'string': 'this is a string', 'int': 42, 'bool': True, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } for key, value in tests.items(): with self.subTest(key=key): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertIsNone(cache.get("expire2")) self.assertIs(cache.has_key("expire3"), False) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" self.assertEqual(cache.set_many({'a': 1, 
'b': 2}), []) self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), []) def test_set_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.set_many({'key with spaces': 'foo'}) def test_delete_many(self): "delete_many does nothing for the dummy cache backend" cache.delete_many(['a', 'b']) def test_delete_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): cache.delete_many(['key with spaces']) def test_clear(self): "clear does nothing for the dummy cache backend" cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr_version('answer') with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr_version('answer') with self.assertRaises(ValueError): cache.decr_version('does_not_exist') def test_get_or_set(self): self.assertEqual(cache.get_or_set('mykey', 'default'), 'default') self.assertIsNone(cache.get_or_set('mykey', None)) def test_get_or_set_callable(self): def my_callable(): return 'default' self.assertEqual(cache.get_or_set('mykey', my_callable), 'default') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default') def custom_key_func(key, key_prefix, version): "A customized cache key function" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, exclude=None, **params): # `base` is used to pull in the memcached config from the original settings, # `exclude` is a set of cache names denoting which `_caches_setting_base` keys # should be omitted. # `params` are test specific overrides and `_caches_settings_base` is the # base config for the tests. # This results in the following search order: # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() # Some clients raise custom exceptions when .incr() or .decr() are called # with a non-integer value. 
incr_decr_type_error = TypeError def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_default_used_when_none_is_set(self): """If None is cached, get() returns it instead of the default.""" cache.set('key_default_none', None) self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache self.assertIs(cache.add("addkey1", "value"), True) self.assertIs(cache.add("addkey1", "newvalue"), False) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertIs(caches['prefix'].has_key('somekey'), False) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): """Nonexistent cache keys return as None/default.""" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'}) cache.set_many({'x': None, 'y': 1}) self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get("key1"), "spam") self.assertIs(cache.delete("key1"), True) self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_delete_nonexistent(self): self.assertIs(cache.delete('nonexistent_key'), False) def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertIs(cache.has_key("hello1"), True) self.assertIs(cache.has_key("goodbye1"), False) cache.set("no_expiry", "here", None) self.assertIs(cache.has_key("no_expiry"), True) cache.set('null', None) self.assertIs(cache.has_key('null'), True) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) cache.set('null', None) self.assertIn('null', cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') with self.assertRaises(ValueError): cache.incr('does_not_exist', -1) cache.set('null', None) with self.assertRaises(self.incr_decr_type_error): cache.incr('null') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') with self.assertRaises(ValueError): cache.incr('does_not_exist', -1) cache.set('null', None) 
with self.assertRaises(self.incr_decr_type_error): cache.decr('null') def test_close(self): self.assertTrue(hasattr(cache, 'close')) cache.close() def test_data_types(self): # Many different data types can be cached tests = { 'string': 'this is a string', 'int': 42, 'bool': True, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } for key, value in tests.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) self.assertIs(cache.add("expire2", "newvalue"), True) self.assertEqual(cache.get("expire2"), "newvalue") self.assertIs(cache.has_key("expire3"), False) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) time.sleep(3) self.assertIs(cache.has_key('expire1'), False) # cache.touch() works without the timeout argument. 
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) self.assertIs(cache.has_key('expire1'), True) self.assertIs(cache.touch('nonexistent'), False) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): self.assertIs(cache.delete(key), True) self.assertIs(cache.add(key, value), True) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): self.assertIs(cache.delete(key), True) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add self.assertIs(cache.add('binary1-add', compressed_value), True) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(failing_keys, []) def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): """ Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399). 
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): """ Passing in None into timeout results in a value that is cached forever """ cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') self.assertIs(cache.add('key2', 'ham', None), True) self.assertEqual(cache.get('key2'), 'ham') self.assertIs(cache.add('key1', 'new eggs', None), False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) self.assertIs(cache.touch('key5', timeout=None), True) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') def test_zero_timeout(self): """ Passing in zero into timeout results in a value that is not cached """ cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) self.assertIs(cache.add('key2', 'ham', 0), True) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) self.assertIs(cache.touch('key5', timeout=0), True) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache_name, initial_count, final_count): try: cull_cache = caches[cull_cache_name] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count += 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test('cull', 50, 29) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 19) def test_cull_delete_when_store_empty(self): try: cull_cache = caches['cull'] except InvalidCacheBackendError: self.skipTest("Culling isn't implemented.") old_max_entries = cull_cache._max_entries # Force _cull to delete on first cached record. cull_cache._max_entries = -1 try: cull_cache.set('force_cull_delete', 'value', 1000) self.assertIs(cull_cache.has_key('force_cull_delete'), True) finally: cull_cache._max_entries = old_max_entries def _perform_invalid_key_test(self, key, expected_warning, key_func=None): """ All the builtin backends should warn (except memcached that should error) on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. 
""" # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = key_func or func tests = [ ('add', [key, 1]), ('get', [key]), ('set', [key, 1]), ('incr', [key]), ('decr', [key]), ('touch', [key]), ('delete', [key]), ('get_many', [[key, 'b']]), ('set_many', [{key: 1, 'b': 2}]), ('delete_many', [[key, 'b']]), ] try: for operation, args in tests: with self.subTest(operation=operation): with self.assertWarns(CacheKeyWarning) as cm: getattr(cache, operation)(*args) self.assertEqual(str(cm.warning), expected_warning) finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) def test_invalid_key_length(self): # memcached limits key length to 250. key = ('a' * 250) + '清' expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_invalid_with_version_key_length(self): # Custom make_key() that adds a version to the key and exceeds the # limit. def key_func(key, *args): return key + ':1' key = 'a' * 249 expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key_func(key), 250) ) self._perform_invalid_key_test(key, expected_warning, key_func=key_func) def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 self.assertIs(cache.add('answer1', 42, version=2), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=2), False) self.assertIsNone(cache.get('answer1', version=1)) 
self.assertEqual(cache.get('answer1', version=2), 42) self.assertIs(cache.add('answer1', 37, version=1), True) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 self.assertIs(caches['v2'].add('answer2', 42), True) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37), False) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertIs(caches['v2'].add('answer2', 37, version=1), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 self.assertIs(caches['v2'].add('answer3', 42, version=1), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37, version=1), False) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) self.assertIs(caches['v2'].add('answer3', 37), True) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertIs(cache.has_key('answer1'), True) self.assertIs(cache.has_key('answer1', version=1), True) self.assertIs(cache.has_key('answer1', version=2), False) self.assertIs(caches['v2'].has_key('answer1'), False) self.assertIs(caches['v2'].has_key('answer1', version=1), True) self.assertIs(caches['v2'].has_key('answer1', version=2), False) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertIs(cache.delete('answer1'), True) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertIs(cache.delete('answer2', version=2), True) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) self.assertIs(caches['v2'].delete('answer3'), True) self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertIs(caches['v2'].delete('answer4', version=1), True) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) self.assertEqual(cache.incr('answer1'), 38) self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) self.assertEqual(cache.decr('answer1'), 37) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) self.assertEqual(cache.incr('answer2', version=2), 43) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) self.assertEqual(cache.decr('answer2', version=2), 42) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) 
cache.set('answer3', 42, version=2) self.assertEqual(caches['v2'].incr('answer3'), 43) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) self.assertEqual(caches['v2'].decr('answer3'), 42) self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) self.assertEqual(caches['v2'].incr('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) self.assertEqual(caches['v2'].decr('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) 
self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) with self.assertRaises(ValueError): cache.incr_version('does_not_exist') cache.set('null', None) self.assertEqual(cache.incr_version('null'), 2) def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) cache.set('null', None, version=2) self.assertEqual(cache.decr_version('null', version=2), 1) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) @override_settings(CACHE_MIDDLEWARE_ALIAS=DEFAULT_CACHE_ALIAS) def test_cache_write_unpicklable_object(self): fetch_middleware = FetchFromCacheMiddleware(empty_response) request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) content = 'Testing cookie serialization.' 
def get_response(req): response = HttpResponse(content) response.set_cookie('foo', 'bar') return response update_middleware = UpdateCacheMiddleware(get_response) response = update_middleware(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) UpdateCacheMiddleware(lambda req: get_cache_data)(request) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): # Shouldn't fail silently if trying to cache an unpicklable type. with self.assertRaises(pickle.PickleError): cache.add('unpicklable', Unpicklable()) def test_set_fail_on_pickleerror(self): with self.assertRaises(pickle.PickleError): cache.set('unpicklable', Unpicklable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) self.assertIsNone(cache.get_or_set('null', None)) # Previous get_or_set() stores None in the cache. self.assertIsNone(cache.get('null', 'default')) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') self.assertIsNone(cache.get_or_set('null', lambda: None)) # Previous get_or_set() stores None in the cache. self.assertIsNone(cache.get('null', 'default')) def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) def test_get_or_set_racing(self): with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add: # Simulate cache.add() failing to add a value. In that case, the # default value should be returned. cache_add.return_value = False self.assertEqual(cache.get_or_set('key', 'default'), 'default') @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Spaces are used in the table name to ensure quoting/escaping is working LOCATION='test cache table' )) class DBCacheTests(BaseCacheTests, TransactionTestCase): available_apps = ['cache'] def setUp(self): # The super calls needs to happen first for the settings override. super().setUp() self.create_table() def tearDown(self): # The super call needs to happen first because it uses the database. 
super().tearDown() self.drop_table() def create_table(self): management.call_command('createcachetable', verbosity=0) def drop_table(self): with connection.cursor() as cursor: table_name = connection.ops.quote_name('test cache table') cursor.execute('DROP TABLE %s' % table_name) def test_get_many_num_queries(self): cache.set_many({'a': 1, 'b': 2}) cache.set('expired', 'expired', 0.01) with self.assertNumQueries(1): self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2}) time.sleep(0.02) with self.assertNumQueries(2): self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2}) def test_delete_many_num_queries(self): cache.set_many({'a': 1, 'b': 2, 'c': 3}) with self.assertNumQueries(1): cache.delete_many(['a', 'b', 'c']) def test_cull_queries(self): old_max_entries = cache._max_entries # Force _cull to delete on first cached record. cache._max_entries = -1 with CaptureQueriesContext(connection) as captured_queries: try: cache.set('force_cull', 'value', 1000) finally: cache._max_entries = old_max_entries num_count_queries = sum('COUNT' in query['sql'] for query in captured_queries) self.assertEqual(num_count_queries, 1) # Column names are quoted. for query in captured_queries: sql = query['sql'] if 'expires' in sql: self.assertIn(connection.ops.quote_name('expires'), sql) if 'cache_key' in sql: self.assertIn(connection.ops.quote_name('cache_key'), sql) def test_delete_cursor_rowcount(self): """ The rowcount attribute should not be checked on a closed cursor. """ class MockedCursorWrapper(CursorWrapper): is_closed = False def close(self): self.cursor.close() self.is_closed = True @property def rowcount(self): if self.is_closed: raise Exception('Cursor is closed.') return self.cursor.rowcount cache.set_many({'a': 1, 'b': 2}) with mock.patch('django.db.backends.utils.CursorWrapper', MockedCursorWrapper): self.assertIs(cache.delete('a'), True) def test_zero_cull(self): self._perform_cull_test('zero_cull', 50, 18) def test_second_call_doesnt_crash(self): out = io.StringIO() management.call_command('createcachetable', stdout=out) self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Use another table name to avoid the 'table already exists' message. LOCATION='createcachetable_dry_run_mode' )) def test_createcachetable_dry_run_mode(self): out = io.StringIO() management.call_command('createcachetable', dry_run=True, stdout=out) output = out.getvalue() self.assertTrue(output.startswith("CREATE TABLE")) def test_createcachetable_with_table_argument(self): """ Delete and recreate cache table with legacy behavior (explicitly specifying the table name). """ self.drop_table() out = io.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") def test_has_key_query_columns_quoted(self): with CaptureQueriesContext(connection) as captured_queries: cache.has_key('key') self.assertEqual(len(captured_queries), 1) sql = captured_queries[0]['sql'] # Column names are quoted. 
self.assertIn(connection.ops.quote_name('expires'), sql) self.assertIn(connection.ops.quote_name('cache_key'), sql) @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter: """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): databases = {'default', 'other'} @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0) class PicklingSideEffect: def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): self.locked = self.cache._lock.locked() return {} limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 9}, )) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super().setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") self.assertIs(cache.add('add', bad_obj), True) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] self.assertEqual(cache.incr(key), 2) self.assertEqual(expire, cache._expire_info[_key]) self.assertEqual(cache.decr(key), 1) self.assertEqual(expire, cache._expire_info[_key]) @limit_locmem_entries def test_lru_get(self): """get() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) @limit_locmem_entries def test_lru_set(self): """set() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(3, 9): cache.set(key, key, timeout=None) cache.set(9, 9, timeout=None) for key in range(3, 10): self.assertEqual(cache.get(key), key) for key in range(3): self.assertIsNone(cache.get(key)) @limit_locmem_entries def test_lru_incr(self): """incr() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.incr(key), key + 1) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key + 1) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) # memcached and redis backends aren't guaranteed to be available. # To check the backends, the test settings file will need to contain at least # one cache backend setting that points at your cache server. configured_caches = {} for _cache_params in settings.CACHES.values(): configured_caches[_cache_params['BACKEND']] = _cache_params PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache') PyMemcacheCache_params = configured_caches.get('django.core.cache.backends.memcached.PyMemcacheCache') # The memcached backends don't support cull-related options like `MAX_ENTRIES`. memcached_excluded_caches = {'cull', 'zero_cull'} RedisCache_params = configured_caches.get('django.core.cache.backends.redis.RedisCache') # The redis backend does not support cull-related options like `MAX_ENTRIES`. 
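# For the conditional memcached/redis test classes below to run, the test
# settings file has to map at least one alias to a reachable server. A minimal
# sketch for the redis backend (the address is only a placeholder):
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.redis.RedisCache',
#             'LOCATION': 'redis://127.0.0.1:6379',
#         },
#     }
#
# Like the memcached backends, it therefore gets its own cull exclusion set: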
redis_excluded_caches = {'cull', 'zero_cull'} class BaseMemcachedTests(BaseCacheTests): # By default it's assumed that the client doesn't clean up connections # properly, in which case the backend must do so after each request. should_disconnect_on_close = True def test_location_multiple_servers(self): locations = [ ['server1.tld', 'server2:11211'], 'server1.tld;server2:11211', 'server1.tld,server2:11211', ] for location in locations: with self.subTest(location=location): params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location} with self.settings(CACHES={'default': params}): self.assertEqual(cache._servers, ['server1.tld', 'server2:11211']) def _perform_invalid_key_test(self, key, expected_warning): """ While other backends merely warn, memcached should raise for an invalid key. """ msg = expected_warning.replace(key, cache.make_key(key)) tests = [ ('add', [key, 1]), ('get', [key]), ('set', [key, 1]), ('incr', [key]), ('decr', [key]), ('touch', [key]), ('delete', [key]), ('get_many', [[key, 'b']]), ('set_many', [{key: 1, 'b': 2}]), ('delete_many', [[key, 'b']]), ] for operation, args in tests: with self.subTest(operation=operation): with self.assertRaises(InvalidCacheKey) as cm: getattr(cache, operation)(*args) self.assertEqual(str(cm.exception), msg) def test_invalid_with_version_key_length(self): # make_key() adds a version to the key and exceeds the limit. key = 'a' * 248 expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_default_never_expiring_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, TIMEOUT=None)): cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') def test_default_far_future_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, # 60*60*24*365, 1 year TIMEOUT=31536000)): cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. max_value_length = 2 ** 20 cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) try: cache.set('small_value', large_value) except Exception: # Most clients (e.g. pymemcache or pylibmc) raise when the value is # too large. This test is primarily checking that the key was # deleted, so the return/exception behavior for the set() itself is # not important. pass # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) def test_close(self): # For clients that don't manage their connections properly, the # connection is closed when the request is complete. 
signals.request_finished.disconnect(close_old_connections) try: with mock.patch.object(cache._class, 'disconnect_all', autospec=True) as mock_disconnect: signals.request_finished.send(self.__class__) self.assertIs(mock_disconnect.called, self.should_disconnect_on_close) finally: signals.request_finished.connect(close_old_connections) def test_set_many_returns_failing_keys(self): def fail_set_multi(mapping, *args, **kwargs): return mapping.keys() with mock.patch.object(cache._class, 'set_multi', side_effect=fail_set_multi): failing_keys = cache.set_many({'key': 'value'}) self.assertEqual(failing_keys, ['key']) @unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, )) class PyLibMCCacheTests(BaseMemcachedTests, TestCase): base_params = PyLibMCCache_params # libmemcached manages its own connections. should_disconnect_on_close = False @property def incr_decr_type_error(self): return cache._lib.ClientError @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, OPTIONS={ 'binary': True, 'behaviors': {'tcp_nodelay': True}, }, )) def test_pylibmc_options(self): self.assertTrue(cache._cache.binary) self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True)) def test_pylibmc_client_servers(self): backend = self.base_params['BACKEND'] tests = [ ('unix:/run/memcached/socket', '/run/memcached/socket'), ('/run/memcached/socket', '/run/memcached/socket'), ('localhost', 'localhost'), ('localhost:11211', 'localhost:11211'), ('[::1]', '[::1]'), ('[::1]:11211', '[::1]:11211'), ('127.0.0.1', '127.0.0.1'), ('127.0.0.1:11211', '127.0.0.1:11211'), ] for location, expected in tests: settings = {'default': {'BACKEND': backend, 'LOCATION': location}} with self.subTest(location), self.settings(CACHES=settings): self.assertEqual(cache.client_servers, [expected]) @unittest.skipUnless(PyMemcacheCache_params, 'PyMemcacheCache backend not configured') @override_settings(CACHES=caches_setting_for_tests( base=PyMemcacheCache_params, exclude=memcached_excluded_caches, )) class PyMemcacheCacheTests(BaseMemcachedTests, TestCase): base_params = PyMemcacheCache_params @property def incr_decr_type_error(self): return cache._lib.exceptions.MemcacheClientError def test_pymemcache_highest_pickle_version(self): self.assertEqual( cache._cache.default_kwargs['serde']._serialize_func.keywords['pickle_version'], pickle.HIGHEST_PROTOCOL, ) for cache_key in settings.CACHES: for client_key, client in caches[cache_key]._cache.clients.items(): with self.subTest(cache_key=cache_key, server=client_key): self.assertEqual( client.serde._serialize_func.keywords['pickle_version'], pickle.HIGHEST_PROTOCOL, ) @override_settings(CACHES=caches_setting_for_tests( base=PyMemcacheCache_params, exclude=memcached_excluded_caches, OPTIONS={'no_delay': True}, )) def test_pymemcache_options(self): self.assertIs(cache._cache.default_kwargs['no_delay'], True) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super().setUp() self.dirname = self.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
for cache_params in settings.CACHES.values(): cache_params['LOCATION'] = self.dirname setting_changed.send(self.__class__, setting='CACHES', enter=False) def tearDown(self): super().tearDown() # Call parent first, as cache.clear() may recreate cache base directory shutil.rmtree(self.dirname) def mkdtemp(self): return tempfile.mkdtemp() def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) cache.clear() self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non cache files') os.remove(fname) def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir') def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) cache.set('foo', 'bar') self.assertTrue(os.path.exists(self.dirname)) def test_get_ignores_enoent(self): cache.set('foo', 'bar') os.unlink(cache._key_to_file('foo')) # Returns the default instead of erroring. self.assertEqual(cache.get('foo', 'baz'), 'baz') @skipIf( sys.platform == 'win32', 'Windows only partially supports umasks and chmod.', ) def test_cache_dir_permissions(self): os.rmdir(self.dirname) dir_path = Path(self.dirname) / 'nested' / 'filebasedcache' for cache_params in settings.CACHES.values(): cache_params['LOCATION'] = dir_path setting_changed.send(self.__class__, setting='CACHES', enter=False) cache.set('foo', 'bar') self.assertIs(dir_path.exists(), True) tests = [ dir_path, dir_path.parent, dir_path.parent.parent, ] for directory in tests: with self.subTest(directory=directory): dir_mode = directory.stat().st_mode & 0o777 self.assertEqual(dir_mode, 0o700) def test_get_does_not_ignore_non_filenotfound_exceptions(self): with mock.patch('builtins.open', side_effect=OSError): with self.assertRaises(OSError): cache.get('foo') def test_empty_cache_file_considered_expired(self): cache_file = cache._key_to_file('foo') with open(cache_file, 'wb') as fh: fh.write(b'') with open(cache_file, 'rb') as fh: self.assertIs(cache._is_expired(fh), True) @unittest.skipUnless(RedisCache_params, "Redis backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=RedisCache_params, exclude=redis_excluded_caches, )) class RedisCacheTests(BaseCacheTests, TestCase): def setUp(self): import redis super().setUp() self.lib = redis @property def incr_decr_type_error(self): return self.lib.ResponseError def test_cache_client_class(self): self.assertIs(cache._class, RedisCacheClient) self.assertIsInstance(cache._cache, RedisCacheClient) def test_get_backend_timeout_method(self): positive_timeout = 10 positive_backend_timeout = cache.get_backend_timeout(positive_timeout) self.assertEqual(positive_backend_timeout, positive_timeout) negative_timeout = -5 negative_backend_timeout = cache.get_backend_timeout(negative_timeout) self.assertEqual(negative_backend_timeout, 0) none_timeout = None none_backend_timeout = cache.get_backend_timeout(none_timeout) self.assertIsNone(none_backend_timeout) def test_get_connection_pool_index(self): pool_index = cache._cache._get_connection_pool_index(write=True) self.assertEqual(pool_index, 0) pool_index = cache._cache._get_connection_pool_index(write=False) if len(cache._cache._servers) == 1: self.assertEqual(pool_index, 0) else: self.assertGreater(pool_index, 0) self.assertLess(pool_index, len(cache._cache._servers)) def test_get_connection_pool(self): pool = cache._cache._get_connection_pool(write=True) self.assertIsInstance(pool, 
self.lib.ConnectionPool) pool = cache._cache._get_connection_pool(write=False) self.assertIsInstance(pool, self.lib.ConnectionPool) def test_get_client(self): self.assertIsInstance(cache._cache.get_client(), self.lib.Redis) def test_serializer_dumps(self): self.assertEqual(cache._cache._serializer.dumps(123), 123) self.assertIsInstance(cache._cache._serializer.dumps(True), bytes) self.assertIsInstance(cache._cache._serializer.dumps('abc'), bytes) class FileBasedCachePathLibTests(FileBasedCacheTests): def mkdtemp(self): tmp_dir = super().mkdtemp() return Path(tmp_dir) @override_settings(CACHES={ 'default': { 'BACKEND': 'cache.liberal_backend.CacheClass', }, }) class CustomCacheKeyValidationTests(SimpleTestCase): """ Tests for the ability to mixin a custom ``validate_key`` method to a custom cache backend that otherwise inherits from a builtin backend, and override the default key validation. Refs #6447. """ def test_custom_key_validation(self): # this key is both longer than 250 characters, and has spaces key = 'some key with spaces' * 15 val = 'a value' cache.set(key, val) self.assertEqual(cache.get(key), val) @override_settings( CACHES={ 'default': { 'BACKEND': 'cache.closeable_cache.CacheClass', } } ) class CacheClosingTests(SimpleTestCase): def test_close(self): self.assertFalse(cache.closed) signals.request_finished.send(self.__class__) self.assertTrue(cache.closed) def test_close_only_initialized(self): with self.settings(CACHES={ 'cache_1': { 'BACKEND': 'cache.closeable_cache.CacheClass', }, 'cache_2': { 'BACKEND': 'cache.closeable_cache.CacheClass', }, }): self.assertEqual(caches.all(initialized_only=True), []) signals.request_finished.send(self.__class__) self.assertEqual(caches.all(initialized_only=True), []) DEFAULT_MEMORY_CACHES_SETTINGS = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } } NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS) NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None class DefaultNonExpiringCacheKeyTests(SimpleTestCase): """ Settings having Cache arguments with a TIMEOUT=None create Caches that will set non-expiring keys. """ def setUp(self): # The 5 minute (300 seconds) default expiration time for keys is # defined in the implementation of the initializer method of the # BaseCache type. self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout def tearDown(self): del(self.DEFAULT_TIMEOUT) def test_default_expiration_time_for_keys_is_5_minutes(self): """The default expiration time of a cache key is 5 minutes. This value is defined in django.core.cache.backends.base.BaseCache.__init__(). """ self.assertEqual(300, self.DEFAULT_TIMEOUT) def test_caches_with_unset_timeout_has_correct_default_timeout(self): """Caches that have the TIMEOUT parameter undefined in the default settings will use the default 5 minute timeout. """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". 
""" cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ALLOWED_HOSTS=['.example.com'], ) class CacheUtils(SimpleTestCase): """TestCase for django.utils.cache functions.""" host = 'www.example.com' path = '/cache/test/' factory = RequestFactory(HTTP_HOST=host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = update_cache if update_cache else True return request def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('*', ('Accept-Language', 'Cookie'), '*'), ('Accept-Language, Cookie', ('*',), '*'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): response = HttpResponse() if initial_vary is not None: response.headers['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response.headers['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' 
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e' ) def test_cache_key_varies_by_url(self): """ get_cache_key keys differ by fully-qualified URL instead of path """ request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') learn_cache_key(request1, HttpResponse()) request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com') learn_cache_key(request2, HttpResponse()) self.assertNotEqual(get_cache_key(request1), get_cache_key(request2)) def test_learn_cache_key(self): request = self.factory.head(self.path) response = HttpResponse() response.headers['Vary'] = 'Pony' # Make sure that the Vary header is added to the key hash learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_patch_cache_control(self): tests = ( # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts (None, {'private': True}, {'private'}), ('', {'private': True}, {'private'}), # no-cache. ('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}), ( 'no-cache=Set-Cookie,no-cache=Link', {'no_cache': 'Custom'}, {'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'}, ), # Test whether private/public attributes are mutually exclusive ('private', {'private': True}, {'private'}), ('private', {'public': True}, {'public'}), ('public', {'public': True}, {'public'}), ('public', {'private': True}, {'private'}), ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}), ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ) cc_delim_re = re.compile(r'\s*,\s*') for initial_cc, newheaders, expected_cc in tests: with self.subTest(initial_cc=initial_cc, newheaders=newheaders): response = HttpResponse() if initial_cc is not None: response.headers['Cache-Control'] = initial_cc patch_cache_control(response, **newheaders) parts = set(cc_delim_re.split(response.headers['Cache-Control'])) self.assertEqual(parts, expected_cc) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix', }, }, ) class PrefixedCacheUtils(CacheUtils): pass @override_settings( CACHE_MIDDLEWARE_SECONDS=60, CACHE_MIDDLEWARE_KEY_PREFIX='test', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) class CacheHEADTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def _set_cache(self, request, msg): return UpdateCacheMiddleware(lambda req: 
HttpResponse(msg))(request) def test_head_caches_correctly(self): test_content = 'test content' request = self.factory.head(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) def test_head_with_cached_get(self): test_content = 'test content' request = self.factory.get(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, LANGUAGES=[ ('en', 'English'), ('es', 'Spanish'), ], ) class CacheI18nTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() @override_settings(USE_I18N=True, USE_TZ=False) def test_cache_key_i18n_translation(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") key2 = get_cache_key(request) self.assertEqual(key, key2) def check_accept_language_vary(self, accept_language, vary, reference_key): request = self.factory.get(self.path) request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response.headers['Vary'] = vary key = learn_cache_key(request, response) key2 = get_cache_key(request) self.assertEqual(key, reference_key) self.assertEqual(key2, reference_key) @override_settings(USE_I18N=True, USE_TZ=False) def test_cache_key_i18n_translation_accept_language(self): lang = translation.get_language() self.assertEqual(lang, 'en') request = self.factory.get(self.path) request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response.headers['Vary'] = 'accept-encoding' key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") self.check_accept_language_vary( 'en-us', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'en-US', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'en-US,en;q=0.8', 'accept-encoding, accept-language, cookie', key ) self.check_accept_language_vary( 'en-US,en;q=0.8,ko;q=0.6', 'accept-language, cookie, accept-encoding', key ) self.check_accept_language_vary( 'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ', 'accept-encoding, cookie, accept-language', key ) self.check_accept_language_vary( 'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4', 'accept-language, accept-encoding, cookie', key ) self.check_accept_language_vary( 'ko;q=1.0,en;q=0.5', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'ko, en', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'ko-KR, en-US', 'accept-encoding, accept-language, cookie', key ) @override_settings(USE_I18N=False, USE_TZ=True) def test_cache_key_i18n_timezone(self): 
request = self.factory.get(self.path) tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False) def test_cache_key_no_i18n(self): request = self.factory.get(self.path) lang = translation.get_language() tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active") self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active") @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_I18N=True, ) def test_middleware(self): def set_cache(request, lang, msg): def get_response(req): return HttpResponse(msg) translation.activate(lang) return UpdateCacheMiddleware(get_response)(request) # cache with non empty request.GET request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # first access, cache must return None self.assertIsNone(get_cache_data) content = 'Check for cache with QUERY_STRING' def get_response(req): return HttpResponse(content) UpdateCacheMiddleware(get_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # cache must return content self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) # different QUERY_STRING, cache must be empty request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) # i18n tests en_message = "Hello world!" es_message = "Hola mundo!" 
request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) # The cache can be recovered self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, en_message.encode()) # change the session language and set content request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'es', es_message) # change again the language translation.activate('en') # retrieve the content from cache get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, en_message.encode()) # change again the language translation.activate('es') get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertEqual(get_cache_data.content, es_message.encode()) # reset the language translation.deactivate() @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, ) def test_middleware_doesnt_cache_streaming_response(self): request = self.factory.get(self.path) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) def get_stream_response(req): return StreamingHttpResponse(['Check for cache with streaming content.']) UpdateCacheMiddleware(get_stream_response)(request) get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request) self.assertIsNone(get_cache_data) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix' }, }, ) class PrefixedCacheI18nTest(CacheI18nTest): pass def hello_world_view(request, value): return HttpResponse('Hello World %s' % value) def csrf_view(request): return HttpResponse(csrf(request)['csrf_token']) @override_settings( CACHE_MIDDLEWARE_ALIAS='other', CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix', CACHE_MIDDLEWARE_SECONDS=30, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other', 'TIMEOUT': '1', }, }, ) class CacheMiddlewareTest(SimpleTestCase): factory = RequestFactory() def setUp(self): self.default_cache = caches['default'] self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() self.other_cache.clear() super().tearDown() def test_constructor(self): """ Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as Middleware vs. usage of CacheMiddleware as view decorator and setting attributes appropriately. """ # If only one argument is passed in construction, it's being used as # middleware. middleware = CacheMiddleware(empty_response) # Now test object attributes against values defined in setUp above self.assertEqual(middleware.cache_timeout, 30) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') self.assertEqual(middleware.cache, self.other_cache) # If more arguments are being passed in construction, it's being used # as a decorator. First, test with "defaults": as_view_decorator = CacheMiddleware(empty_response, cache_alias=None, key_prefix=None) self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 
30 self.assertEqual(as_view_decorator.key_prefix, '') # Value of DEFAULT_CACHE_ALIAS from django.core.cache self.assertEqual(as_view_decorator.cache_alias, 'default') self.assertEqual(as_view_decorator.cache, self.default_cache) # Next, test with custom values: as_view_decorator_with_custom = CacheMiddleware( hello_world_view, cache_timeout=60, cache_alias='other', key_prefix='foo' ) self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60) self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo') self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other') self.assertEqual(as_view_decorator_with_custom.cache, self.other_cache) def test_update_cache_middleware_constructor(self): middleware = UpdateCacheMiddleware(empty_response) self.assertEqual(middleware.cache_timeout, 30) self.assertIsNone(middleware.page_timeout) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') self.assertEqual(middleware.cache, self.other_cache) def test_fetch_cache_middleware_constructor(self): middleware = FetchFromCacheMiddleware(empty_response) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') self.assertEqual(middleware.cache, self.other_cache) def test_middleware(self): middleware = CacheMiddleware(hello_world_view) prefix_middleware = CacheMiddleware(hello_world_view, key_prefix='prefix1') timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1) request = self.factory.get('/view/') # Put the request through the request middleware result = middleware.process_request(request) self.assertIsNone(result) response = hello_world_view(request, '1') # Now put the response through the response middleware response = middleware.process_response(request, response) # Repeating the request should result in a cache hit result = middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') # The same request through a different middleware won't hit result = prefix_middleware.process_request(request) self.assertIsNone(result) # The same request with a timeout _will_ hit result = timeout_middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') def test_view_decorator(self): # decorate the same view with different cache decorators default_view = cache_page(3)(hello_world_view) default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view) explicit_default_view = cache_page(3, cache='default')(hello_world_view) explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view) other_view = cache_page(1, cache='other')(hello_world_view) other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view) request = self.factory.get('/view/') # Request the view once response = default_view(request, '1') self.assertEqual(response.content, b'Hello World 1') # Request again -- hit the cache response = default_view(request, '2') self.assertEqual(response.content, b'Hello World 1') # Requesting the same view with the explicit cache should yield the same result response = explicit_default_view(request, '3') self.assertEqual(response.content, b'Hello World 1') # Requesting with a prefix will hit a different cache key response = explicit_default_with_prefix_view(request, '4') self.assertEqual(response.content, b'Hello World 4') # Hitting the same view again gives a cache hit response = 
explicit_default_with_prefix_view(request, '5') self.assertEqual(response.content, b'Hello World 4') # And going back to the implicit cache will hit the same cache response = default_with_prefix_view(request, '6') self.assertEqual(response.content, b'Hello World 4') # Requesting from an alternate cache won't hit cache response = other_view(request, '7') self.assertEqual(response.content, b'Hello World 7') # But a repeated hit will hit cache response = other_view(request, '8') self.assertEqual(response.content, b'Hello World 7') # And prefixing the alternate cache yields yet another cache entry response = other_with_prefix_view(request, '9') self.assertEqual(response.content, b'Hello World 9') # But if we wait a couple of seconds... time.sleep(2) # ... the default cache will still hit caches['default'] response = default_view(request, '11') self.assertEqual(response.content, b'Hello World 1') # ... the default cache with a prefix will still hit response = default_with_prefix_view(request, '12') self.assertEqual(response.content, b'Hello World 4') # ... the explicit default cache will still hit response = explicit_default_view(request, '13') self.assertEqual(response.content, b'Hello World 1') # ... the explicit default cache with a prefix will still hit response = explicit_default_with_prefix_view(request, '14') self.assertEqual(response.content, b'Hello World 4') # .. but a rapidly expiring cache won't hit response = other_view(request, '15') self.assertEqual(response.content, b'Hello World 15') # .. even if it has a prefix response = other_with_prefix_view(request, '16') self.assertEqual(response.content, b'Hello World 16') def test_cache_page_timeout(self): # Page timeout takes precedence over the "max-age" section of the # "Cache-Control". tests = [ (1, 3), # max_age < page_timeout. (3, 1), # max_age > page_timeout. ] for max_age, page_timeout in tests: with self.subTest(max_age=max_age, page_timeout=page_timeout): view = cache_page(timeout=page_timeout)( cache_control(max_age=max_age)(hello_world_view) ) request = self.factory.get('/view/') response = view(request, '1') self.assertEqual(response.content, b'Hello World 1') time.sleep(1) response = view(request, '2') self.assertEqual( response.content, b'Hello World 1' if page_timeout > max_age else b'Hello World 2', ) cache.clear() def test_cached_control_private_not_cached(self): """Responses with 'Cache-Control: private' are not cached.""" view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view)) request = self.factory.get('/view/') response = view_with_private_cache(request, '1') self.assertEqual(response.content, b'Hello World 1') response = view_with_private_cache(request, '2') self.assertEqual(response.content, b'Hello World 2') def test_sensitive_cookie_not_cached(self): """ Django must prevent caching of responses that set a user-specific (and maybe security sensitive) cookie in response to a cookie-less request. """ request = self.factory.get('/view/') csrf_middleware = CsrfViewMiddleware(csrf_view) csrf_middleware.process_view(request, csrf_view, (), {}) cache_middleware = CacheMiddleware(csrf_middleware) self.assertIsNone(cache_middleware.process_request(request)) cache_middleware(request) # Inserting a CSRF cookie in a cookie-less request prevented caching. 
self.assertIsNone(cache_middleware.process_request(request)) def test_304_response_has_http_caching_headers_but_not_cached(self): original_view = mock.Mock(return_value=HttpResponseNotModified()) view = cache_page(2)(original_view) request = self.factory.get('/view/') # The view shouldn't be cached on the second call. view(request).close() response = view(request) response.close() self.assertEqual(original_view.call_count, 2) self.assertIsInstance(response, HttpResponseNotModified) self.assertIn('Cache-Control', response) self.assertIn('Expires', response) def test_per_thread(self): """The cache instance is different for each thread.""" thread_caches = [] middleware = CacheMiddleware(empty_response) def runner(): thread_caches.append(middleware.cache) for _ in range(2): thread = threading.Thread(target=runner) thread.start() thread.join() self.assertIsNot(thread_caches[0], thread_caches[1]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class TestWithTemplateResponse(SimpleTestCase): """ Tests various headers w/ TemplateResponse. Most are probably redundant since they manipulate the same object anyway but the ETag header is 'special' because it relies on the content being complete (which is not necessarily always the case with a TemplateResponse) """ path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) if initial_vary is not None: response.headers['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response.headers['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) key_prefix = 'localprefix' # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' 
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e' ) class TestMakeTemplateFragmentKey(SimpleTestCase): def test_without_vary_on(self): key = make_template_fragment_key('a.fragment') self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e') def test_with_one_vary_on(self): key = make_template_fragment_key('foo', ['abc']) self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66') def test_with_many_vary_on(self): key = make_template_fragment_key('bar', ['abc', 'def']) self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520') def test_proper_escaping(self): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc') def test_with_ints_vary_on(self): key = make_template_fragment_key('foo', [1, 2, 3, 4, 5]) self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461') def test_with_unicode_vary_on(self): key = make_template_fragment_key('foo', ['42º', '😀']) self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237') def test_long_vary_on(self): key = make_template_fragment_key('foo', ['x' * 10000]) self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a') class CacheHandlerTest(SimpleTestCase): def test_same_instance(self): """ Attempting to retrieve the same alias should yield the same instance. """ cache1 = caches['default'] cache2 = caches['default'] self.assertIs(cache1, cache2) def test_per_thread(self): """ Requesting the same alias from separate threads should yield separate instances. """ c = [] def runner(): c.append(caches['default']) for x in range(2): t = threading.Thread(target=runner) t.start() t.join() self.assertIsNot(c[0], c[1]) def test_nonexistent_alias(self): msg = "The connection 'nonexistent' doesn't exist." with self.assertRaisesMessage(InvalidCacheBackendError, msg): caches['nonexistent'] def test_nonexistent_backend(self): test_caches = CacheHandler({ 'invalid_backend': { 'BACKEND': 'django.nonexistent.NonexistentBackend', }, }) msg = ( "Could not find backend 'django.nonexistent.NonexistentBackend': " "No module named 'django.nonexistent'" ) with self.assertRaisesMessage(InvalidCacheBackendError, msg): test_caches['invalid_backend'] def test_all(self): test_caches = CacheHandler({ 'cache_1': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', }, 'cache_2': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', }, }) self.assertEqual(test_caches.all(initialized_only=True), []) cache_1 = test_caches['cache_1'] self.assertEqual(test_caches.all(initialized_only=True), [cache_1]) self.assertEqual(len(test_caches.all()), 2) # .all() initializes all caches. self.assertEqual(len(test_caches.all(initialized_only=True)), 2) self.assertEqual(test_caches.all(), test_caches.all(initialized_only=True))
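# The PyLibMCCache, PyMemcacheCache, and Redis test classes above are skipped
# unless settings.CACHES contains an entry whose BACKEND points at a running
# server (the classes look configuration up by backend path, not by alias).
# A minimal, illustrative sketch of such entries follows; this constant is not
# used by the tests, and the alias names and server addresses are assumptions
# that should be adjusted to the local environment.
EXAMPLE_LIVE_BACKEND_CACHES = {
    'pymemcache': {
        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
        'LOCATION': '127.0.0.1:11211',
    },
    'redis': {
        'BACKEND': 'django.core.cache.backends.redis.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379',
    },
}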
import asyncio from django.core.cache import CacheKeyWarning, cache from django.test import SimpleTestCase, override_settings from .tests import KEY_ERRORS_WITH_MEMCACHED_MSG @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class AsyncDummyCacheTests(SimpleTestCase): async def test_simple(self): """Dummy cache backend ignores cache set calls.""" await cache.aset('key', 'value') self.assertIsNone(await cache.aget('key')) async def test_aadd(self): """Add doesn't do anything in dummy cache backend.""" self.assertIs(await cache.aadd('key', 'value'), True) self.assertIs(await cache.aadd('key', 'new_value'), True) self.assertIsNone(await cache.aget('key')) async def test_non_existent(self): """Nonexistent keys aren't found in the dummy cache backend.""" self.assertIsNone(await cache.aget('does_not_exist')) self.assertEqual(await cache.aget('does_not_exist', 'default'), 'default') async def test_aget_many(self): """aget_many() returns nothing for the dummy cache backend.""" await cache.aset_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(await cache.aget_many(['a', 'c', 'd']), {}) self.assertEqual(await cache.aget_many(['a', 'b', 'e']), {}) async def test_aget_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): await cache.aget_many(['key with spaces']) async def test_adelete(self): """ Cache deletion is transparently ignored on the dummy cache backend. """ await cache.aset_many({'key1': 'spam', 'key2': 'eggs'}) self.assertIsNone(await cache.aget('key1')) self.assertIs(await cache.adelete('key1'), False) self.assertIsNone(await cache.aget('key1')) self.assertIsNone(await cache.aget('key2')) async def test_ahas_key(self): """ahas_key() doesn't ever return True for the dummy cache backend.""" await cache.aset('hello1', 'goodbye1') self.assertIs(await cache.ahas_key('hello1'), False) self.assertIs(await cache.ahas_key('goodbye1'), False) async def test_aincr(self): """Dummy cache values can't be incremented.""" await cache.aset('answer', 42) with self.assertRaises(ValueError): await cache.aincr('answer') with self.assertRaises(ValueError): await cache.aincr('does_not_exist') with self.assertRaises(ValueError): await cache.aincr('does_not_exist', -1) async def test_adecr(self): """Dummy cache values can't be decremented.""" await cache.aset('answer', 42) with self.assertRaises(ValueError): await cache.adecr('answer') with self.assertRaises(ValueError): await cache.adecr('does_not_exist') with self.assertRaises(ValueError): await cache.adecr('does_not_exist', -1) async def test_atouch(self): self.assertIs(await cache.atouch('key'), False) async def test_data_types(self): """All data types are ignored equally by the dummy cache.""" def f(): return 42 class C: def m(n): return 24 data = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } await cache.aset('data', data) self.assertIsNone(await cache.aget('data')) async def test_expiration(self): """Expiration has no effect on the dummy cache.""" await cache.aset('expire1', 'very quickly', 1) await cache.aset('expire2', 'very quickly', 1) await cache.aset('expire3', 'very quickly', 1) await asyncio.sleep(2) self.assertIsNone(await cache.aget('expire1')) self.assertIs(await cache.aadd('expire2', 'new_value'), True) self.assertIsNone(await cache.aget('expire2')) self.assertIs(await cache.ahas_key('expire3'), False) 
async def test_unicode(self): """Unicode values are ignored by the dummy cache.""" tests = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1}, } for key, value in tests.items(): with self.subTest(key=key): await cache.aset(key, value) self.assertIsNone(await cache.aget(key)) async def test_aset_many(self): """aset_many() does nothing for the dummy cache backend.""" self.assertEqual(await cache.aset_many({'a': 1, 'b': 2}), []) self.assertEqual( await cache.aset_many({'a': 1, 'b': 2}, timeout=2, version='1'), [], ) async def test_aset_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): await cache.aset_many({'key with spaces': 'foo'}) async def test_adelete_many(self): """adelete_many() does nothing for the dummy cache backend.""" await cache.adelete_many(['a', 'b']) async def test_adelete_many_invalid_key(self): msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces' with self.assertWarnsMessage(CacheKeyWarning, msg): await cache.adelete_many({'key with spaces': 'foo'}) async def test_aclear(self): """aclear() does nothing for the dummy cache backend.""" await cache.aclear() async def test_aclose(self): """aclose() does nothing for the dummy cache backend.""" await cache.aclose() async def test_aincr_version(self): """Dummy cache versions can't be incremented.""" await cache.aset('answer', 42) with self.assertRaises(ValueError): await cache.aincr_version('answer') with self.assertRaises(ValueError): await cache.aincr_version('answer', version=2) with self.assertRaises(ValueError): await cache.aincr_version('does_not_exist') async def test_adecr_version(self): """Dummy cache versions can't be decremented.""" await cache.aset('answer', 42) with self.assertRaises(ValueError): await cache.adecr_version('answer') with self.assertRaises(ValueError): await cache.adecr_version('answer', version=2) with self.assertRaises(ValueError): await cache.adecr_version('does_not_exist') async def test_aget_or_set(self): self.assertEqual(await cache.aget_or_set('key', 'default'), 'default') self.assertIsNone(await cache.aget_or_set('key', None)) async def test_aget_or_set_callable(self): def my_callable(): return 'default' self.assertEqual(await cache.aget_or_set('key', my_callable), 'default') self.assertEqual(await cache.aget_or_set('key', my_callable()), 'default')
import os import sys import unittest from importlib import import_module from zipimport import zipimporter from django.test import SimpleTestCase, modify_settings from django.test.utils import extend_sys_path from django.utils.module_loading import ( autodiscover_modules, import_string, module_has_submodule, ) from django.utils.version import PY310 class DefaultLoader(unittest.TestCase): def test_loader(self): "Normal module existence can be tested" test_module = import_module('utils_tests.test_module') test_no_submodule = import_module( 'utils_tests.test_no_submodule') # An importable child self.assertTrue(module_has_submodule(test_module, 'good_module')) mod = import_module('utils_tests.test_module.good_module') self.assertEqual(mod.content, 'Good Module') # A child that exists, but will generate an import error if loaded self.assertTrue(module_has_submodule(test_module, 'bad_module')) with self.assertRaises(ImportError): import_module('utils_tests.test_module.bad_module') # A child that doesn't exist self.assertFalse(module_has_submodule(test_module, 'no_such_module')) with self.assertRaises(ImportError): import_module('utils_tests.test_module.no_such_module') # A child that doesn't exist, but is the name of a package on the path self.assertFalse(module_has_submodule(test_module, 'django')) with self.assertRaises(ImportError): import_module('utils_tests.test_module.django') # Don't be confused by caching of import misses import types # NOQA: causes attempted import of utils_tests.types self.assertFalse(module_has_submodule(sys.modules['utils_tests'], 'types')) # A module which doesn't have a __path__ (so no submodules) self.assertFalse(module_has_submodule(test_no_submodule, 'anything')) with self.assertRaises(ImportError): import_module('utils_tests.test_no_submodule.anything') def test_has_sumbodule_with_dotted_path(self): """Nested module existence can be tested.""" test_module = import_module('utils_tests.test_module') # A grandchild that exists. self.assertIs(module_has_submodule(test_module, 'child_module.grandchild_module'), True) # A grandchild that doesn't exist. self.assertIs(module_has_submodule(test_module, 'child_module.no_such_module'), False) # A grandchild whose parent doesn't exist. self.assertIs(module_has_submodule(test_module, 'no_such_module.grandchild_module'), False) # A grandchild whose parent is not a package. 
self.assertIs(module_has_submodule(test_module, 'good_module.no_such_module'), False) class EggLoader(unittest.TestCase): def setUp(self): self.egg_dir = '%s/eggs' % os.path.dirname(__file__) def tearDown(self): sys.path_importer_cache.clear() sys.modules.pop('egg_module.sub1.sub2.bad_module', None) sys.modules.pop('egg_module.sub1.sub2.good_module', None) sys.modules.pop('egg_module.sub1.sub2', None) sys.modules.pop('egg_module.sub1', None) sys.modules.pop('egg_module.bad_module', None) sys.modules.pop('egg_module.good_module', None) sys.modules.pop('egg_module', None) def test_shallow_loader(self): "Module existence can be tested inside eggs" egg_name = '%s/test_egg.egg' % self.egg_dir with extend_sys_path(egg_name): egg_module = import_module('egg_module') # An importable child self.assertTrue(module_has_submodule(egg_module, 'good_module')) mod = import_module('egg_module.good_module') self.assertEqual(mod.content, 'Good Module') # A child that exists, but will generate an import error if loaded self.assertTrue(module_has_submodule(egg_module, 'bad_module')) with self.assertRaises(ImportError): import_module('egg_module.bad_module') # A child that doesn't exist self.assertFalse(module_has_submodule(egg_module, 'no_such_module')) with self.assertRaises(ImportError): import_module('egg_module.no_such_module') def test_deep_loader(self): "Modules deep inside an egg can still be tested for existence" egg_name = '%s/test_egg.egg' % self.egg_dir with extend_sys_path(egg_name): egg_module = import_module('egg_module.sub1.sub2') # An importable child self.assertTrue(module_has_submodule(egg_module, 'good_module')) mod = import_module('egg_module.sub1.sub2.good_module') self.assertEqual(mod.content, 'Deep Good Module') # A child that exists, but will generate an import error if loaded self.assertTrue(module_has_submodule(egg_module, 'bad_module')) with self.assertRaises(ImportError): import_module('egg_module.sub1.sub2.bad_module') # A child that doesn't exist self.assertFalse(module_has_submodule(egg_module, 'no_such_module')) with self.assertRaises(ImportError): import_module('egg_module.sub1.sub2.no_such_module') class ModuleImportTests(SimpleTestCase): def test_import_string(self): cls = import_string('django.utils.module_loading.import_string') self.assertEqual(cls, import_string) # Test exceptions raised with self.assertRaises(ImportError): import_string('no_dots_in_path') msg = 'Module "utils_tests" does not define a "unexistent" attribute' with self.assertRaisesMessage(ImportError, msg): import_string('utils_tests.unexistent') @modify_settings(INSTALLED_APPS={'append': 'utils_tests.test_module'}) class AutodiscoverModulesTestCase(SimpleTestCase): def tearDown(self): sys.path_importer_cache.clear() sys.modules.pop('utils_tests.test_module.another_bad_module', None) sys.modules.pop('utils_tests.test_module.another_good_module', None) sys.modules.pop('utils_tests.test_module.bad_module', None) sys.modules.pop('utils_tests.test_module.good_module', None) sys.modules.pop('utils_tests.test_module', None) def test_autodiscover_modules_found(self): autodiscover_modules('good_module') def test_autodiscover_modules_not_found(self): autodiscover_modules('missing_module') def test_autodiscover_modules_found_but_bad_module(self): with self.assertRaisesMessage(ImportError, "No module named 'a_package_name_that_does_not_exist'"): autodiscover_modules('bad_module') def test_autodiscover_modules_several_one_bad_module(self): with self.assertRaisesMessage(ImportError, "No module named 
'a_package_name_that_does_not_exist'"): autodiscover_modules('good_module', 'bad_module') def test_autodiscover_modules_several_found(self): autodiscover_modules('good_module', 'another_good_module') def test_autodiscover_modules_several_found_with_registry(self): from .test_module import site autodiscover_modules('good_module', 'another_good_module', register_to=site) self.assertEqual(site._registry, {'lorem': 'ipsum'}) def test_validate_registry_keeps_intact(self): from .test_module import site with self.assertRaisesMessage(Exception, "Some random exception."): autodiscover_modules('another_bad_module', register_to=site) self.assertEqual(site._registry, {}) def test_validate_registry_resets_after_erroneous_module(self): from .test_module import site with self.assertRaisesMessage(Exception, "Some random exception."): autodiscover_modules('another_good_module', 'another_bad_module', register_to=site) self.assertEqual(site._registry, {'lorem': 'ipsum'}) def test_validate_registry_resets_after_missing_module(self): from .test_module import site autodiscover_modules('does_not_exist', 'another_good_module', 'does_not_exist2', register_to=site) self.assertEqual(site._registry, {'lorem': 'ipsum'}) if PY310: class TestFinder: def __init__(self, *args, **kwargs): self.importer = zipimporter(*args, **kwargs) def find_spec(self, path, target=None): return self.importer.find_spec(path, target) else: class TestFinder: def __init__(self, *args, **kwargs): self.importer = zipimporter(*args, **kwargs) def find_module(self, path): importer = self.importer.find_module(path) if importer is None: return return TestLoader(importer) class TestLoader: def __init__(self, importer): self.importer = importer def load_module(self, name): mod = self.importer.load_module(name) mod.__loader__ = self return mod class CustomLoader(EggLoader): """The Custom Loader test is exactly the same as the EggLoader, but it uses a custom defined Loader class. Although the EggLoader combines both functions into one class, this isn't required. """ def setUp(self): super().setUp() sys.path_hooks.insert(0, TestFinder) sys.path_importer_cache.clear() def tearDown(self): super().tearDown() sys.path_hooks.pop(0)
import datetime import unittest from unittest import mock try: import pytz except ImportError: pytz = None try: import zoneinfo except ImportError: from backports import zoneinfo from django.test import SimpleTestCase, ignore_warnings, override_settings from django.utils import timezone from django.utils.deprecation import RemovedInDjango50Warning PARIS_ZI = zoneinfo.ZoneInfo('Europe/Paris') EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok UTC = datetime.timezone.utc HAS_PYTZ = pytz is not None if not HAS_PYTZ: CET = None PARIS_IMPLS = (PARIS_ZI,) needs_pytz = unittest.skip('Test requires pytz') else: CET = pytz.timezone('Europe/Paris') PARIS_IMPLS = (PARIS_ZI, CET) def needs_pytz(f): return f class TimezoneTests(SimpleTestCase): def setUp(self): # RemovedInDjango50Warning timezone.get_default_timezone.cache_clear() def tearDown(self): # RemovedInDjango50Warning timezone.get_default_timezone.cache_clear() def test_default_timezone_is_zoneinfo(self): self.assertIsInstance(timezone.get_default_timezone(), zoneinfo.ZoneInfo) @needs_pytz @ignore_warnings(category=RemovedInDjango50Warning) @override_settings(USE_DEPRECATED_PYTZ=True) def test_setting_allows_fallback_to_pytz(self): self.assertIsInstance(timezone.get_default_timezone(), pytz.BaseTzInfo) def test_now(self): with override_settings(USE_TZ=True): self.assertTrue(timezone.is_aware(timezone.now())) with override_settings(USE_TZ=False): self.assertTrue(timezone.is_naive(timezone.now())) def test_localdate(self): naive = datetime.datetime(2015, 1, 1, 0, 0, 1) with self.assertRaisesMessage(ValueError, 'localtime() cannot be applied to a naive datetime'): timezone.localdate(naive) with self.assertRaisesMessage(ValueError, 'localtime() cannot be applied to a naive datetime'): timezone.localdate(naive, timezone=EAT) aware = datetime.datetime(2015, 1, 1, 0, 0, 1, tzinfo=ICT) self.assertEqual(timezone.localdate(aware, timezone=EAT), datetime.date(2014, 12, 31)) with timezone.override(EAT): self.assertEqual(timezone.localdate(aware), datetime.date(2014, 12, 31)) with mock.patch('django.utils.timezone.now', return_value=aware): self.assertEqual(timezone.localdate(timezone=EAT), datetime.date(2014, 12, 31)) with timezone.override(EAT): self.assertEqual(timezone.localdate(), datetime.date(2014, 12, 31)) def test_override(self): default = timezone.get_default_timezone() try: timezone.activate(ICT) with timezone.override(EAT): self.assertIs(EAT, timezone.get_current_timezone()) self.assertIs(ICT, timezone.get_current_timezone()) with timezone.override(None): self.assertIs(default, timezone.get_current_timezone()) self.assertIs(ICT, timezone.get_current_timezone()) timezone.deactivate() with timezone.override(EAT): self.assertIs(EAT, timezone.get_current_timezone()) self.assertIs(default, timezone.get_current_timezone()) with timezone.override(None): self.assertIs(default, timezone.get_current_timezone()) self.assertIs(default, timezone.get_current_timezone()) finally: timezone.deactivate() def test_override_decorator(self): default = timezone.get_default_timezone() @timezone.override(EAT) def func_tz_eat(): self.assertIs(EAT, timezone.get_current_timezone()) @timezone.override(None) def func_tz_none(): self.assertIs(default, timezone.get_current_timezone()) try: timezone.activate(ICT) func_tz_eat() self.assertIs(ICT, timezone.get_current_timezone()) func_tz_none() self.assertIs(ICT, timezone.get_current_timezone()) timezone.deactivate() func_tz_eat() self.assertIs(default, 
timezone.get_current_timezone()) func_tz_none() self.assertIs(default, timezone.get_current_timezone()) finally: timezone.deactivate() def test_override_string_tz(self): with timezone.override('Asia/Bangkok'): self.assertEqual(timezone.get_current_timezone_name(), 'Asia/Bangkok') def test_override_fixed_offset(self): with timezone.override(datetime.timezone(datetime.timedelta(), 'tzname')): self.assertEqual(timezone.get_current_timezone_name(), 'tzname') def test_activate_invalid_timezone(self): with self.assertRaisesMessage(ValueError, 'Invalid timezone: None'): timezone.activate(None) def test_is_aware(self): self.assertTrue(timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))) self.assertFalse(timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30))) def test_is_naive(self): self.assertFalse(timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))) self.assertTrue(timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30))) def test_make_aware(self): self.assertEqual( timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT), datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) with self.assertRaises(ValueError): timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT) def test_make_naive(self): self.assertEqual( timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT), datetime.datetime(2011, 9, 1, 13, 20, 30)) self.assertEqual( timezone.make_naive(datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), EAT), datetime.datetime(2011, 9, 1, 13, 20, 30)) with self.assertRaisesMessage(ValueError, 'make_naive() cannot be applied to a naive datetime'): timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT) def test_make_naive_no_tz(self): self.assertEqual( timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)), datetime.datetime(2011, 9, 1, 5, 20, 30) ) def test_make_aware_no_tz(self): self.assertEqual( timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30)), datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=timezone.get_fixed_timezone(-300)) ) def test_make_aware2(self): CEST = datetime.timezone(datetime.timedelta(hours=2), 'CEST') for tz in PARIS_IMPLS: with self.subTest(repr(tz)): self.assertEqual( timezone.make_aware(datetime.datetime(2011, 9, 1, 12, 20, 30), tz), datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=CEST)) if HAS_PYTZ: with self.assertRaises(ValueError): timezone.make_aware(CET.localize(datetime.datetime(2011, 9, 1, 12, 20, 30)), CET) with self.assertRaises(ValueError): timezone.make_aware(datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI) @needs_pytz def test_make_naive_pytz(self): self.assertEqual( timezone.make_naive(CET.localize(datetime.datetime(2011, 9, 1, 12, 20, 30)), CET), datetime.datetime(2011, 9, 1, 12, 20, 30)) self.assertEqual( timezone.make_naive( pytz.timezone("Asia/Bangkok").localize(datetime.datetime(2011, 9, 1, 17, 20, 30)), CET ), datetime.datetime(2011, 9, 1, 12, 20, 30)) with self.assertRaisesMessage(ValueError, 'make_naive() cannot be applied to a naive datetime'): timezone.make_naive(datetime.datetime(2011, 9, 1, 12, 20, 30), CET) def test_make_naive_zoneinfo(self): self.assertEqual( timezone.make_naive(datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI), datetime.datetime(2011, 9, 1, 12, 20, 30) ) self.assertEqual( timezone.make_naive(datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1, tzinfo=PARIS_ZI), PARIS_ZI), datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1) ) @needs_pytz 
@ignore_warnings(category=RemovedInDjango50Warning) def test_make_aware_pytz_ambiguous(self): # 2:30 happens twice, once before DST ends and once after ambiguous = datetime.datetime(2015, 10, 25, 2, 30) with self.assertRaises(pytz.AmbiguousTimeError): timezone.make_aware(ambiguous, timezone=CET) std = timezone.make_aware(ambiguous, timezone=CET, is_dst=False) dst = timezone.make_aware(ambiguous, timezone=CET, is_dst=True) self.assertEqual(std - dst, datetime.timedelta(hours=1)) self.assertEqual(std.tzinfo.utcoffset(std), datetime.timedelta(hours=1)) self.assertEqual(dst.tzinfo.utcoffset(dst), datetime.timedelta(hours=2)) def test_make_aware_zoneinfo_ambiguous(self): # 2:30 happens twice, once before DST ends and once after ambiguous = datetime.datetime(2015, 10, 25, 2, 30) std = timezone.make_aware(ambiguous.replace(fold=1), timezone=PARIS_ZI) dst = timezone.make_aware(ambiguous, timezone=PARIS_ZI) self.assertEqual( std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1) ) self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1)) self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2)) @needs_pytz @ignore_warnings(category=RemovedInDjango50Warning) def test_make_aware_pytz_non_existent(self): # 2:30 never happened due to DST non_existent = datetime.datetime(2015, 3, 29, 2, 30) with self.assertRaises(pytz.NonExistentTimeError): timezone.make_aware(non_existent, timezone=CET) std = timezone.make_aware(non_existent, timezone=CET, is_dst=False) dst = timezone.make_aware(non_existent, timezone=CET, is_dst=True) self.assertEqual(std - dst, datetime.timedelta(hours=1)) self.assertEqual(std.tzinfo.utcoffset(std), datetime.timedelta(hours=1)) self.assertEqual(dst.tzinfo.utcoffset(dst), datetime.timedelta(hours=2)) def test_make_aware_zoneinfo_non_existent(self): # 2:30 never happened due to DST non_existent = datetime.datetime(2015, 3, 29, 2, 30) std = timezone.make_aware(non_existent, PARIS_ZI) dst = timezone.make_aware(non_existent.replace(fold=1), PARIS_ZI) self.assertEqual( std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1) ) self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1)) self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2)) def test_make_aware_is_dst_deprecation_warning(self): msg = ( 'The is_dst argument to make_aware(), used by the Trunc() ' 'database functions and QuerySet.datetimes(), is deprecated as it ' 'has no effect with zoneinfo time zones.' ) with self.assertRaisesMessage(RemovedInDjango50Warning, msg): timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT, is_dst=True) def test_get_timezone_name(self): """ The _get_timezone_name() helper must return the offset for fixed offset timezones, for usage with Trunc DB functions. The datetime.timezone examples show the current behavior. """ tests = [ # datetime.timezone, fixed offset with and without `name`. (datetime.timezone(datetime.timedelta(hours=10)), 'UTC+10:00'), (datetime.timezone(datetime.timedelta(hours=10), name='Etc/GMT-10'), 'Etc/GMT-10'), # zoneinfo, named and fixed offset. (zoneinfo.ZoneInfo('Europe/Madrid'), 'Europe/Madrid'), (zoneinfo.ZoneInfo('Etc/GMT-10'), '+10'), ] if HAS_PYTZ: tests += [ # pytz, named and fixed offset. 
(pytz.timezone('Europe/Madrid'), 'Europe/Madrid'), (pytz.timezone('Etc/GMT-10'), '+10'), ] for tz, expected in tests: with self.subTest(tz=tz, expected=expected): self.assertEqual(timezone._get_timezone_name(tz), expected) def test_get_default_timezone(self): self.assertEqual(timezone.get_default_timezone_name(), 'America/Chicago') def test_fixedoffset_timedelta(self): delta = datetime.timedelta(hours=1) self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta) def test_fixedoffset_negative_timedelta(self): delta = datetime.timedelta(hours=-2) self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta)
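# Illustrative sketch (not part of the dumped test file above): a minimal,
# hedged example of the django.utils.timezone helpers that TimezoneTests
# exercises. It passes explicit timezones throughout, so it does not depend
# on the project's TIME_ZONE setting; the values mirror the assertions above.
import datetime

from django.utils import timezone

EAT = timezone.get_fixed_timezone(180)  # UTC+03:00, as in the tests above

naive = datetime.datetime(2011, 9, 1, 13, 20, 30)
aware = timezone.make_aware(naive, EAT)            # attach tzinfo=EAT
assert timezone.is_aware(aware)
assert timezone.make_naive(aware, EAT) == naive    # round-trips back to naive

with timezone.override(EAT):                       # temporarily change the active timezone
    assert timezone.get_current_timezone() is EAT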
c907313fb74d554ae1ef7a53c95877403b61bd0d40ca90c3e3f0607a48896586
from django.test import SimpleTestCase from django.utils.deconstruct import deconstructible from django.utils.version import get_docs_version @deconstructible() class DeconstructibleClass: pass class DeconstructibleChildClass(DeconstructibleClass): pass @deconstructible( path='utils_tests.deconstructible_classes.DeconstructibleWithPathClass' ) class DeconstructibleWithPathClass: pass class DeconstructibleWithPathChildClass(DeconstructibleWithPathClass): pass @deconstructible( path='utils_tests.deconstructible_classes.DeconstructibleInvalidPathClass', ) class DeconstructibleInvalidPathClass: pass class DeconstructibleInvalidPathChildClass(DeconstructibleInvalidPathClass): pass class DeconstructibleTests(SimpleTestCase): def test_deconstruct(self): obj = DeconstructibleClass('arg', key='value') path, args, kwargs = obj.deconstruct() self.assertEqual(path, 'utils_tests.test_deconstruct.DeconstructibleClass') self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'key': 'value'}) def test_deconstruct_with_path(self): obj = DeconstructibleWithPathClass('arg', key='value') path, args, kwargs = obj.deconstruct() self.assertEqual( path, 'utils_tests.deconstructible_classes.DeconstructibleWithPathClass', ) self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'key': 'value'}) def test_deconstruct_child(self): obj = DeconstructibleChildClass('arg', key='value') path, args, kwargs = obj.deconstruct() self.assertEqual(path, 'utils_tests.test_deconstruct.DeconstructibleChildClass') self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'key': 'value'}) def test_deconstruct_child_with_path(self): obj = DeconstructibleWithPathChildClass('arg', key='value') path, args, kwargs = obj.deconstruct() self.assertEqual( path, 'utils_tests.test_deconstruct.DeconstructibleWithPathChildClass', ) self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'key': 'value'}) def test_invalid_path(self): obj = DeconstructibleInvalidPathClass() docs_version = get_docs_version() msg = ( f'Could not find object DeconstructibleInvalidPathClass in ' f'utils_tests.deconstructible_classes.\n' f'Please note that you cannot serialize things like inner ' f'classes. Please move the object into the main module body to ' f'use migrations.\n' f'For more information, see ' f'https://docs.djangoproject.com/en/{docs_version}/topics/' f'migrations/#serializing-values' ) with self.assertRaisesMessage(ValueError, msg): obj.deconstruct() def test_parent_invalid_path(self): obj = DeconstructibleInvalidPathChildClass('arg', key='value') path, args, kwargs = obj.deconstruct() self.assertEqual( path, 'utils_tests.test_deconstruct.DeconstructibleInvalidPathChildClass', ) self.assertEqual(args, ('arg',)) self.assertEqual(kwargs, {'key': 'value'})
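# Illustrative sketch (not part of the dumped test file above): how the
# @deconstructible decorator covered by DeconstructibleTests is typically
# used. The MaxSize class and its 'limit' argument are made up for the example.
from django.utils.deconstruct import deconstructible


@deconstructible
class MaxSize:
    def __init__(self, limit):
        self.limit = limit


obj = MaxSize(10)
path, args, kwargs = obj.deconstruct()
# deconstruct() returns the import path of MaxSize plus the constructor
# arguments it was called with; this triple is what the migration serializer
# uses to recreate the object.
assert args == (10,) and kwargs == {}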
90afe860dbca56d3e7c4730859ad773deabdc560257a7c952f34f665cad99812
import unittest from datetime import date, datetime, time, timedelta from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.timezone import get_fixed_timezone class DateParseTests(unittest.TestCase): def test_parse_date(self): # Valid inputs self.assertEqual(parse_date('2012-04-23'), date(2012, 4, 23)) self.assertEqual(parse_date('2012-4-9'), date(2012, 4, 9)) # Invalid inputs self.assertIsNone(parse_date('20120423')) with self.assertRaises(ValueError): parse_date('2012-04-56') def test_parse_time(self): # Valid inputs self.assertEqual(parse_time('09:15:00'), time(9, 15)) self.assertEqual(parse_time('10:10'), time(10, 10)) self.assertEqual(parse_time('10:20:30.400'), time(10, 20, 30, 400000)) self.assertEqual(parse_time('10:20:30,400'), time(10, 20, 30, 400000)) self.assertEqual(parse_time('4:8:16'), time(4, 8, 16)) # Time zone offset is ignored. self.assertEqual(parse_time('00:05:23+04:00'), time(0, 5, 23)) # Invalid inputs self.assertIsNone(parse_time('00:05:')) self.assertIsNone(parse_time('00:05:23,')) self.assertIsNone(parse_time('00:05:23+')) self.assertIsNone(parse_time('00:05:23+25:00')) self.assertIsNone(parse_time('4:18:101')) self.assertIsNone(parse_time('091500')) with self.assertRaises(ValueError): parse_time('09:15:90') def test_parse_datetime(self): valid_inputs = ( ('2012-04-23T09:15:00', datetime(2012, 4, 23, 9, 15)), ('2012-4-9 4:8:16', datetime(2012, 4, 9, 4, 8, 16)), ('2012-04-23T09:15:00Z', datetime(2012, 4, 23, 9, 15, 0, 0, get_fixed_timezone(0))), ('2012-4-9 4:8:16-0320', datetime(2012, 4, 9, 4, 8, 16, 0, get_fixed_timezone(-200))), ('2012-04-23T10:20:30.400+02:30', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150))), ('2012-04-23T10:20:30.400+02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(120))), ('2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120))), ('2012-04-23T10:20:30,400-02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120))), ('2012-04-23T10:20:30.400 +0230', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150))), ('2012-04-23T10:20:30,400 +00', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(0))), ('2012-04-23T10:20:30 -02', datetime(2012, 4, 23, 10, 20, 30, 0, get_fixed_timezone(-120))), ) for source, expected in valid_inputs: with self.subTest(source=source): self.assertEqual(parse_datetime(source), expected) # Invalid inputs self.assertIsNone(parse_datetime('20120423091500')) with self.assertRaises(ValueError): parse_datetime('2012-04-56T09:15:90') class DurationParseTests(unittest.TestCase): def test_parse_python_format(self): timedeltas = [ timedelta(days=4, minutes=15, seconds=30, milliseconds=100), # fractions of seconds timedelta(hours=10, minutes=15, seconds=30), # hours, minutes, seconds timedelta(days=4, minutes=15, seconds=30), # multiple days timedelta(days=1, minutes=00, seconds=00), # single day timedelta(days=-4, minutes=15, seconds=30), # negative durations timedelta(minutes=15, seconds=30), # minute & seconds timedelta(seconds=30), # seconds ] for delta in timedeltas: with self.subTest(delta=delta): self.assertEqual(parse_duration(format(delta)), delta) def test_parse_postgresql_format(self): test_values = ( ('1 day', timedelta(1)), ('-1 day', timedelta(-1)), ('1 day 0:00:01', timedelta(days=1, seconds=1)), ('1 day -0:00:01', timedelta(days=1, seconds=-1)), ('-1 day -0:00:01', timedelta(days=-1, seconds=-1)), ('-1 day +0:00:01', timedelta(days=-1, seconds=1)), 
('4 days 0:15:30.1', timedelta(days=4, minutes=15, seconds=30, milliseconds=100)), ('4 days 0:15:30.0001', timedelta(days=4, minutes=15, seconds=30, microseconds=100)), ('-4 days -15:00:30', timedelta(days=-4, hours=-15, seconds=-30)), ) for source, expected in test_values: with self.subTest(source=source): self.assertEqual(parse_duration(source), expected) def test_seconds(self): self.assertEqual(parse_duration('30'), timedelta(seconds=30)) def test_minutes_seconds(self): self.assertEqual(parse_duration('15:30'), timedelta(minutes=15, seconds=30)) self.assertEqual(parse_duration('5:30'), timedelta(minutes=5, seconds=30)) def test_hours_minutes_seconds(self): self.assertEqual(parse_duration('10:15:30'), timedelta(hours=10, minutes=15, seconds=30)) self.assertEqual(parse_duration('1:15:30'), timedelta(hours=1, minutes=15, seconds=30)) self.assertEqual(parse_duration('100:200:300'), timedelta(hours=100, minutes=200, seconds=300)) def test_days(self): self.assertEqual(parse_duration('4 15:30'), timedelta(days=4, minutes=15, seconds=30)) self.assertEqual(parse_duration('4 10:15:30'), timedelta(days=4, hours=10, minutes=15, seconds=30)) def test_fractions_of_seconds(self): test_values = ( ('15:30.1', timedelta(minutes=15, seconds=30, milliseconds=100)), ('15:30.01', timedelta(minutes=15, seconds=30, milliseconds=10)), ('15:30.001', timedelta(minutes=15, seconds=30, milliseconds=1)), ('15:30.0001', timedelta(minutes=15, seconds=30, microseconds=100)), ('15:30.00001', timedelta(minutes=15, seconds=30, microseconds=10)), ('15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)), ('15:30,000001', timedelta(minutes=15, seconds=30, microseconds=1)), ) for source, expected in test_values: with self.subTest(source=source): self.assertEqual(parse_duration(source), expected) def test_negative(self): test_values = ( ('-4 15:30', timedelta(days=-4, minutes=15, seconds=30)), ('-172800', timedelta(days=-2)), ('-15:30', timedelta(minutes=-15, seconds=-30)), ('-1:15:30', timedelta(hours=-1, minutes=-15, seconds=-30)), ('-30.1', timedelta(seconds=-30, milliseconds=-100)), ('-30,1', timedelta(seconds=-30, milliseconds=-100)), ('-00:01:01', timedelta(minutes=-1, seconds=-1)), ('-01:01', timedelta(seconds=-61)), ('-01:-01', None), ) for source, expected in test_values: with self.subTest(source=source): self.assertEqual(parse_duration(source), expected) def test_iso_8601(self): test_values = ( ('P4Y', None), ('P4M', None), ('P4W', None), ('P4D', timedelta(days=4)), ('-P1D', timedelta(days=-1)), ('P0.5D', timedelta(hours=12)), ('P0,5D', timedelta(hours=12)), ('-P0.5D', timedelta(hours=-12)), ('-P0,5D', timedelta(hours=-12)), ('PT5H', timedelta(hours=5)), ('-PT5H', timedelta(hours=-5)), ('PT5M', timedelta(minutes=5)), ('-PT5M', timedelta(minutes=-5)), ('PT5S', timedelta(seconds=5)), ('-PT5S', timedelta(seconds=-5)), ('PT0.000005S', timedelta(microseconds=5)), ('PT0,000005S', timedelta(microseconds=5)), ('-PT0.000005S', timedelta(microseconds=-5)), ('-PT0,000005S', timedelta(microseconds=-5)), ('-P4DT1H', timedelta(days=-4, hours=-1)), # Invalid separators for decimal fractions. ('P3(3D', None), ('PT3)3H', None), ('PT3|3M', None), ('PT3/3S', None), ) for source, expected in test_values: with self.subTest(source=source): self.assertEqual(parse_duration(source), expected)
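# Illustrative sketch (not part of the dumped test file above): the
# django.utils.dateparse helpers covered by DateParseTests and
# DurationParseTests, shown on inputs taken from the assertions above.
from datetime import date, timedelta

from django.utils.dateparse import parse_date, parse_duration

assert parse_date('2012-04-23') == date(2012, 4, 23)
assert parse_date('20120423') is None                 # unrecognised format -> None
assert parse_duration('4 10:15:30') == timedelta(days=4, hours=10, minutes=15, seconds=30)
assert parse_duration('P4D') == timedelta(days=4)     # ISO 8601 day period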
c2db1e1d1cb1d691e2cc4bbb430babe2c3af90fddb09256ff819528042250d20
import os from datetime import datetime from django.test import SimpleTestCase from django.utils.functional import lazystr from django.utils.html import ( conditional_escape, escape, escapejs, format_html, html_safe, json_script, linebreaks, smart_urlquote, strip_spaces_between_tags, strip_tags, urlize, ) from django.utils.safestring import mark_safe class TestUtilsHtml(SimpleTestCase): def check_output(self, function, value, output=None): """ function(value) equals output. If output is None, function(value) equals value. """ if output is None: output = value self.assertEqual(function(value), output) def test_escape(self): items = ( ('&', '&amp;'), ('<', '&lt;'), ('>', '&gt;'), ('"', '&quot;'), ("'", '&#x27;'), ) # Substitution patterns for testing the above items. patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb") for value, output in items: with self.subTest(value=value, output=output): for pattern in patterns: with self.subTest(value=value, output=output, pattern=pattern): self.check_output(escape, pattern % value, pattern % output) self.check_output(escape, lazystr(pattern % value), pattern % output) # Check repeated values. self.check_output(escape, value * 2, output * 2) # Verify it doesn't double replace &. self.check_output(escape, '<&', '&lt;&amp;') def test_format_html(self): self.assertEqual( format_html( "{} {} {third} {fourth}", "< Dangerous >", mark_safe("<b>safe</b>"), third="< dangerous again", fourth=mark_safe("<i>safe again</i>"), ), "&lt; Dangerous &gt; <b>safe</b> &lt; dangerous again <i>safe again</i>" ) def test_linebreaks(self): items = ( ("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"), ("para1\nsub1\rsub2\n\npara2", "<p>para1<br>sub1<br>sub2</p>\n\n<p>para2</p>"), ("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br>sub1</p>\n\n<p>para4</p>"), ("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"), ) for value, output in items: with self.subTest(value=value, output=output): self.check_output(linebreaks, value, output) self.check_output(linebreaks, lazystr(value), output) def test_strip_tags(self): items = ( ('<p>See: &#39;&eacute; is an apostrophe followed by e acute</p>', 'See: &#39;&eacute; is an apostrophe followed by e acute'), ('<p>See: &#x27;&eacute; is an apostrophe followed by e acute</p>', 'See: &#x27;&eacute; is an apostrophe followed by e acute'), ('<adf>a', 'a'), ('</adf>a', 'a'), ('<asdf><asdf>e', 'e'), ('hi, <f x', 'hi, <f x'), ('234<235, right?', '234<235, right?'), ('a4<a5 right?', 'a4<a5 right?'), ('b7>b2!', 'b7>b2!'), ('</fe', '</fe'), ('<x>b<y>', 'b'), ('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'), ('a<p a >b</p>c', 'abc'), ('d<a:b c:d>e</p>f', 'def'), ('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'), # caused infinite loop on Pythons not patched with # https://bugs.python.org/issue20288 ('&gotcha&#;<>', '&gotcha&#;<>'), ('<sc<!-- -->ript>test<<!-- -->/script>', 'ript>test'), ('<script>alert()</script>&h', 'alert()h'), ('><!' + ('&' * 16000) + 'D', '><!' 
+ ('&' * 16000) + 'D'), ('X<<<<br>br>br>br>X', 'XX'), ) for value, output in items: with self.subTest(value=value, output=output): self.check_output(strip_tags, value, output) self.check_output(strip_tags, lazystr(value), output) def test_strip_tags_files(self): # Test with more lengthy content (also catching performance regressions) for filename in ('strip_tags1.html', 'strip_tags2.txt'): with self.subTest(filename=filename): path = os.path.join(os.path.dirname(__file__), 'files', filename) with open(path) as fp: content = fp.read() start = datetime.now() stripped = strip_tags(content) elapsed = datetime.now() - start self.assertEqual(elapsed.seconds, 0) self.assertIn("Please try again.", stripped) self.assertNotIn('<', stripped) def test_strip_spaces_between_tags(self): # Strings that should come out untouched. items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>') for value in items: with self.subTest(value=value): self.check_output(strip_spaces_between_tags, value) self.check_output(strip_spaces_between_tags, lazystr(value)) # Strings that have spaces to strip. items = ( ('<d> </d>', '<d></d>'), ('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'), ('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'), ) for value, output in items: with self.subTest(value=value, output=output): self.check_output(strip_spaces_between_tags, value, output) self.check_output(strip_spaces_between_tags, lazystr(value), output) def test_escapejs(self): items = ( ('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'), (r'\ : backslashes, too', '\\u005C : backslashes, too'), ( 'and lots of whitespace: \r\n\t\v\f\b', 'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008' ), (r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'), ( 'paragraph separator:\u2029and line separator:\u2028', 'paragraph separator:\\u2029and line separator:\\u2028' ), ('`', '\\u0060'), ) for value, output in items: with self.subTest(value=value, output=output): self.check_output(escapejs, value, output) self.check_output(escapejs, lazystr(value), output) def test_json_script(self): tests = ( # "<", ">" and "&" are quoted inside JSON strings (('&<>', '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>')), # "<", ">" and "&" are quoted inside JSON objects ( {'a': '<script>test&ing</script>'}, '<script id="test_id" type="application/json">' '{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>' ), # Lazy strings are quoted (lazystr('&<>'), '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>'), ( {'a': lazystr('<script>test&ing</script>')}, '<script id="test_id" type="application/json">' '{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>' ), ) for arg, expected in tests: with self.subTest(arg=arg): self.assertEqual(json_script(arg, 'test_id'), expected) def test_json_script_without_id(self): self.assertHTMLEqual( json_script({'key': 'value'}), '<script type="application/json">{"key": "value"}</script>', ) def test_smart_urlquote(self): items = ( ('http://öäü.com/', 'http://xn--4ca9at.com/'), ('http://öäü.com/öäü/', 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/'), # Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered # safe as per RFC. 
('http://example.com/path/öäü/', 'http://example.com/path/%C3%B6%C3%A4%C3%BC/'), ('http://example.com/%C3%B6/ä/', 'http://example.com/%C3%B6/%C3%A4/'), ('http://example.com/?x=1&y=2+3&z=', 'http://example.com/?x=1&y=2+3&z='), ('http://example.com/?x=<>"\'', 'http://example.com/?x=%3C%3E%22%27'), ('http://example.com/?q=http://example.com/?x=1%26q=django', 'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'), ('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango', 'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'), ('http://.www.f oo.bar/', 'http://.www.f%20oo.bar/'), ) # IDNs are properly quoted for value, output in items: with self.subTest(value=value, output=output): self.assertEqual(smart_urlquote(value), output) def test_conditional_escape(self): s = '<h1>interop</h1>' self.assertEqual(conditional_escape(s), '&lt;h1&gt;interop&lt;/h1&gt;') self.assertEqual(conditional_escape(mark_safe(s)), s) self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s) def test_html_safe(self): @html_safe class HtmlClass: def __str__(self): return "<h1>I'm a html class!</h1>" html_obj = HtmlClass() self.assertTrue(hasattr(HtmlClass, '__html__')) self.assertTrue(hasattr(html_obj, '__html__')) self.assertEqual(str(html_obj), html_obj.__html__()) def test_html_safe_subclass(self): class BaseClass: def __html__(self): # defines __html__ on its own return 'some html content' def __str__(self): return 'some non html content' @html_safe class Subclass(BaseClass): def __str__(self): # overrides __str__ and is marked as html_safe return 'some html safe content' subclass_obj = Subclass() self.assertEqual(str(subclass_obj), subclass_obj.__html__()) def test_html_safe_defines_html_error(self): msg = "can't apply @html_safe to HtmlClass because it defines __html__()." with self.assertRaisesMessage(ValueError, msg): @html_safe class HtmlClass: def __html__(self): return "<h1>I'm a html class!</h1>" def test_html_safe_doesnt_define_str(self): msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()." with self.assertRaisesMessage(ValueError, msg): @html_safe class HtmlClass: pass def test_urlize(self): tests = ( ( 'Search for google.com/?q=! and see.', 'Search for <a href="http://google.com/?q=">google.com/?q=</a>! and see.' ), ( 'Search for google.com/?q=1&lt! and see.', 'Search for <a href="http://google.com/?q=1%3C">google.com/?q=1&lt</a>! and see.' ), ( lazystr('Search for google.com/?q=!'), 'Search for <a href="http://google.com/?q=">google.com/?q=</a>!' ), ('[email protected]', '<a href="mailto:[email protected]">[email protected]</a>'), ) for value, output in tests: with self.subTest(value=value): self.assertEqual(urlize(value), output) def test_urlize_unchanged_inputs(self): tests = ( ('a' + '@a' * 50000) + 'a', # simple_email_re catastrophic test ('a' + '.' * 1000000) + 'a', # trailing_punctuation catastrophic test 'foo@', '@foo.com', '[email protected]', 'foo@localhost', 'foo@localhost.', ) for value in tests: with self.subTest(value=value): self.assertEqual(urlize(value), value)
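# Illustrative sketch (not part of the dumped test file above): the
# django.utils.html helpers exercised by TestUtilsHtml, on inputs that mirror
# the assertions above.
from django.utils.html import escape, format_html, strip_tags
from django.utils.safestring import mark_safe

assert escape('<&') == '&lt;&amp;'    # '&' is not double-escaped
assert strip_tags('<strong>foo</strong><a href="http://example.com">bar</a>') == 'foobar'
# format_html() escapes plain arguments but leaves mark_safe() values alone.
assert format_html('{} {}', '< Dangerous >', mark_safe('<b>safe</b>')) == (
    '&lt; Dangerous &gt; <b>safe</b>'
)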
1fbd08bf2e6f83897143f05cfecc383dd940cfbe1e028df3560dc83f1637a24d
from decimal import Decimal from sys import float_info from django.test import SimpleTestCase from django.utils.numberformat import format as nformat class TestNumberFormat(SimpleTestCase): def test_format_number(self): self.assertEqual(nformat(1234, '.'), '1234') self.assertEqual(nformat(1234.2, '.'), '1234.2') self.assertEqual(nformat(1234, '.', decimal_pos=2), '1234.00') self.assertEqual(nformat(1234, '.', grouping=2, thousand_sep=','), '1234') self.assertEqual(nformat(1234, '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34') self.assertEqual(nformat(-1234.33, '.', decimal_pos=1), '-1234.3') # The use_l10n parameter can force thousand grouping behavior. with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(nformat(1234, '.', grouping=3, thousand_sep=',', use_l10n=False), '1234') self.assertEqual(nformat(1234, '.', grouping=3, thousand_sep=',', use_l10n=True), '1,234') def test_format_string(self): self.assertEqual(nformat('1234', '.'), '1234') self.assertEqual(nformat('1234.2', '.'), '1234.2') self.assertEqual(nformat('1234', '.', decimal_pos=2), '1234.00') self.assertEqual(nformat('1234', '.', grouping=2, thousand_sep=','), '1234') self.assertEqual(nformat('1234', '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34') self.assertEqual(nformat('-1234.33', '.', decimal_pos=1), '-1234.3') self.assertEqual(nformat('10000', '.', grouping=3, thousand_sep='comma', force_grouping=True), '10comma000') def test_large_number(self): most_max = ( '{}179769313486231570814527423731704356798070567525844996' '598917476803157260780028538760589558632766878171540458953' '514382464234321326889464182768467546703537516986049910576' '551282076245490090389328944075868508455133942304583236903' '222948165808559332123348274797826204144723168738177180919' '29988125040402618412485836{}' ) most_max2 = ( '{}35953862697246314162905484746340871359614113505168999' '31978349536063145215600570775211791172655337563430809179' '07028764928468642653778928365536935093407075033972099821' '15310256415249098018077865788815173701691026788460916647' '38064458963316171186642466965495956524082894463374763543' '61838599762500808052368249716736' ) int_max = int(float_info.max) self.assertEqual(nformat(int_max, '.'), most_max.format('', '8')) self.assertEqual(nformat(int_max + 1, '.'), most_max.format('', '9')) self.assertEqual(nformat(int_max * 2, '.'), most_max2.format('')) self.assertEqual(nformat(0 - int_max, '.'), most_max.format('-', '8')) self.assertEqual(nformat(-1 - int_max, '.'), most_max.format('-', '9')) self.assertEqual(nformat(-2 * int_max, '.'), most_max2.format('-')) def test_float_numbers(self): tests = [ (9e-10, 10, '0.0000000009'), (9e-19, 2, '0.00'), (.00000000000099, 0, '0'), (.00000000000099, 13, '0.0000000000009'), (1e16, None, '10000000000000000'), (1e16, 2, '10000000000000000.00'), # A float without a fractional part (3.) results in a ".0" when no # decimal_pos is given. Contrast that with the Decimal('3.') case # in test_decimal_numbers which doesn't return a fractional part. (3., None, '3.0'), ] for value, decimal_pos, expected_value in tests: with self.subTest(value=value, decimal_pos=decimal_pos): self.assertEqual(nformat(value, '.', decimal_pos), expected_value) # Thousand grouping behavior. 
self.assertEqual( nformat(1e16, '.', thousand_sep=',', grouping=3, force_grouping=True), '10,000,000,000,000,000', ) self.assertEqual( nformat(1e16, '.', decimal_pos=2, thousand_sep=',', grouping=3, force_grouping=True), '10,000,000,000,000,000.00', ) def test_decimal_numbers(self): self.assertEqual(nformat(Decimal('1234'), '.'), '1234') self.assertEqual(nformat(Decimal('1234.2'), '.'), '1234.2') self.assertEqual(nformat(Decimal('1234'), '.', decimal_pos=2), '1234.00') self.assertEqual(nformat(Decimal('1234'), '.', grouping=2, thousand_sep=','), '1234') self.assertEqual(nformat(Decimal('1234'), '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34') self.assertEqual(nformat(Decimal('-1234.33'), '.', decimal_pos=1), '-1234.3') self.assertEqual(nformat(Decimal('0.00000001'), '.', decimal_pos=8), '0.00000001') self.assertEqual(nformat(Decimal('9e-19'), '.', decimal_pos=2), '0.00') self.assertEqual(nformat(Decimal('.00000000000099'), '.', decimal_pos=0), '0') self.assertEqual( nformat(Decimal('1e16'), '.', thousand_sep=',', grouping=3, force_grouping=True), '10,000,000,000,000,000' ) self.assertEqual( nformat(Decimal('1e16'), '.', decimal_pos=2, thousand_sep=',', grouping=3, force_grouping=True), '10,000,000,000,000,000.00' ) self.assertEqual(nformat(Decimal('3.'), '.'), '3') self.assertEqual(nformat(Decimal('3.0'), '.'), '3.0') # Very large & small numbers. tests = [ ('9e9999', None, '9e+9999'), ('9e9999', 3, '9.000e+9999'), ('9e201', None, '9e+201'), ('9e200', None, '9e+200'), ('1.2345e999', 2, '1.23e+999'), ('9e-999', None, '9e-999'), ('1e-7', 8, '0.00000010'), ('1e-8', 8, '0.00000001'), ('1e-9', 8, '0.00000000'), ('1e-10', 8, '0.00000000'), ('1e-11', 8, '0.00000000'), ('1' + ('0' * 300), 3, '1.000e+300'), ('0.{}1234'.format('0' * 299), 3, '0.000'), ] for value, decimal_pos, expected_value in tests: with self.subTest(value=value): self.assertEqual(nformat(Decimal(value), '.', decimal_pos), expected_value) def test_decimal_subclass(self): class EuroDecimal(Decimal): """ Wrapper for Decimal which prefixes each amount with the € symbol. """ def __format__(self, specifier, **kwargs): amount = super().__format__(specifier, **kwargs) return '€ {}'.format(amount) price = EuroDecimal('1.23') self.assertEqual(nformat(price, ','), '€ 1,23')
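# Illustrative sketch (not part of the dumped test file above): the
# django.utils.numberformat.format() behaviour checked by TestNumberFormat,
# using argument values taken from the assertions above. numberformat reads
# Django settings, so a minimal settings.configure() guard is included to
# keep the snippet self-contained.
from django.conf import settings

if not settings.configured:
    settings.configure()

from django.utils.numberformat import format as nformat

assert nformat(1234, '.', decimal_pos=2) == '1234.00'
assert nformat(1234, '.', grouping=2, thousand_sep=',', force_grouping=True) == '12,34'
assert nformat(-1234.33, '.', decimal_pos=1) == '-1234.3'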
c22372271cd546f389a4b48f53ce9928cd591b7b76444e8b93af6bb2a4c45f53
from .test_deconstruct import DeconstructibleWithPathClass # NOQA
f55bc3d753314bf12005fad76c468345afe91c9691fc6fe21bd08dc847a151b1
from unittest import mock from django.test import SimpleTestCase from django.test.utils import ignore_warnings from django.utils.deprecation import RemovedInDjango50Warning from django.utils.functional import cached_property, classproperty, lazy class FunctionalTests(SimpleTestCase): def test_lazy(self): t = lazy(lambda: tuple(range(3)), list, tuple) for a, b in zip(t(), range(3)): self.assertEqual(a, b) def test_lazy_base_class(self): """lazy also finds base class methods in the proxy object""" class Base: def base_method(self): pass class Klazz(Base): pass t = lazy(lambda: Klazz(), Klazz)() self.assertIn('base_method', dir(t)) def test_lazy_base_class_override(self): """lazy finds the correct (overridden) method implementation""" class Base: def method(self): return 'Base' class Klazz(Base): def method(self): return 'Klazz' t = lazy(lambda: Klazz(), Base)() self.assertEqual(t.method(), 'Klazz') def test_lazy_object_to_string(self): class Klazz: def __str__(self): return "Î am ā Ǩlâzz." def __bytes__(self): return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz." t = lazy(lambda: Klazz(), Klazz)() self.assertEqual(str(t), "Î am ā Ǩlâzz.") self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.") def assertCachedPropertyWorks(self, attr, Class): with self.subTest(attr=attr): def get(source): return getattr(source, attr) obj = Class() class SubClass(Class): pass subobj = SubClass() # Docstring is preserved. self.assertEqual(get(Class).__doc__, 'Here is the docstring...') self.assertEqual(get(SubClass).__doc__, 'Here is the docstring...') # It's cached. self.assertEqual(get(obj), get(obj)) self.assertEqual(get(subobj), get(subobj)) # The correct value is returned. self.assertEqual(get(obj)[0], 1) self.assertEqual(get(subobj)[0], 1) # State isn't shared between instances. obj2 = Class() subobj2 = SubClass() self.assertNotEqual(get(obj), get(obj2)) self.assertNotEqual(get(subobj), get(subobj2)) # It behaves like a property when there's no instance. self.assertIsInstance(get(Class), cached_property) self.assertIsInstance(get(SubClass), cached_property) # 'other_value' doesn't become a property. self.assertTrue(callable(obj.other_value)) self.assertTrue(callable(subobj.other_value)) def test_cached_property(self): """cached_property caches its value and behaves like a property.""" class Class: @cached_property def value(self): """Here is the docstring...""" return 1, object() @cached_property def __foo__(self): """Here is the docstring...""" return 1, object() def other_value(self): """Here is the docstring...""" return 1, object() other = cached_property(other_value) attrs = ['value', 'other', '__foo__'] for attr in attrs: self.assertCachedPropertyWorks(attr, Class) @ignore_warnings(category=RemovedInDjango50Warning) def test_cached_property_name(self): class Class: def other_value(self): """Here is the docstring...""" return 1, object() other = cached_property(other_value, name='other') other2 = cached_property(other_value, name='different_name') self.assertCachedPropertyWorks('other', Class) # An explicit name is ignored. obj = Class() obj.other2 self.assertFalse(hasattr(obj, 'different_name')) def test_cached_property_name_deprecation_warning(self): def value(self): return 1 msg = "The name argument is deprecated as it's unnecessary as of Python 3.6." 
with self.assertWarnsMessage(RemovedInDjango50Warning, msg): cached_property(value, name='other_name') def test_cached_property_auto_name(self): """ cached_property caches its value and behaves like a property on mangled methods or when the name kwarg isn't set. """ class Class: @cached_property def __value(self): """Here is the docstring...""" return 1, object() def other_value(self): """Here is the docstring...""" return 1, object() other = cached_property(other_value) attrs = ['_Class__value', 'other'] for attr in attrs: self.assertCachedPropertyWorks(attr, Class) def test_cached_property_reuse_different_names(self): """Disallow this case because the decorated function wouldn't be cached.""" with self.assertRaises(RuntimeError) as ctx: class ReusedCachedProperty: @cached_property def a(self): pass b = a self.assertEqual( str(ctx.exception.__context__), str(TypeError( "Cannot assign the same cached_property to two different " "names ('a' and 'b')." )) ) def test_cached_property_reuse_same_name(self): """ Reusing a cached_property on different classes under the same name is allowed. """ counter = 0 @cached_property def _cp(_self): nonlocal counter counter += 1 return counter class A: cp = _cp class B: cp = _cp a = A() b = B() self.assertEqual(a.cp, 1) self.assertEqual(b.cp, 2) self.assertEqual(a.cp, 1) def test_cached_property_set_name_not_called(self): cp = cached_property(lambda s: None) class Foo: pass Foo.cp = cp msg = 'Cannot use cached_property instance without calling __set_name__() on it.' with self.assertRaisesMessage(TypeError, msg): Foo().cp def test_lazy_add(self): lazy_4 = lazy(lambda: 4, int) lazy_5 = lazy(lambda: 5, int) self.assertEqual(lazy_4() + lazy_5(), 9) def test_lazy_equality(self): """ == and != work correctly for Promises. """ lazy_a = lazy(lambda: 4, int) lazy_b = lazy(lambda: 4, int) lazy_c = lazy(lambda: 5, int) self.assertEqual(lazy_a(), lazy_b()) self.assertNotEqual(lazy_b(), lazy_c()) def test_lazy_repr_text(self): original_object = 'Lazy translation text' lazy_obj = lazy(lambda: original_object, str) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_repr_int(self): original_object = 15 lazy_obj = lazy(lambda: original_object, int) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_repr_bytes(self): original_object = b'J\xc3\xbcst a str\xc3\xadng' lazy_obj = lazy(lambda: original_object, bytes) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_class_preparation_caching(self): # lazy() should prepare the proxy class only once i.e. the first time # it's used. lazified = lazy(lambda: 0, int) __proxy__ = lazified().__class__ with mock.patch.object(__proxy__, '__prepare_class__') as mocked: lazified() mocked.assert_not_called() def test_lazy_bytes_and_str_result_classes(self): lazy_obj = lazy(lambda: 'test', str, bytes) msg = 'Cannot call lazy() with both bytes and text return types.' with self.assertRaisesMessage(ValueError, msg): lazy_obj() def test_classproperty_getter(self): class Foo: foo_attr = 123 def __init__(self): self.foo_attr = 456 @classproperty def foo(cls): return cls.foo_attr class Bar: bar = classproperty() @bar.getter def bar(cls): return 123 self.assertEqual(Foo.foo, 123) self.assertEqual(Foo().foo, 123) self.assertEqual(Bar.bar, 123) self.assertEqual(Bar().bar, 123) def test_classproperty_override_getter(self): class Foo: @classproperty def foo(cls): return 123 @foo.getter def foo(cls): return 456 self.assertEqual(Foo.foo, 456) self.assertEqual(Foo().foo, 456)
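# Illustrative sketch (not part of the dumped test file above): the
# cached_property and lazy() behaviour that FunctionalTests verifies. The
# Report class and its 'data' attribute are made up for the example.
from django.utils.functional import cached_property, lazy


class Report:
    @cached_property
    def data(self):
        # Evaluated once per instance; later accesses reuse the stored value.
        return [1, 2, 3]


report = Report()
assert report.data is report.data      # second access returns the cached object

lazy_four = lazy(lambda: 4, int)        # evaluation is deferred until the proxy is used
assert lazy_four() + 1 == 5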
fa40e465c4beea034b1c6b954362c4f01e7ab48a2f0f043249f051cb402be901
import hashlib import unittest from django.test import SimpleTestCase from django.utils.crypto import ( InvalidAlgorithm, constant_time_compare, pbkdf2, salted_hmac, ) class TestUtilsCryptoMisc(SimpleTestCase): def test_constant_time_compare(self): # It's hard to test for constant time, just test the result. self.assertTrue(constant_time_compare(b'spam', b'spam')) self.assertFalse(constant_time_compare(b'spam', b'eggs')) self.assertTrue(constant_time_compare('spam', 'spam')) self.assertFalse(constant_time_compare('spam', 'eggs')) def test_salted_hmac(self): tests = [ ((b'salt', b'value'), {}, 'b51a2e619c43b1ca4f91d15c57455521d71d61eb'), (('salt', 'value'), {}, 'b51a2e619c43b1ca4f91d15c57455521d71d61eb'), ( ('salt', 'value'), {'secret': 'abcdefg'}, '8bbee04ccddfa24772d1423a0ba43bd0c0e24b76', ), ( ('salt', 'value'), {'secret': 'x' * hashlib.sha1().block_size}, 'bd3749347b412b1b0a9ea65220e55767ac8e96b0', ), ( ('salt', 'value'), {'algorithm': 'sha256'}, 'ee0bf789e4e009371a5372c90f73fcf17695a8439c9108b0480f14e347b3f9ec', ), ( ('salt', 'value'), { 'algorithm': 'blake2b', 'secret': 'x' * hashlib.blake2b().block_size, }, 'fc6b9800a584d40732a07fa33fb69c35211269441823bca431a143853c32f' 'e836cf19ab881689528ede647dac412170cd5d3407b44c6d0f44630690c54' 'ad3d58', ), ] for args, kwargs, digest in tests: with self.subTest(args=args, kwargs=kwargs): self.assertEqual(salted_hmac(*args, **kwargs).hexdigest(), digest) def test_invalid_algorithm(self): msg = "'whatever' is not an algorithm accepted by the hashlib module." with self.assertRaisesMessage(InvalidAlgorithm, msg): salted_hmac('salt', 'value', algorithm='whatever') class TestUtilsCryptoPBKDF2(unittest.TestCase): # https://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06 rfc_vectors = [ { "args": { "password": "password", "salt": "salt", "iterations": 1, "dklen": 20, "digest": hashlib.sha1, }, "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6", }, { "args": { "password": "password", "salt": "salt", "iterations": 2, "dklen": 20, "digest": hashlib.sha1, }, "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957", }, { "args": { "password": "password", "salt": "salt", "iterations": 4096, "dklen": 20, "digest": hashlib.sha1, }, "result": "4b007901b765489abead49d926f721d065a429c1", }, # # this takes way too long :( # { # "args": { # "password": "password", # "salt": "salt", # "iterations": 16777216, # "dklen": 20, # "digest": hashlib.sha1, # }, # "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984", # }, { "args": { "password": "passwordPASSWORDpassword", "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt", "iterations": 4096, "dklen": 25, "digest": hashlib.sha1, }, "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038", }, { "args": { "password": "pass\0word", "salt": "sa\0lt", "iterations": 4096, "dklen": 16, "digest": hashlib.sha1, }, "result": "56fa6aa75548099dcc37d7f03425e0c3", }, ] regression_vectors = [ { "args": { "password": "password", "salt": "salt", "iterations": 1, "dklen": 20, "digest": hashlib.sha256, }, "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9", }, { "args": { "password": "password", "salt": "salt", "iterations": 1, "dklen": 20, "digest": hashlib.sha512, }, "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6", }, { "args": { "password": "password", "salt": "salt", "iterations": 1000, "dklen": 0, "digest": hashlib.sha512, }, "result": ("afe6c5530785b6cc6b1c6453384731bd5ee432ee" "549fd42fb6695779ad8a1c5bf59de69c48f774ef" "c4007d5298f9033c0241d5ab69305e7b64eceeb8d" "834cfec"), }, # Check leading zeros are not stripped 
(#17481) { "args": { "password": b'\xba', "salt": "salt", "iterations": 1, "dklen": 20, "digest": hashlib.sha1, }, "result": '0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b', }, ] def test_public_vectors(self): for vector in self.rfc_vectors: result = pbkdf2(**vector['args']) self.assertEqual(result.hex(), vector['result']) def test_regression_vectors(self): for vector in self.regression_vectors: result = pbkdf2(**vector['args']) self.assertEqual(result.hex(), vector['result']) def test_default_hmac_alg(self): kwargs = {'password': b'password', 'salt': b'salt', 'iterations': 1, 'dklen': 20} self.assertEqual(pbkdf2(**kwargs), hashlib.pbkdf2_hmac(hash_name=hashlib.sha256().name, **kwargs))
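# Illustrative sketch (not part of the dumped test file above): the
# django.utils.crypto helpers these tests cover. salted_hmac() falls back to
# settings.SECRET_KEY when no secret is given, so an explicit one is passed
# here to keep the snippet independent of Django settings; the expected
# digests are the vectors asserted above.
import hashlib

from django.utils.crypto import constant_time_compare, pbkdf2, salted_hmac

assert salted_hmac('salt', 'value', secret='abcdefg').hexdigest() == (
    '8bbee04ccddfa24772d1423a0ba43bd0c0e24b76'
)
assert pbkdf2('password', 'salt', iterations=1, dklen=20, digest=hashlib.sha1).hex() == (
    '0c60c80f961f0e71f3a9b524af6012062fe037a6'
)
assert constant_time_compare('spam', 'spam')
assert not constant_time_compare('spam', 'eggs')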
13a99b57aab5970d3e51d9498a3cfc915f4422201be824dbd99e45175e985084
import contextlib import os import py_compile import shutil import sys import tempfile import threading import time import types import weakref import zipfile from importlib import import_module from pathlib import Path from subprocess import CompletedProcess from unittest import mock, skip, skipIf try: import zoneinfo except ImportError: from backports import zoneinfo import django.__main__ from django.apps.registry import Apps from django.test import SimpleTestCase from django.test.utils import extend_sys_path from django.utils import autoreload from django.utils.autoreload import WatchmanUnavailable from .test_module import __main__ as test_main, main_module as test_main_module from .utils import on_macos_with_hfs class TestIterModulesAndFiles(SimpleTestCase): def import_and_cleanup(self, name): import_module(name) self.addCleanup(lambda: sys.path_importer_cache.clear()) self.addCleanup(lambda: sys.modules.pop(name, None)) def clear_autoreload_caches(self): autoreload.iter_modules_and_files.cache_clear() def assertFileFound(self, filename): # Some temp directories are symlinks. Python resolves these fully while # importing. resolved_filename = filename.resolve(strict=True) self.clear_autoreload_caches() # Test uncached access self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files())) self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) def assertFileNotFound(self, filename): resolved_filename = filename.resolve(strict=True) self.clear_autoreload_caches() # Test uncached access self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) # Test cached access self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files())) self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1) def temporary_file(self, filename): dirname = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, dirname) return Path(dirname) / filename def test_paths_are_pathlib_instances(self): for filename in autoreload.iter_all_python_module_files(): self.assertIsInstance(filename, Path) def test_file_added(self): """ When a file is added, it's returned by iter_all_python_module_files(). """ filename = self.temporary_file('test_deleted_removed_module.py') filename.touch() with extend_sys_path(str(filename.parent)): self.import_and_cleanup('test_deleted_removed_module') self.assertFileFound(filename.absolute()) def test_check_errors(self): """ When a file containing an error is imported in a function wrapped by check_errors(), gen_filenames() returns it. """ filename = self.temporary_file('test_syntax_error.py') filename.write_text("Ceci n'est pas du Python.") with extend_sys_path(str(filename.parent)): try: with self.assertRaises(SyntaxError): autoreload.check_errors(import_module)('test_syntax_error') finally: autoreload._exception = None self.assertFileFound(filename) def test_check_errors_catches_all_exceptions(self): """ Since Python may raise arbitrary exceptions when importing code, check_errors() must catch Exception, not just some subclasses. 
""" filename = self.temporary_file('test_exception.py') filename.write_text('raise Exception') with extend_sys_path(str(filename.parent)): try: with self.assertRaises(Exception): autoreload.check_errors(import_module)('test_exception') finally: autoreload._exception = None self.assertFileFound(filename) def test_zip_reload(self): """ Modules imported from zipped files have their archive location included in the result. """ zip_file = self.temporary_file('zip_import.zip') with zipfile.ZipFile(str(zip_file), 'w', zipfile.ZIP_DEFLATED) as zipf: zipf.writestr('test_zipped_file.py', '') with extend_sys_path(str(zip_file)): self.import_and_cleanup('test_zipped_file') self.assertFileFound(zip_file) def test_bytecode_conversion_to_source(self): """.pyc and .pyo files are included in the files list.""" filename = self.temporary_file('test_compiled.py') filename.touch() compiled_file = Path(py_compile.compile(str(filename), str(filename.with_suffix('.pyc')))) filename.unlink() with extend_sys_path(str(compiled_file.parent)): self.import_and_cleanup('test_compiled') self.assertFileFound(compiled_file) def test_weakref_in_sys_module(self): """iter_all_python_module_file() ignores weakref modules.""" time_proxy = weakref.proxy(time) sys.modules['time_proxy'] = time_proxy self.addCleanup(lambda: sys.modules.pop('time_proxy', None)) list(autoreload.iter_all_python_module_files()) # No crash. def test_module_without_spec(self): module = types.ModuleType('test_module') del module.__spec__ self.assertEqual(autoreload.iter_modules_and_files((module,), frozenset()), frozenset()) def test_main_module_is_resolved(self): main_module = sys.modules['__main__'] self.assertFileFound(Path(main_module.__file__)) def test_main_module_without_file_is_not_resolved(self): fake_main = types.ModuleType('__main__') self.assertEqual(autoreload.iter_modules_and_files((fake_main,), frozenset()), frozenset()) def test_path_with_embedded_null_bytes(self): for path in ( 'embedded_null_byte\x00.py', 'di\x00rectory/embedded_null_byte.py', ): with self.subTest(path=path): self.assertEqual( autoreload.iter_modules_and_files((), frozenset([path])), frozenset(), ) class TestChildArguments(SimpleTestCase): @mock.patch.dict(sys.modules, {'__main__': django.__main__}) @mock.patch('sys.argv', [django.__main__.__file__, 'runserver']) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {}) def test_run_as_module(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-m', 'django', 'runserver'] ) @mock.patch.dict(sys.modules, {'__main__': test_main}) @mock.patch('sys.argv', [test_main.__file__, 'runserver']) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {}) def test_run_as_non_django_module(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-m', 'utils_tests.test_module', 'runserver'], ) @mock.patch.dict(sys.modules, {'__main__': test_main_module}) @mock.patch('sys.argv', [test_main.__file__, 'runserver']) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {}) def test_run_as_non_django_module_non_package(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-m', 'utils_tests.test_module.main_module', 'runserver'], ) @mock.patch('__main__.__spec__', None) @mock.patch('sys.argv', [__file__, 'runserver']) @mock.patch('sys.warnoptions', ['error']) @mock.patch('sys._xoptions', {}) def test_warnoptions(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-Werror', __file__, 'runserver'] ) @mock.patch('sys.argv', 
[__file__, 'runserver']) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {'utf8': True, 'a': 'b'}) def test_xoptions(self): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, '-Xutf8', '-Xa=b', __file__, 'runserver'], ) @mock.patch('__main__.__spec__', None) @mock.patch('sys.warnoptions', []) def test_exe_fallback(self): with tempfile.TemporaryDirectory() as tmpdir: exe_path = Path(tmpdir) / 'django-admin.exe' exe_path.touch() with mock.patch('sys.argv', [exe_path.with_suffix(''), 'runserver']): self.assertEqual( autoreload.get_child_arguments(), [exe_path, 'runserver'] ) @mock.patch('__main__.__spec__', None) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {}) def test_entrypoint_fallback(self): with tempfile.TemporaryDirectory() as tmpdir: script_path = Path(tmpdir) / 'django-admin-script.py' script_path.touch() with mock.patch('sys.argv', [script_path.with_name('django-admin'), 'runserver']): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, script_path, 'runserver'] ) @mock.patch('__main__.__spec__', None) @mock.patch('sys.argv', ['does-not-exist', 'runserver']) @mock.patch('sys.warnoptions', []) def test_raises_runtimeerror(self): msg = 'Script does-not-exist does not exist.' with self.assertRaisesMessage(RuntimeError, msg): autoreload.get_child_arguments() @mock.patch('sys.argv', [__file__, 'runserver']) @mock.patch('sys.warnoptions', []) @mock.patch('sys._xoptions', {}) def test_module_no_spec(self): module = types.ModuleType('test_module') del module.__spec__ with mock.patch.dict(sys.modules, {'__main__': module}): self.assertEqual( autoreload.get_child_arguments(), [sys.executable, __file__, 'runserver'] ) class TestUtilities(SimpleTestCase): def test_is_django_module(self): for module, expected in ( (zoneinfo, False), (sys, False), (autoreload, True) ): with self.subTest(module=module): self.assertIs(autoreload.is_django_module(module), expected) def test_is_django_path(self): for module, expected in ( (zoneinfo.__file__, False), (contextlib.__file__, False), (autoreload.__file__, True) ): with self.subTest(module=module): self.assertIs(autoreload.is_django_path(module), expected) class TestCommonRoots(SimpleTestCase): def test_common_roots(self): paths = ( Path('/first/second'), Path('/first/second/third'), Path('/first/'), Path('/root/first/'), ) results = autoreload.common_roots(paths) self.assertCountEqual(results, [Path('/first/'), Path('/root/first/')]) class TestSysPathDirectories(SimpleTestCase): def setUp(self): self._directory = tempfile.TemporaryDirectory() self.directory = Path(self._directory.name).resolve(strict=True).absolute() self.file = self.directory / 'test' self.file.touch() def tearDown(self): self._directory.cleanup() def test_sys_paths_with_directories(self): with extend_sys_path(str(self.file)): paths = list(autoreload.sys_path_directories()) self.assertIn(self.file.parent, paths) def test_sys_paths_non_existing(self): nonexistent_file = Path(self.directory.name) / 'does_not_exist' with extend_sys_path(str(nonexistent_file)): paths = list(autoreload.sys_path_directories()) self.assertNotIn(nonexistent_file, paths) self.assertNotIn(nonexistent_file.parent, paths) def test_sys_paths_absolute(self): paths = list(autoreload.sys_path_directories()) self.assertTrue(all(p.is_absolute() for p in paths)) def test_sys_paths_directories(self): with extend_sys_path(str(self.directory)): paths = list(autoreload.sys_path_directories()) self.assertIn(self.directory, paths) class 
GetReloaderTests(SimpleTestCase): @mock.patch('django.utils.autoreload.WatchmanReloader') def test_watchman_unavailable(self, mocked_watchman): mocked_watchman.check_availability.side_effect = WatchmanUnavailable self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader) @mock.patch.object(autoreload.WatchmanReloader, 'check_availability') def test_watchman_available(self, mocked_available): # If WatchmanUnavailable isn't raised, Watchman will be chosen. mocked_available.return_value = None result = autoreload.get_reloader() self.assertIsInstance(result, autoreload.WatchmanReloader) class RunWithReloaderTests(SimpleTestCase): @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) @mock.patch('django.utils.autoreload.get_reloader') def test_swallows_keyboard_interrupt(self, mocked_get_reloader): mocked_get_reloader.side_effect = KeyboardInterrupt() autoreload.run_with_reloader(lambda: None) # No exception @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'false'}) @mock.patch('django.utils.autoreload.restart_with_reloader') def test_calls_sys_exit(self, mocked_restart_reloader): mocked_restart_reloader.return_value = 1 with self.assertRaises(SystemExit) as exc: autoreload.run_with_reloader(lambda: None) self.assertEqual(exc.exception.code, 1) @mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'}) @mock.patch('django.utils.autoreload.start_django') @mock.patch('django.utils.autoreload.get_reloader') def test_calls_start_django(self, mocked_reloader, mocked_start_django): mocked_reloader.return_value = mock.sentinel.RELOADER autoreload.run_with_reloader(mock.sentinel.METHOD) self.assertEqual(mocked_start_django.call_count, 1) self.assertSequenceEqual( mocked_start_django.call_args[0], [mock.sentinel.RELOADER, mock.sentinel.METHOD] ) class StartDjangoTests(SimpleTestCase): @mock.patch('django.utils.autoreload.StatReloader') def test_watchman_becomes_unavailable(self, mocked_stat): mocked_stat.should_stop.return_value = True fake_reloader = mock.MagicMock() fake_reloader.should_stop = False fake_reloader.run.side_effect = autoreload.WatchmanUnavailable() autoreload.start_django(fake_reloader, lambda: None) self.assertEqual(mocked_stat.call_count, 1) @mock.patch('django.utils.autoreload.ensure_echo_on') def test_echo_on_called(self, mocked_echo): fake_reloader = mock.MagicMock() autoreload.start_django(fake_reloader, lambda: None) self.assertEqual(mocked_echo.call_count, 1) @mock.patch('django.utils.autoreload.check_errors') def test_check_errors_called(self, mocked_check_errors): fake_method = mock.MagicMock(return_value=None) fake_reloader = mock.MagicMock() autoreload.start_django(fake_reloader, fake_method) self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method]) @mock.patch('threading.Thread') @mock.patch('django.utils.autoreload.check_errors') def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread): fake_reloader = mock.MagicMock() fake_main_func = mock.MagicMock() fake_thread = mock.MagicMock() mocked_check_errors.return_value = fake_main_func mocked_thread.return_value = fake_thread autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123) self.assertEqual(mocked_thread.call_count, 1) self.assertEqual( mocked_thread.call_args[1], {'target': fake_main_func, 'args': (123,), 'kwargs': {'abc': 123}, 'name': 'django-main-thread'} ) self.assertIs(fake_thread.daemon, True) self.assertTrue(fake_thread.start.called) class TestCheckErrors(SimpleTestCase): def test_mutates_error_files(self): 
fake_method = mock.MagicMock(side_effect=RuntimeError()) wrapped = autoreload.check_errors(fake_method) with mock.patch.object(autoreload, '_error_files') as mocked_error_files: try: with self.assertRaises(RuntimeError): wrapped() finally: autoreload._exception = None self.assertEqual(mocked_error_files.append.call_count, 1) class TestRaiseLastException(SimpleTestCase): @mock.patch('django.utils.autoreload._exception', None) def test_no_exception(self): # Should raise no exception if _exception is None autoreload.raise_last_exception() def test_raises_exception(self): class MyException(Exception): pass # Create an exception try: raise MyException('Test Message') except MyException: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaisesMessage(MyException, 'Test Message'): autoreload.raise_last_exception() def test_raises_custom_exception(self): class MyException(Exception): def __init__(self, msg, extra_context): super().__init__(msg) self.extra_context = extra_context # Create an exception. try: raise MyException('Test Message', 'extra context') except MyException: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaisesMessage(MyException, 'Test Message'): autoreload.raise_last_exception() def test_raises_exception_with_context(self): try: raise Exception(2) except Exception as e: try: raise Exception(1) from e except Exception: exc_info = sys.exc_info() with mock.patch('django.utils.autoreload._exception', exc_info): with self.assertRaises(Exception) as cm: autoreload.raise_last_exception() self.assertEqual(cm.exception.args[0], 1) self.assertEqual(cm.exception.__cause__.args[0], 2) class RestartWithReloaderTests(SimpleTestCase): executable = '/usr/bin/python' def patch_autoreload(self, argv): patch_call = mock.patch('django.utils.autoreload.subprocess.run', return_value=CompletedProcess(argv, 0)) patches = [ mock.patch('django.utils.autoreload.sys.argv', argv), mock.patch('django.utils.autoreload.sys.executable', self.executable), mock.patch('django.utils.autoreload.sys.warnoptions', ['all']), mock.patch('django.utils.autoreload.sys._xoptions', {}), ] for p in patches: p.start() self.addCleanup(p.stop) mock_call = patch_call.start() self.addCleanup(patch_call.stop) return mock_call def test_manage_py(self): with tempfile.TemporaryDirectory() as temp_dir: script = Path(temp_dir) / 'manage.py' script.touch() argv = [str(script), 'runserver'] mock_call = self.patch_autoreload(argv) with mock.patch('__main__.__spec__', None): autoreload.restart_with_reloader() self.assertEqual(mock_call.call_count, 1) self.assertEqual( mock_call.call_args[0][0], [self.executable, '-Wall'] + argv, ) def test_python_m_django(self): main = '/usr/lib/pythonX.Y/site-packages/django/__main__.py' argv = [main, 'runserver'] mock_call = self.patch_autoreload(argv) with mock.patch('django.__main__.__file__', main): with mock.patch.dict(sys.modules, {'__main__': django.__main__}): autoreload.restart_with_reloader() self.assertEqual(mock_call.call_count, 1) self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall', '-m', 'django'] + argv[1:]) class ReloaderTests(SimpleTestCase): RELOADER_CLS = None def setUp(self): self._tempdir = tempfile.TemporaryDirectory() self.tempdir = Path(self._tempdir.name).resolve(strict=True).absolute() self.existing_file = self.ensure_file(self.tempdir / 'test.py') self.nonexistent_file = (self.tempdir / 'does_not_exist.py').absolute() self.reloader = 
self.RELOADER_CLS() def tearDown(self): self._tempdir.cleanup() self.reloader.stop() def ensure_file(self, path): path.parent.mkdir(exist_ok=True, parents=True) path.touch() # On Linux and Windows updating the mtime of a file using touch() will set a timestamp # value that is in the past, as the time value for the last kernel tick is used rather # than getting the correct absolute time. # To make testing simpler set the mtime to be the observed time when this function is # called. self.set_mtime(path, time.time()) return path.absolute() def set_mtime(self, fp, value): os.utime(str(fp), (value, value)) def increment_mtime(self, fp, by=1): current_time = time.time() self.set_mtime(fp, current_time + by) @contextlib.contextmanager def tick_twice(self): ticker = self.reloader.tick() next(ticker) yield next(ticker) class IntegrationTests: @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_glob(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'non_py_file') self.reloader.watch_dir(self.tempdir, '*.py') with self.tick_twice(): self.increment_mtime(non_py_file) self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_multiple_globs(self, mocked_modules, notify_mock): self.ensure_file(self.tempdir / 'x.test') self.reloader.watch_dir(self.tempdir, '*.py') self.reloader.watch_dir(self.tempdir, '*.test') with self.tick_twice(): self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_overlapping_globs(self, mocked_modules, notify_mock): self.reloader.watch_dir(self.tempdir, '*.py') self.reloader.watch_dir(self.tempdir, '*.p*') with self.tick_twice(): self.increment_mtime(self.existing_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [self.existing_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_glob_recursive(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'dir' / 'non_py_file') py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.py') with self.tick_twice(): self.increment_mtime(non_py_file) self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [py_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_multiple_recursive_globs(self, mocked_modules, notify_mock): non_py_file = self.ensure_file(self.tempdir / 'dir' / 'test.txt') py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.txt') self.reloader.watch_dir(self.tempdir, '**/*.py') with self.tick_twice(): self.increment_mtime(non_py_file) 
self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 2) self.assertCountEqual(notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_nested_glob_recursive(self, mocked_modules, notify_mock): inner_py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.py') self.reloader.watch_dir(inner_py_file.parent, '**/*.py') with self.tick_twice(): self.increment_mtime(inner_py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [inner_py_file]) @mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed') @mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset()) def test_overlapping_glob_recursive(self, mocked_modules, notify_mock): py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py') self.reloader.watch_dir(self.tempdir, '**/*.p*') self.reloader.watch_dir(self.tempdir, '**/*.py*') with self.tick_twice(): self.increment_mtime(py_file) self.assertEqual(notify_mock.call_count, 1) self.assertCountEqual(notify_mock.call_args[0], [py_file]) class BaseReloaderTests(ReloaderTests): RELOADER_CLS = autoreload.BaseReloader def test_watch_dir_with_unresolvable_path(self): path = Path('unresolvable_directory') with mock.patch.object(Path, 'absolute', side_effect=FileNotFoundError): self.reloader.watch_dir(path, '**/*.mo') self.assertEqual(list(self.reloader.directory_globs), []) def test_watch_with_glob(self): self.reloader.watch_dir(self.tempdir, '*.py') watched_files = list(self.reloader.watched_files()) self.assertIn(self.existing_file, watched_files) def test_watch_files_with_recursive_glob(self): inner_file = self.ensure_file(self.tempdir / 'test' / 'test.py') self.reloader.watch_dir(self.tempdir, '**/*.py') watched_files = list(self.reloader.watched_files()) self.assertIn(self.existing_file, watched_files) self.assertIn(inner_file, watched_files) def test_run_loop_catches_stopiteration(self): def mocked_tick(): yield with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick: self.reloader.run_loop() self.assertEqual(tick.call_count, 1) def test_run_loop_stop_and_return(self): def mocked_tick(*args): yield self.reloader.stop() return # Raises StopIteration with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick: self.reloader.run_loop() self.assertEqual(tick.call_count, 1) def test_wait_for_apps_ready_checks_for_exception(self): app_reg = Apps() app_reg.ready_event.set() # thread.is_alive() is False if it's not started. dead_thread = threading.Thread() self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread)) def test_wait_for_apps_ready_without_exception(self): app_reg = Apps() app_reg.ready_event.set() thread = mock.MagicMock() thread.is_alive.return_value = True self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread)) def skip_unless_watchman_available(): try: autoreload.WatchmanReloader.check_availability() except WatchmanUnavailable as e: return skip('Watchman unavailable: %s' % e) return lambda func: func @skip_unless_watchman_available() class WatchmanReloaderTests(ReloaderTests, IntegrationTests): RELOADER_CLS = autoreload.WatchmanReloader def setUp(self): super().setUp() # Shorten the timeout to speed up tests. 
self.reloader.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 2)) def test_watch_glob_ignores_non_existing_directories_two_levels(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir / 'does_not_exist' / 'more', ['*']) self.assertFalse(mocked_subscribe.called) def test_watch_glob_uses_existing_parent_directories(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir / 'does_not_exist', ['*']) self.assertSequenceEqual( mocked_subscribe.call_args[0], [ self.tempdir, 'glob-parent-does_not_exist:%s' % self.tempdir, ['anyof', ['match', 'does_not_exist/*', 'wholename']] ] ) def test_watch_glob_multiple_patterns(self): with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe: self.reloader._watch_glob(self.tempdir, ['*', '*.py']) self.assertSequenceEqual( mocked_subscribe.call_args[0], [ self.tempdir, 'glob:%s' % self.tempdir, ['anyof', ['match', '*', 'wholename'], ['match', '*.py', 'wholename']] ] ) def test_watched_roots_contains_files(self): paths = self.reloader.watched_roots([self.existing_file]) self.assertIn(self.existing_file.parent, paths) def test_watched_roots_contains_directory_globs(self): self.reloader.watch_dir(self.tempdir, '*.py') paths = self.reloader.watched_roots([]) self.assertIn(self.tempdir, paths) def test_watched_roots_contains_sys_path(self): with extend_sys_path(str(self.tempdir)): paths = self.reloader.watched_roots([]) self.assertIn(self.tempdir, paths) def test_check_server_status(self): self.assertTrue(self.reloader.check_server_status()) def test_check_server_status_raises_error(self): with mock.patch.object(self.reloader.client, 'query') as mocked_query: mocked_query.side_effect = Exception() with self.assertRaises(autoreload.WatchmanUnavailable): self.reloader.check_server_status() @mock.patch('pywatchman.client') def test_check_availability(self, mocked_client): mocked_client().capabilityCheck.side_effect = Exception() with self.assertRaisesMessage(WatchmanUnavailable, 'Cannot connect to the watchman service'): self.RELOADER_CLS.check_availability() @mock.patch('pywatchman.client') def test_check_availability_lower_version(self, mocked_client): mocked_client().capabilityCheck.return_value = {'version': '4.8.10'} with self.assertRaisesMessage(WatchmanUnavailable, 'Watchman 4.9 or later is required.'): self.RELOADER_CLS.check_availability() def test_pywatchman_not_available(self): with mock.patch.object(autoreload, 'pywatchman') as mocked: mocked.__bool__.return_value = False with self.assertRaisesMessage(WatchmanUnavailable, 'pywatchman not installed.'): self.RELOADER_CLS.check_availability() def test_update_watches_raises_exceptions(self): class TestException(Exception): pass with mock.patch.object(self.reloader, '_update_watches') as mocked_watches: with mock.patch.object(self.reloader, 'check_server_status') as mocked_server_status: mocked_watches.side_effect = TestException() mocked_server_status.return_value = True with self.assertRaises(TestException): self.reloader.update_watches() self.assertIsInstance(mocked_server_status.call_args[0][0], TestException) @mock.patch.dict(os.environ, {'DJANGO_WATCHMAN_TIMEOUT': '10'}) def test_setting_timeout_from_environment_variable(self): self.assertEqual(self.RELOADER_CLS().client_timeout, 10) @skipIf(on_macos_with_hfs(), "These tests do not work with HFS+ as a filesystem") class StatReloaderTests(ReloaderTests, IntegrationTests): RELOADER_CLS = 
autoreload.StatReloader def setUp(self): super().setUp() # Shorten the sleep time to speed up tests. self.reloader.SLEEP_TIME = 0.01 @mock.patch('django.utils.autoreload.StatReloader.notify_file_changed') def test_tick_does_not_trigger_twice(self, mock_notify_file_changed): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]): ticker = self.reloader.tick() next(ticker) self.increment_mtime(self.existing_file) next(ticker) next(ticker) self.assertEqual(mock_notify_file_changed.call_count, 1) def test_snapshot_files_ignores_missing_files(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistent_file]): self.assertEqual(dict(self.reloader.snapshot_files()), {}) def test_snapshot_files_updates(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]): snapshot1 = dict(self.reloader.snapshot_files()) self.assertIn(self.existing_file, snapshot1) self.increment_mtime(self.existing_file) snapshot2 = dict(self.reloader.snapshot_files()) self.assertNotEqual(snapshot1[self.existing_file], snapshot2[self.existing_file]) def test_snapshot_files_with_duplicates(self): with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file, self.existing_file]): snapshot = list(self.reloader.snapshot_files()) self.assertEqual(len(snapshot), 1) self.assertEqual(snapshot[0][0], self.existing_file)
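# The tests above drive StatReloader through mocks and fixtures; the block below is a
# minimal standalone sketch of the same snapshot-based change detection, assuming only
# the public pieces exercised above (watch_dir(), snapshot_files()). The temporary
# directory and file name are illustrative.
if __name__ == '__main__':
    import os
    import tempfile
    import time
    from pathlib import Path

    from django.utils import autoreload

    with tempfile.TemporaryDirectory() as tmp:
        watched = Path(tmp) / 'example.py'
        watched.touch()

        reloader = autoreload.StatReloader()
        reloader.watch_dir(Path(tmp), '*.py')

        # snapshot_files() yields (path, mtime) pairs for every watched file,
        # including imported Python modules; only the temporary file matters here.
        before = dict(reloader.snapshot_files())[watched]

        # Bump the mtime, much like increment_mtime() does in the tests above.
        later = time.time() + 1
        os.utime(watched, (later, later))
        after = dict(reloader.snapshot_files())[watched]

        # A newer mtime for a watched file is what makes tick() call
        # notify_file_changed() and trigger a reload.
        assert after > before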
import platform import unittest from datetime import datetime, timezone from unittest import mock from django.test import SimpleTestCase from django.utils.datastructures import MultiValueDict from django.utils.http import ( base36_to_int, escape_leading_slashes, http_date, int_to_base36, is_same_domain, parse_etags, parse_http_date, quote_etag, url_has_allowed_host_and_scheme, urlencode, urlsafe_base64_decode, urlsafe_base64_encode, ) class URLEncodeTests(SimpleTestCase): cannot_encode_none_msg = ( "Cannot encode None for key 'a' in a query string. Did you mean to " "pass an empty string or omit the value?" ) def test_tuples(self): self.assertEqual(urlencode((('a', 1), ('b', 2), ('c', 3))), 'a=1&b=2&c=3') def test_dict(self): result = urlencode({'a': 1, 'b': 2, 'c': 3}) # Dictionaries are treated as unordered. self.assertIn(result, [ 'a=1&b=2&c=3', 'a=1&c=3&b=2', 'b=2&a=1&c=3', 'b=2&c=3&a=1', 'c=3&a=1&b=2', 'c=3&b=2&a=1', ]) def test_dict_containing_sequence_not_doseq(self): self.assertEqual(urlencode({'a': [1, 2]}, doseq=False), 'a=%5B1%2C+2%5D') def test_dict_containing_tuple_not_doseq(self): self.assertEqual(urlencode({'a': (1, 2)}, doseq=False), 'a=%281%2C+2%29') def test_custom_iterable_not_doseq(self): class IterableWithStr: def __str__(self): return 'custom' def __iter__(self): yield from range(0, 3) self.assertEqual(urlencode({'a': IterableWithStr()}, doseq=False), 'a=custom') def test_dict_containing_sequence_doseq(self): self.assertEqual(urlencode({'a': [1, 2]}, doseq=True), 'a=1&a=2') def test_dict_containing_empty_sequence_doseq(self): self.assertEqual(urlencode({'a': []}, doseq=True), '') def test_multivaluedict(self): result = urlencode(MultiValueDict({ 'name': ['Adrian', 'Simon'], 'position': ['Developer'], }), doseq=True) # MultiValueDicts are similarly unordered. 
self.assertIn(result, [ 'name=Adrian&name=Simon&position=Developer', 'position=Developer&name=Adrian&name=Simon', ]) def test_dict_with_bytes_values(self): self.assertEqual(urlencode({'a': b'abc'}, doseq=True), 'a=abc') def test_dict_with_sequence_of_bytes(self): self.assertEqual(urlencode({'a': [b'spam', b'eggs', b'bacon']}, doseq=True), 'a=spam&a=eggs&a=bacon') def test_dict_with_bytearray(self): self.assertEqual(urlencode({'a': bytearray(range(2))}, doseq=True), 'a=0&a=1') def test_generator(self): self.assertEqual(urlencode({'a': range(2)}, doseq=True), 'a=0&a=1') self.assertEqual(urlencode({'a': range(2)}, doseq=False), 'a=range%280%2C+2%29') def test_none(self): with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): urlencode({'a': None}) def test_none_in_sequence(self): with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): urlencode({'a': [None]}, doseq=True) def test_none_in_generator(self): def gen(): yield None with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg): urlencode({'a': gen()}, doseq=True) class Base36IntTests(SimpleTestCase): def test_roundtrip(self): for n in [0, 1, 1000, 1000000]: self.assertEqual(n, base36_to_int(int_to_base36(n))) def test_negative_input(self): with self.assertRaisesMessage(ValueError, 'Negative base36 conversion input.'): int_to_base36(-1) def test_to_base36_errors(self): for n in ['1', 'foo', {1: 2}, (1, 2, 3), 3.141]: with self.assertRaises(TypeError): int_to_base36(n) def test_invalid_literal(self): for n in ['#', ' ']: with self.assertRaisesMessage(ValueError, "invalid literal for int() with base 36: '%s'" % n): base36_to_int(n) def test_input_too_large(self): with self.assertRaisesMessage(ValueError, 'Base36 input too large'): base36_to_int('1' * 14) def test_to_int_errors(self): for n in [123, {1: 2}, (1, 2, 3), 3.141]: with self.assertRaises(TypeError): base36_to_int(n) def test_values(self): for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]: self.assertEqual(int_to_base36(n), b36) self.assertEqual(base36_to_int(b36), n) class URLHasAllowedHostAndSchemeTests(unittest.TestCase): def test_bad_urls(self): bad_urls = ( 'http://example.com', 'http:///example.com', 'https://example.com', 'ftp://example.com', r'\\example.com', r'\\\example.com', r'/\\/example.com', r'\\\example.com', r'\\example.com', r'\\//example.com', r'/\/example.com', r'\/example.com', r'/\example.com', 'http:///example.com', r'http:/\//example.com', r'http:\/example.com', r'http:/\example.com', 'javascript:alert("XSS")', '\njavascript:alert(x)', '\x08//example.com', r'http://otherserver\@example.com', r'http:\\testserver\@example.com', r'http://testserver\me:[email protected]', r'http://testserver\@example.com', r'http:\\testserver\confirm\[email protected]', 'http:999999999', 'ftp:9999999999', '\n', 'http://[2001:cdba:0000:0000:0000:0000:3257:9652/', 'http://2001:cdba:0000:0000:0000:0000:3257:9652]/', ) for bad_url in bad_urls: with self.subTest(url=bad_url): self.assertIs( url_has_allowed_host_and_scheme(bad_url, allowed_hosts={'testserver', 'testserver2'}), False, ) def test_good_urls(self): good_urls = ( '/view/?param=http://example.com', '/view/?param=https://example.com', '/view?param=ftp://example.com', 'view/?param=//example.com', 'https://testserver/', 'HTTPS://testserver/', '//testserver/', 'http://testserver/[email protected]', '/url%20with%20spaces/', 'path/http:2222222222', ) for good_url in good_urls: with self.subTest(url=good_url): self.assertIs( url_has_allowed_host_and_scheme(good_url, 
allowed_hosts={'otherserver', 'testserver'}), True, ) def test_basic_auth(self): # Valid basic auth credentials are allowed. self.assertIs( url_has_allowed_host_and_scheme(r'http://user:pass@testserver/', allowed_hosts={'user:pass@testserver'}), True, ) def test_no_allowed_hosts(self): # A path without host is allowed. self.assertIs(url_has_allowed_host_and_scheme('/confirm/[email protected]', allowed_hosts=None), True) # Basic auth without host is not allowed. self.assertIs(url_has_allowed_host_and_scheme(r'http://testserver\@example.com', allowed_hosts=None), False) def test_allowed_hosts_str(self): self.assertIs(url_has_allowed_host_and_scheme('http://good.com/good', allowed_hosts='good.com'), True) self.assertIs(url_has_allowed_host_and_scheme('http://good.co/evil', allowed_hosts='good.com'), False) def test_secure_param_https_urls(self): secure_urls = ( 'https://example.com/p', 'HTTPS://example.com/p', '/view/?param=http://example.com', ) for url in secure_urls: with self.subTest(url=url): self.assertIs( url_has_allowed_host_and_scheme(url, allowed_hosts={'example.com'}, require_https=True), True, ) def test_secure_param_non_https_urls(self): insecure_urls = ( 'http://example.com/p', 'ftp://example.com/p', '//example.com/p', ) for url in insecure_urls: with self.subTest(url=url): self.assertIs( url_has_allowed_host_and_scheme(url, allowed_hosts={'example.com'}, require_https=True), False, ) class URLSafeBase64Tests(unittest.TestCase): def test_roundtrip(self): bytestring = b'foo' encoded = urlsafe_base64_encode(bytestring) decoded = urlsafe_base64_decode(encoded) self.assertEqual(bytestring, decoded) class IsSameDomainTests(unittest.TestCase): def test_good(self): for pair in ( ('example.com', 'example.com'), ('example.com', '.example.com'), ('foo.example.com', '.example.com'), ('example.com:8888', 'example.com:8888'), ('example.com:8888', '.example.com:8888'), ('foo.example.com:8888', '.example.com:8888'), ): self.assertIs(is_same_domain(*pair), True) def test_bad(self): for pair in ( ('example2.com', 'example.com'), ('foo.example.com', 'example.com'), ('example.com:9999', 'example.com:8888'), ('foo.example.com:8888', ''), ): self.assertIs(is_same_domain(*pair), False) class ETagProcessingTests(unittest.TestCase): def test_parsing(self): self.assertEqual( parse_etags(r'"" , "etag", "e\\tag", W/"weak"'), ['""', '"etag"', r'"e\\tag"', 'W/"weak"'] ) self.assertEqual(parse_etags('*'), ['*']) # Ignore RFC 2616 ETags that are invalid according to RFC 7232. 
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"']) def test_quoting(self): self.assertEqual(quote_etag('etag'), '"etag"') # unquoted self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak class HttpDateProcessingTests(unittest.TestCase): def test_http_date(self): t = 1167616461.0 self.assertEqual(http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT') def test_parsing_rfc1123(self): parsed = parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT') self.assertEqual( datetime.fromtimestamp(parsed, timezone.utc), datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc), ) @unittest.skipIf(platform.architecture()[0] == '32bit', 'The Year 2038 problem.') @mock.patch('django.utils.http.datetime.datetime') def test_parsing_rfc850(self, mocked_datetime): mocked_datetime.side_effect = datetime mocked_datetime.now = mock.Mock() now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=timezone.utc) now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=timezone.utc) now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=timezone.utc) tests = ( (now_1, 'Tuesday, 31-Dec-69 08:49:37 GMT', datetime(2069, 12, 31, 8, 49, 37, tzinfo=timezone.utc)), (now_1, 'Tuesday, 10-Nov-70 08:49:37 GMT', datetime(1970, 11, 10, 8, 49, 37, tzinfo=timezone.utc)), (now_1, 'Sunday, 06-Nov-94 08:49:37 GMT', datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc)), (now_2, 'Wednesday, 31-Dec-70 08:49:37 GMT', datetime(2070, 12, 31, 8, 49, 37, tzinfo=timezone.utc)), (now_2, 'Friday, 31-Dec-71 08:49:37 GMT', datetime(1971, 12, 31, 8, 49, 37, tzinfo=timezone.utc)), (now_3, 'Sunday, 31-Dec-00 08:49:37 GMT', datetime(2000, 12, 31, 8, 49, 37, tzinfo=timezone.utc)), (now_3, 'Friday, 31-Dec-99 08:49:37 GMT', datetime(1999, 12, 31, 8, 49, 37, tzinfo=timezone.utc)), ) for now, rfc850str, expected_date in tests: with self.subTest(rfc850str=rfc850str): mocked_datetime.now.return_value = now parsed = parse_http_date(rfc850str) mocked_datetime.now.assert_called_once_with(tz=timezone.utc) self.assertEqual( datetime.fromtimestamp(parsed, timezone.utc), expected_date, ) mocked_datetime.reset_mock() def test_parsing_asctime(self): parsed = parse_http_date('Sun Nov 6 08:49:37 1994') self.assertEqual( datetime.fromtimestamp(parsed, timezone.utc), datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc), ) def test_parsing_asctime_nonascii_digits(self): """Non-ASCII unicode decimals raise an error.""" with self.assertRaises(ValueError): parse_http_date('Sun Nov 6 08:49:37 1994') with self.assertRaises(ValueError): parse_http_date('Sun Nov 12 08:49:37 1994') def test_parsing_year_less_than_70(self): parsed = parse_http_date('Sun Nov 6 08:49:37 0037') self.assertEqual( datetime.fromtimestamp(parsed, timezone.utc), datetime(2037, 11, 6, 8, 49, 37, tzinfo=timezone.utc), ) class EscapeLeadingSlashesTests(unittest.TestCase): def test(self): tests = ( ('//example.com', '/%2Fexample.com'), ('//', '/%2F'), ) for url, expected in tests: with self.subTest(url=url): self.assertEqual(escape_leading_slashes(url), expected)
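# A short, self-contained sketch of how the helpers exercised above are commonly
# combined when validating a user-supplied redirect target, assuming only the
# functions imported at the top of this test module. The host names, URLs, and
# query values are illustrative.
if __name__ == '__main__':
    from django.utils.http import (
        http_date, parse_http_date, url_has_allowed_host_and_scheme, urlencode,
    )

    allowed = {'example.com'}

    # Only relative or same-host targets survive the check; everything else
    # should fall back to a safe default such as '/'.
    for candidate in ['/dashboard/', 'https://example.com/next', 'https://evil.test/']:
        target = candidate if url_has_allowed_host_and_scheme(
            candidate, allowed_hosts=allowed, require_https=True,
        ) else '/'
        print(candidate, '->', target)

    # doseq=True expands sequences into repeated keys, matching the tests above.
    print(urlencode({'tag': ['django', 'http']}, doseq=True))  # tag=django&tag=http

    # http_date()/parse_http_date() round-trip an epoch timestamp through the
    # RFC 1123 format used in HTTP headers.
    stamp = 1167616461
    assert parse_http_date(http_date(stamp)) == stamp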
import json import sys from django.core.exceptions import SuspiciousFileOperation from django.test import SimpleTestCase from django.utils import text from django.utils.functional import lazystr from django.utils.text import format_lazy from django.utils.translation import gettext_lazy, override IS_WIDE_BUILD = (len('\U0001F4A9') == 1) class TestUtilsText(SimpleTestCase): def test_get_text_list(self): self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d') self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c') self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b') self.assertEqual(text.get_text_list(['a']), 'a') self.assertEqual(text.get_text_list([]), '') with override('ar'): self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c") def test_smart_split(self): testdata = [ ('This is "a person" test.', ['This', 'is', '"a person"', 'test.']), ('This is "a person\'s" test.', ['This', 'is', '"a person\'s"', 'test.']), ('This is "a person\\"s" test.', ['This', 'is', '"a person\\"s"', 'test.']), ('"a \'one', ['"a', "'one"]), ('all friends\' tests', ['all', 'friends\'', 'tests']), ('url search_page words="something else"', ['url', 'search_page', 'words="something else"']), ("url search_page words='something else'", ['url', 'search_page', "words='something else'"]), ('url search_page words "something else"', ['url', 'search_page', 'words', '"something else"']), ('url search_page words-"something else"', ['url', 'search_page', 'words-"something else"']), ('url search_page words=hello', ['url', 'search_page', 'words=hello']), ('url search_page words="something else', ['url', 'search_page', 'words="something', 'else']), ("cut:','|cut:' '", ["cut:','|cut:' '"]), (lazystr("a b c d"), # Test for #20231 ['a', 'b', 'c', 'd']), ] for test, expected in testdata: with self.subTest(value=test): self.assertEqual(list(text.smart_split(test)), expected) def test_truncate_chars(self): truncator = text.Truncator('The quick brown fox jumped over the lazy dog.') self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.chars(100)), self.assertEqual('The quick brown fox …', truncator.chars(21)), self.assertEqual('The quick brown fo.....', truncator.chars(23, '.....')), self.assertEqual('.....', truncator.chars(4, '.....')), nfc = text.Truncator('o\xfco\xfco\xfco\xfc') nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308') self.assertEqual('oüoüoüoü', nfc.chars(8)) self.assertEqual('oüoüoüoü', nfd.chars(8)) self.assertEqual('oü…', nfc.chars(3)) self.assertEqual('oü…', nfd.chars(3)) # Ensure the final length is calculated correctly when there are # combining characters with no precomposed form, and that combining # characters are not split up. truncator = text.Truncator('-B\u030AB\u030A----8') self.assertEqual('-B\u030A…', truncator.chars(3)) self.assertEqual('-B\u030AB\u030A-…', truncator.chars(5)) self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8)) # Ensure the length of the end text is correctly calculated when it # contains combining characters with no precomposed form. 
truncator = text.Truncator('-----') self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A')) self.assertEqual('-----', truncator.chars(5, 'B\u030A')) # Make a best effort to shorten to the desired length, but requesting # a length shorter than the ellipsis shouldn't break self.assertEqual('…', text.Truncator('asdf').chars(0)) # lazy strings are handled correctly self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…') def test_truncate_chars_html(self): perf_test_values = [ (('</a' + '\t' * 50000) + '//>', None), ('&' * 50000, '&' * 9 + '…'), ('_X<<<<<<<<<<<>', None), ] for value, expected in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(expected if expected else value, truncator.chars(10, html=True)) def test_truncate_words(self): truncator = text.Truncator('The quick brown fox jumped over the lazy dog.') self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.words(10)) self.assertEqual('The quick brown fox…', truncator.words(4)) self.assertEqual('The quick brown fox[snip]', truncator.words(4, '[snip]')) # lazy strings are handled correctly truncator = text.Truncator(lazystr('The quick brown fox jumped over the lazy dog.')) self.assertEqual('The quick brown fox…', truncator.words(4)) def test_truncate_html_words(self): truncator = text.Truncator( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>' ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>', truncator.words(10, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox…</em></strong></p>', truncator.words(4, html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox....</em></strong></p>', truncator.words(4, '....', html=True) ) self.assertEqual( '<p id="par"><strong><em>The quick brown fox</em></strong></p>', truncator.words(4, '', html=True) ) # Test with new line inside tag truncator = text.Truncator( '<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over the lazy dog.</p>' ) self.assertEqual( '<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>', truncator.words(3, html=True) ) # Test self-closing tags truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over the lazy dog.') self.assertEqual('<br/>The <hr />quick brown…', truncator.words(3, html=True)) truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog.') self.assertEqual('<br>The <hr/>quick <em>brown…</em>', truncator.words(3, html=True)) # Test html entities truncator = text.Truncator('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo est&aacute;?</i>') self.assertEqual('<i>Buenos d&iacute;as! &#x00bf;C&oacute;mo…</i>', truncator.words(3, html=True)) truncator = text.Truncator('<p>I &lt;3 python, what about you?</p>') self.assertEqual('<p>I &lt;3 python,…</p>', truncator.words(3, html=True)) perf_test_values = [ ('</a' + '\t' * 50000) + '//>', '&' * 50000, '_X<<<<<<<<<<<>', ] for value in perf_test_values: with self.subTest(value=value): truncator = text.Truncator(value) self.assertEqual(value, truncator.words(50, html=True)) def test_wrap(self): digits = '1234 67 9' self.assertEqual(text.wrap(digits, 100), '1234 67 9') self.assertEqual(text.wrap(digits, 9), '1234 67 9') self.assertEqual(text.wrap(digits, 8), '1234 67\n9') self.assertEqual(text.wrap('short\na long line', 7), 'short\na long\nline') self.assertEqual(text.wrap('do-not-break-long-words please? 
ok', 8), 'do-not-break-long-words\nplease?\nok') long_word = 'l%sng' % ('o' * 20) self.assertEqual(text.wrap(long_word, 20), long_word) self.assertEqual(text.wrap('a %s word' % long_word, 10), 'a\n%s\nword' % long_word) self.assertEqual(text.wrap(lazystr(digits), 100), '1234 67 9') def test_normalize_newlines(self): self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n") self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n") self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi") self.assertEqual(text.normalize_newlines(""), "") self.assertEqual(text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n") def test_phone2numeric(self): numeric = text.phone2numeric('0800 flowers') self.assertEqual(numeric, '0800 3569377') lazy_numeric = lazystr(text.phone2numeric('0800 flowers')) self.assertEqual(lazy_numeric, '0800 3569377') def test_slugify(self): items = ( # given - expected - Unicode? ('Hello, World!', 'hello-world', False), ('spam & eggs', 'spam-eggs', False), (' multiple---dash and space ', 'multiple-dash-and-space', False), ('\t whitespace-in-value \n', 'whitespace-in-value', False), ('underscore_in-value', 'underscore_in-value', False), ('__strip__underscore-value___', 'strip__underscore-value', False), ('--strip-dash-value---', 'strip-dash-value', False), ('__strip-mixed-value---', 'strip-mixed-value', False), ('_ -strip-mixed-value _-', 'strip-mixed-value', False), ('spam & ıçüş', 'spam-ıçüş', True), ('foo ıç bar', 'foo-ıç-bar', True), (' foo ıç bar', 'foo-ıç-bar', True), ('你好', '你好', True), ('İstanbul', 'istanbul', True), ) for value, output, is_unicode in items: with self.subTest(value=value): self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output) # Interning the result may be useful, e.g. when fed to Path. with self.subTest('intern'): self.assertEqual(sys.intern(text.slugify('a')), 'a') def test_unescape_string_literal(self): items = [ ('"abc"', 'abc'), ("'abc'", 'abc'), ('"a \"bc\""', 'a "bc"'), ("'\'ab\' c'", "'ab' c"), ] for value, output in items: with self.subTest(value=value): self.assertEqual(text.unescape_string_literal(value), output) self.assertEqual(text.unescape_string_literal(lazystr(value)), output) def test_unescape_string_literal_invalid_value(self): items = ['', 'abc', "'abc\""] for item in items: msg = f'Not a string literal: {item!r}' with self.assertRaisesMessage(ValueError, msg): text.unescape_string_literal(item) def test_get_valid_filename(self): filename = "^&'@{}[],$=!-#()%+~_123.txt" self.assertEqual(text.get_valid_filename(filename), "-_123.txt") self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt") msg = "Could not derive file name from '???'" with self.assertRaisesMessage(SuspiciousFileOperation, msg): text.get_valid_filename('???') # After sanitizing this would yield '..'. 
msg = "Could not derive file name from '$.$.$'" with self.assertRaisesMessage(SuspiciousFileOperation, msg): text.get_valid_filename('$.$.$') def test_compress_sequence(self): data = [{'key': i} for i in range(10)] seq = list(json.JSONEncoder().iterencode(data)) seq = [s.encode() for s in seq] actual_length = len(b''.join(seq)) out = text.compress_sequence(seq) compressed_length = len(b''.join(out)) self.assertLess(compressed_length, actual_length) def test_format_lazy(self): self.assertEqual('django/test', format_lazy('{}/{}', 'django', lazystr('test'))) self.assertEqual('django/test', format_lazy('{0}/{1}', *('django', 'test'))) self.assertEqual('django/test', format_lazy('{a}/{b}', **{'a': 'django', 'b': 'test'})) self.assertEqual('django/test', format_lazy('{a[0]}/{a[1]}', a=('django', 'test'))) t = {} s = format_lazy('{0[a]}-{p[a]}', t, p=t) t['a'] = lazystr('django') self.assertEqual('django-django', s) t['a'] = 'update' self.assertEqual('update-update', s) # The format string can be lazy. (string comes from contrib.admin) s = format_lazy( gettext_lazy('Added {name} “{object}”.'), name='article', object='My first try', ) with override('fr'): self.assertEqual('Ajout de article «\xa0My first try\xa0».', s)
import threading from datetime import datetime, timedelta from unittest import mock from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections, models from django.db.models.manager import BaseManager from django.db.models.query import MAX_GET_RESULTS, EmptyQuerySet from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, skipUnlessDBFeature, ) from django.utils.translation import gettext_lazy from .models import ( Article, ArticleSelectOnSave, ChildPrimaryKeyWithDefault, FeaturedArticle, PrimaryKeyWithDefault, SelfRef, ) class ModelInstanceCreationTests(TestCase): def test_object_is_not_written_to_database_until_save_was_called(self): a = Article( id=None, headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) self.assertIsNone(a.id) self.assertEqual(Article.objects.all().count(), 0) # Save it into the database. You have to call save() explicitly. a.save() self.assertIsNotNone(a.id) self.assertEqual(Article.objects.all().count(), 1) def test_can_initialize_model_instance_using_positional_arguments(self): """ You can initialize a model instance using positional arguments, which should match the field order as defined in the model. """ a = Article(None, 'Second article', datetime(2005, 7, 29)) a.save() self.assertEqual(a.headline, 'Second article') self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0)) def test_can_create_instance_using_kwargs(self): a = Article( id=None, headline='Third article', pub_date=datetime(2005, 7, 30), ) a.save() self.assertEqual(a.headline, 'Third article') self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0)) def test_autofields_generate_different_values_for_each_instance(self): a1 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a2 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) a3 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0)) self.assertNotEqual(a3.id, a1.id) self.assertNotEqual(a3.id, a2.id) def test_can_mix_and_match_position_and_kwargs(self): # You can also mix and match position and keyword arguments, but # be sure not to duplicate field information. a = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Fourth article') def test_positional_and_keyword_args_for_the_same_field(self): msg = "Article() got both positional and keyword arguments for field '%s'." with self.assertRaisesMessage(TypeError, msg % 'headline'): Article(None, 'Fifth article', headline='Other headline.') with self.assertRaisesMessage(TypeError, msg % 'headline'): Article(None, 'Sixth article', headline='') with self.assertRaisesMessage(TypeError, msg % 'pub_date'): Article(None, 'Seventh article', datetime(2021, 3, 1), pub_date=None) def test_cannot_create_instance_with_invalid_kwargs(self): msg = "Article() got unexpected keyword arguments: 'foo'" with self.assertRaisesMessage(TypeError, msg): Article( id=None, headline='Some headline', pub_date=datetime(2005, 7, 31), foo='bar', ) msg = "Article() got unexpected keyword arguments: 'foo', 'bar'" with self.assertRaisesMessage(TypeError, msg): Article( id=None, headline='Some headline', pub_date=datetime(2005, 7, 31), foo='bar', bar='baz', ) def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self): """ You can leave off the value for an AutoField when creating an object, because it'll get filled in automatically when you save(). 
""" a = Article(headline='Article 5', pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Article 5') self.assertIsNotNone(a.id) def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self): a = Article(pub_date=datetime(2005, 7, 31)) a.save() self.assertEqual(a.headline, 'Default headline') def test_for_datetimefields_saves_as_much_precision_as_was_given(self): """as much precision in *seconds*""" a1 = Article( headline='Article 7', pub_date=datetime(2005, 7, 31, 12, 30), ) a1.save() self.assertEqual(Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30)) a2 = Article( headline='Article 8', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a2.save() self.assertEqual(Article.objects.get(id__exact=a2.id).pub_date, datetime(2005, 7, 31, 12, 30, 45)) def test_saving_an_object_again_does_not_create_a_new_object(self): a = Article(headline='original', pub_date=datetime(2014, 5, 16)) a.save() current_id = a.id a.save() self.assertEqual(a.id, current_id) a.headline = 'Updated headline' a.save() self.assertEqual(a.id, current_id) def test_querysets_checking_for_membership(self): headlines = [ 'Parrot programs in Python', 'Second article', 'Third article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() a = Article(headline='Some headline', pub_date=some_pub_date) a.save() # You can use 'in' to test for membership... self.assertIn(a, Article.objects.all()) # ... but there will often be more efficient ways if that is all you need: self.assertTrue(Article.objects.filter(id=a.id).exists()) def test_save_primary_with_default(self): # An UPDATE attempt is skipped when a primary key has default. with self.assertNumQueries(1): PrimaryKeyWithDefault().save() def test_save_parent_primary_with_default(self): # An UPDATE attempt is skipped when an inherited primary key has # default. 
with self.assertNumQueries(2): ChildPrimaryKeyWithDefault().save() class ModelTest(TestCase): def test_objects_attribute_is_only_available_on_the_class_itself(self): with self.assertRaisesMessage(AttributeError, "Manager isn't accessible via Article instances"): getattr(Article(), "objects",) self.assertFalse(hasattr(Article(), 'objects')) self.assertTrue(hasattr(Article, 'objects')) def test_queryset_delete_removes_all_items_in_that_queryset(self): headlines = [ 'An article', 'Article One', 'Amazing article', 'Boring article'] some_pub_date = datetime(2014, 5, 16, 12, 1) for headline in headlines: Article(headline=headline, pub_date=some_pub_date).save() self.assertQuerysetEqual( Article.objects.all().order_by('headline'), sorted(headlines), transform=lambda a: a.headline, ) Article.objects.filter(headline__startswith='A').delete() self.assertEqual(Article.objects.get().headline, 'Boring article') def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self): some_pub_date = datetime(2014, 5, 16, 12, 1) a1 = Article.objects.create(headline='First', pub_date=some_pub_date) a2 = Article.objects.create(headline='Second', pub_date=some_pub_date) self.assertNotEqual(a1, a2) self.assertEqual(a1, Article.objects.get(id__exact=a1.id)) self.assertNotEqual(Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id)) def test_microsecond_precision(self): a9 = Article( headline='Article 9', pub_date=datetime(2005, 7, 31, 12, 30, 45, 180), ) a9.save() self.assertEqual(Article.objects.get(pk=a9.pk).pub_date, datetime(2005, 7, 31, 12, 30, 45, 180)) def test_manually_specify_primary_key(self): # You can manually specify the primary key when creating a new object. a101 = Article( id=101, headline='Article 101', pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a101.save() a101 = Article.objects.get(pk=101) self.assertEqual(a101.headline, 'Article 101') def test_create_method(self): # You can create saved objects in a single step a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) self.assertEqual(Article.objects.get(headline="Article 10"), a10) def test_year_lookup_edge_case(self): # Edge-case test: A year lookup should retrieve all objects in # the given year, including Jan. 1 and Dec. 31. a11 = Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) a12 = Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) self.assertSequenceEqual( Article.objects.filter(pub_date__year=2008), [a11, a12], ) def test_unicode_data(self): # Unicode data works, too. a = Article( headline='\u6797\u539f \u3081\u3050\u307f', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.get(pk=a.id).headline, '\u6797\u539f \u3081\u3050\u307f') def test_hash_function(self): # Model instances have a hash function, so they can be used in sets # or as dictionary keys. Two models compare as equal if their primary # keys are equal. a10 = Article.objects.create( headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45), ) a11 = Article.objects.create( headline='Article 11', pub_date=datetime(2008, 1, 1), ) a12 = Article.objects.create( headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999), ) s = {a10, a11, a12} self.assertIn(Article.objects.get(headline='Article 11'), s) def test_extra_method_select_argument_with_dashes_and_values(self): # The 'select' argument to extra() supports names with dashes in # them, as long as you use values(). 
Article.objects.bulk_create([ Article(headline='Article 10', pub_date=datetime(2005, 7, 31, 12, 30, 45)), Article(headline='Article 11', pub_date=datetime(2008, 1, 1)), Article(headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999)), ]) dicts = Article.objects.filter( pub_date__year=2008).extra( select={'dashed-value': '1'}).values('headline', 'dashed-value') self.assertEqual( [sorted(d.items()) for d in dicts], [[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]] ) def test_extra_method_select_argument_with_dashes(self): # If you use 'select' with extra() and names containing dashes on a # query that's *not* a values() query, those extra 'select' values # will silently be ignored. Article.objects.bulk_create([ Article(headline='Article 10', pub_date=datetime(2005, 7, 31, 12, 30, 45)), Article(headline='Article 11', pub_date=datetime(2008, 1, 1)), Article(headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999)), ]) articles = Article.objects.filter( pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'}) self.assertEqual(articles[0].undashedvalue, 2) def test_create_relation_with_gettext_lazy(self): """ gettext_lazy objects work when saving model instances through various methods. Refs #10498. """ notlazy = 'test' lazy = gettext_lazy(notlazy) Article.objects.create(headline=lazy, pub_date=datetime.now()) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # test that assign + save works with Promise objects article.headline = lazy article.save() self.assertEqual(article.headline, notlazy) # test .update() Article.objects.update(headline=lazy) article = Article.objects.get() self.assertEqual(article.headline, notlazy) # still test bulk_create() Article.objects.all().delete() Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())]) article = Article.objects.get() self.assertEqual(article.headline, notlazy) def test_emptyqs(self): msg = "EmptyQuerySet can't be instantiated" with self.assertRaisesMessage(TypeError, msg): EmptyQuerySet() self.assertIsInstance(Article.objects.none(), EmptyQuerySet) self.assertNotIsInstance('', EmptyQuerySet) def test_emptyqs_values(self): # test for #15959 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): qs = Article.objects.none().values_list('pk') self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(len(qs), 0) def test_emptyqs_customqs(self): # A hacky test for custom QuerySet subclass - refs #17271 Article.objects.create(headline='foo', pub_date=datetime.now()) class CustomQuerySet(models.QuerySet): def do_something(self): return 'did something' qs = Article.objects.all() qs.__class__ = CustomQuerySet qs = qs.none() with self.assertNumQueries(0): self.assertEqual(len(qs), 0) self.assertIsInstance(qs, EmptyQuerySet) self.assertEqual(qs.do_something(), 'did something') def test_emptyqs_values_order(self): # Tests for ticket #17712 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().values_list('id').order_by('id')), 0) with self.assertNumQueries(0): self.assertEqual(len(Article.objects.none().filter( id__in=Article.objects.values_list('id', flat=True))), 0) @skipUnlessDBFeature('can_distinct_on_fields') def test_emptyqs_distinct(self): # Tests for #19426 Article.objects.create(headline='foo', pub_date=datetime.now()) with self.assertNumQueries(0): 
self.assertEqual(len(Article.objects.none().distinct('headline', 'pub_date')), 0) def test_ticket_20278(self): sr = SelfRef.objects.create() with self.assertRaises(ObjectDoesNotExist): SelfRef.objects.get(selfref=sr) def test_eq(self): self.assertEqual(Article(id=1), Article(id=1)) self.assertNotEqual(Article(id=1), object()) self.assertNotEqual(object(), Article(id=1)) a = Article() self.assertEqual(a, a) self.assertEqual(a, mock.ANY) self.assertNotEqual(Article(), a) def test_hash(self): # Value based on PK self.assertEqual(hash(Article(id=1)), hash(1)) msg = 'Model instances without primary key value are unhashable' with self.assertRaisesMessage(TypeError, msg): # No PK value -> unhashable (because save() would then change # hash) hash(Article()) def test_missing_hash_not_inherited(self): class NoHash(models.Model): def __eq__(self, other): return super.__eq__(other) with self.assertRaisesMessage(TypeError, "unhashable type: 'NoHash'"): hash(NoHash(id=1)) def test_specified_parent_hash_inherited(self): class ParentHash(models.Model): def __eq__(self, other): return super.__eq__(other) __hash__ = models.Model.__hash__ self.assertEqual(hash(ParentHash(id=1)), 1) def test_delete_and_access_field(self): # Accessing a field after it's deleted from a model reloads its value. pub_date = datetime.now() article = Article.objects.create(headline='foo', pub_date=pub_date) new_pub_date = article.pub_date + timedelta(days=10) article.headline = 'bar' article.pub_date = new_pub_date del article.headline with self.assertNumQueries(1): self.assertEqual(article.headline, 'foo') # Fields that weren't deleted aren't reloaded. self.assertEqual(article.pub_date, new_pub_date) def test_multiple_objects_max_num_fetched(self): max_results = MAX_GET_RESULTS - 1 Article.objects.bulk_create( Article(headline='Area %s' % i, pub_date=datetime(2005, 7, 28)) for i in range(max_results) ) self.assertRaisesMessage( MultipleObjectsReturned, 'get() returned more than one Article -- it returned %d!' % max_results, Article.objects.get, headline__startswith='Area', ) Article.objects.create(headline='Area %s' % max_results, pub_date=datetime(2005, 7, 28)) self.assertRaisesMessage( MultipleObjectsReturned, 'get() returned more than one Article -- it returned more than %d!' % max_results, Article.objects.get, headline__startswith='Area', ) class ModelLookupTest(TestCase): @classmethod def setUpTestData(cls): # Create an Article. cls.a = Article( id=None, headline='Swallow programs in Python', pub_date=datetime(2005, 7, 28), ) # Save it into the database. You have to call save() explicitly. cls.a.save() def test_all_lookup(self): # Change values by changing the attributes, then calling save(). self.a.headline = 'Parrot programs in Python' self.a.save() # Article.objects.all() returns all the articles in the database. self.assertSequenceEqual(Article.objects.all(), [self.a]) def test_rich_lookup(self): # Django provides a rich database lookup API. self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a) self.assertEqual(Article.objects.get(headline__startswith='Swallow'), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a) self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), self.a) self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a) def test_equal_lookup(self): # The "__exact" lookup type can be omitted, as a shortcut. 
self.assertEqual(Article.objects.get(id=self.a.id), self.a) self.assertEqual(Article.objects.get(headline='Swallow programs in Python'), self.a) self.assertSequenceEqual( Article.objects.filter(pub_date__year=2005), [self.a], ) self.assertSequenceEqual( Article.objects.filter(pub_date__year=2004), [], ) self.assertSequenceEqual( Article.objects.filter(pub_date__year=2005, pub_date__month=7), [self.a], ) self.assertSequenceEqual( Article.objects.filter(pub_date__week_day=5), [self.a], ) self.assertSequenceEqual( Article.objects.filter(pub_date__week_day=6), [], ) def test_does_not_exist(self): # Django raises an Article.DoesNotExist exception for get() if the # parameters don't match any object. with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."): Article.objects.get(id__exact=2000,) # To avoid dict-ordering related errors check only one lookup # in single assert. with self.assertRaises(ObjectDoesNotExist): Article.objects.get(pub_date__year=2005, pub_date__month=8) with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."): Article.objects.get(pub_date__week_day=6,) def test_lookup_by_primary_key(self): # Lookup by a primary key is the most common case, so Django # provides a shortcut for primary-key exact lookups. # The following is identical to articles.get(id=a.id). self.assertEqual(Article.objects.get(pk=self.a.id), self.a) # pk can be used as a shortcut for the primary key name in any query. self.assertSequenceEqual(Article.objects.filter(pk__in=[self.a.id]), [self.a]) # Model instances of the same type and same ID are considered equal. a = Article.objects.get(pk=self.a.id) b = Article.objects.get(pk=self.a.id) self.assertEqual(a, b) def test_too_many(self): # Create a very similar object a = Article( id=None, headline='Swallow bites Python', pub_date=datetime(2005, 7, 28), ) a.save() self.assertEqual(Article.objects.count(), 2) # Django raises an Article.MultipleObjectsReturned exception if the # lookup matches more than one object msg = "get() returned more than one Article -- it returned 2!" with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(headline__startswith='Swallow',) with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(pub_date__year=2005,) with self.assertRaisesMessage(MultipleObjectsReturned, msg): Article.objects.get(pub_date__year=2005, pub_date__month=7) class ConcurrentSaveTests(TransactionTestCase): available_apps = ['basic'] @skipUnlessDBFeature('test_db_allows_multiple_connections') def test_concurrent_delete_with_save(self): """ Test fetching, deleting and finally saving an object - we should get an insert in this case. """ a = Article.objects.create(headline='foo', pub_date=datetime.now()) exceptions = [] def deleter(): try: # Do not delete a directly - doing so alters its state. 
Article.objects.filter(pk=a.pk).delete() except Exception as e: exceptions.append(e) finally: connections[DEFAULT_DB_ALIAS].close() self.assertEqual(len(exceptions), 0) t = threading.Thread(target=deleter) t.start() t.join() a.save() self.assertEqual(Article.objects.get(pk=a.pk).headline, 'foo') class ManagerTest(SimpleTestCase): QUERYSET_PROXY_METHODS = [ 'none', 'count', 'dates', 'datetimes', 'distinct', 'extra', 'get', 'get_or_create', 'update_or_create', 'create', 'bulk_create', 'bulk_update', 'filter', 'aggregate', 'annotate', 'alias', 'complex_filter', 'exclude', 'in_bulk', 'iterator', 'earliest', 'latest', 'first', 'last', 'order_by', 'select_for_update', 'select_related', 'prefetch_related', 'values', 'values_list', 'update', 'reverse', 'defer', 'only', 'using', 'exists', 'contains', 'explain', '_insert', '_update', 'raw', 'union', 'intersection', 'difference', ] def test_manager_methods(self): """ This test ensures that the correct set of methods from `QuerySet` are copied onto `Manager`. It's particularly useful to prevent accidentally leaking new methods into `Manager`. New `QuerySet` methods that should also be copied onto `Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`. """ self.assertEqual( sorted(BaseManager._get_queryset_methods(models.QuerySet)), sorted(self.QUERYSET_PROXY_METHODS), ) class SelectOnSaveTests(TestCase): def test_select_on_save(self): a1 = Article.objects.create(pub_date=datetime.now()) with self.assertNumQueries(1): a1.save() asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(2): asos.save() with self.assertNumQueries(1): asos.save(force_update=True) Article.objects.all().delete() with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'): with self.assertNumQueries(1): asos.save(force_update=True) def test_select_on_save_lying_update(self): """ select_on_save works correctly if the database doesn't return correct information about matched rows from UPDATE. """ # Change the manager to not return "row matched" for update(). # We are going to change the Article's _base_manager class # dynamically. This is a bit of a hack, but it seems hard to # test this properly otherwise. Article's manager, because # proxy models use their parent model's _base_manager. orig_class = Article._base_manager._queryset_class class FakeQuerySet(models.QuerySet): # Make sure the _update method below is in fact called. called = False def _update(self, *args, **kwargs): FakeQuerySet.called = True super()._update(*args, **kwargs) return 0 try: Article._base_manager._queryset_class = FakeQuerySet asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now()) with self.assertNumQueries(3): asos.save() self.assertTrue(FakeQuerySet.called) # This is not wanted behavior, but this is how Django has always # behaved for databases that do not return correct information # about matched rows for UPDATE. with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'): asos.save(force_update=True) msg = ( "An error occurred in the current transaction. You can't " "execute queries until the end of the 'atomic' block." 
) with self.assertRaisesMessage(DatabaseError, msg): asos.save(update_fields=['pub_date']) finally: Article._base_manager._queryset_class = orig_class class ModelRefreshTests(TestCase): def test_refresh(self): a = Article.objects.create(pub_date=datetime.now()) Article.objects.create(pub_date=datetime.now()) Article.objects.filter(pk=a.pk).update(headline='new headline') with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.headline, 'new headline') orig_pub_date = a.pub_date new_pub_date = a.pub_date + timedelta(10) Article.objects.update(headline='new headline 2', pub_date=new_pub_date) with self.assertNumQueries(1): a.refresh_from_db(fields=['headline']) self.assertEqual(a.headline, 'new headline 2') self.assertEqual(a.pub_date, orig_pub_date) with self.assertNumQueries(1): a.refresh_from_db() self.assertEqual(a.pub_date, new_pub_date) def test_unknown_kwarg(self): s = SelfRef.objects.create() msg = "refresh_from_db() got an unexpected keyword argument 'unknown_kwarg'" with self.assertRaisesMessage(TypeError, msg): s.refresh_from_db(unknown_kwarg=10) def test_lookup_in_fields(self): s = SelfRef.objects.create() msg = 'Found "__" in fields argument. Relations and transforms are not allowed in fields.' with self.assertRaisesMessage(ValueError, msg): s.refresh_from_db(fields=['foo__bar']) def test_refresh_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create() s3 = SelfRef.objects.create(selfref=s1) s3_copy = SelfRef.objects.get(pk=s3.pk) s3_copy.selfref.touched = True s3.selfref = s2 s3.save() with self.assertNumQueries(1): s3_copy.refresh_from_db() with self.assertNumQueries(1): # The old related instance was thrown away (the selfref_id has # changed). It needs to be reloaded on access, so one query # executed. self.assertFalse(hasattr(s3_copy.selfref, 'touched')) self.assertEqual(s3_copy.selfref, s2) def test_refresh_null_fk(self): s1 = SelfRef.objects.create() s2 = SelfRef.objects.create(selfref=s1) s2.selfref = None s2.refresh_from_db() self.assertEqual(s2.selfref, s1) def test_refresh_unsaved(self): pub_date = datetime.now() a = Article.objects.create(pub_date=pub_date) a2 = Article(id=a.pk) with self.assertNumQueries(1): a2.refresh_from_db() self.assertEqual(a2.pub_date, pub_date) self.assertEqual(a2._state.db, "default") def test_refresh_fk_on_delete_set_null(self): a = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) s1 = SelfRef.objects.create(article=a) a.delete() s1.refresh_from_db() self.assertIsNone(s1.article_id) self.assertIsNone(s1.article) def test_refresh_no_fields(self): a = Article.objects.create(pub_date=datetime.now()) with self.assertNumQueries(0): a.refresh_from_db(fields=[]) def test_refresh_clears_reverse_related(self): """refresh_from_db() clear cached reverse relations.""" article = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) self.assertFalse(hasattr(article, 'featured')) FeaturedArticle.objects.create(article_id=article.pk) article.refresh_from_db() self.assertTrue(hasattr(article, 'featured')) def test_refresh_clears_one_to_one_field(self): article = Article.objects.create( headline='Parrot programs in Python', pub_date=datetime(2005, 7, 28), ) featured = FeaturedArticle.objects.create(article_id=article.pk) self.assertEqual(featured.article.headline, 'Parrot programs in Python') article.headline = 'Parrot programs in Python 2.0' article.save() featured.refresh_from_db() self.assertEqual(featured.article.headline, 'Parrot programs 
in Python 2.0') def test_prefetched_cache_cleared(self): a = Article.objects.create(pub_date=datetime(2005, 7, 28)) s = SelfRef.objects.create(article=a) # refresh_from_db() without fields=[...] a1_prefetched = Article.objects.prefetch_related('selfref_set').first() self.assertCountEqual(a1_prefetched.selfref_set.all(), [s]) s.article = None s.save() # Relation is cleared and prefetch cache is stale. self.assertCountEqual(a1_prefetched.selfref_set.all(), [s]) a1_prefetched.refresh_from_db() # Cache was cleared and new results are available. self.assertCountEqual(a1_prefetched.selfref_set.all(), []) # refresh_from_db() with fields=[...] a2_prefetched = Article.objects.prefetch_related('selfref_set').first() self.assertCountEqual(a2_prefetched.selfref_set.all(), []) s.article = a s.save() # Relation is added and prefetch cache is stale. self.assertCountEqual(a2_prefetched.selfref_set.all(), []) a2_prefetched.refresh_from_db(fields=['selfref_set']) # Cache was cleared and new results are available. self.assertCountEqual(a2_prefetched.selfref_set.all(), [s])
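# Hedged illustration (not part of the original suite): a minimal sketch of the
# refresh_from_db() behaviour exercised by ModelRefreshTests above, reusing the
# Article model from these tests. The helper name is hypothetical.
def _refresh_from_db_sketch():
    a = Article.objects.create(headline='old', pub_date=datetime.now())
    # Simulate a concurrent change that bypasses the in-memory instance.
    Article.objects.filter(pk=a.pk).update(headline='new')
    a.refresh_from_db(fields=['headline'])  # single query, restricted to "headline"
    assert a.headline == 'new'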
9748dfd6abf8e0be3764df6fa3154091384b62b404ffcff81b4a793bd6d283c8
import pickle from datetime import datetime from functools import partialmethod from io import StringIO from unittest import mock, skipIf from django.core import serializers from django.core.serializers import SerializerDoesNotExist from django.core.serializers.base import PickleSerializer, ProgressBar from django.db import connection, transaction from django.http import HttpResponse from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from django.test.utils import Approximate, ignore_warnings from django.utils.deprecation import RemovedInDjango50Warning from .models import ( Actor, Article, Author, AuthorProfile, BaseModel, Category, Child, ComplexModel, Movie, Player, ProxyBaseModel, ProxyProxyBaseModel, Score, Team, ) @override_settings( SERIALIZATION_MODULES={ "json2": "django.core.serializers.json", } ) class SerializerRegistrationTests(SimpleTestCase): def setUp(self): self.old_serializers = serializers._serializers serializers._serializers = {} def tearDown(self): serializers._serializers = self.old_serializers def test_register(self): "Registering a new serializer populates the full registry. Refs #14823" serializers.register_serializer('json3', 'django.core.serializers.json') public_formats = serializers.get_public_serializer_formats() self.assertIn('json3', public_formats) self.assertIn('json2', public_formats) self.assertIn('xml', public_formats) def test_unregister(self): "Unregistering a serializer doesn't cause the registry to be repopulated. Refs #14823" serializers.unregister_serializer('xml') serializers.register_serializer('json3', 'django.core.serializers.json') public_formats = serializers.get_public_serializer_formats() self.assertNotIn('xml', public_formats) self.assertIn('json3', public_formats) def test_unregister_unknown_serializer(self): with self.assertRaises(SerializerDoesNotExist): serializers.unregister_serializer("nonsense") def test_builtin_serializers(self): "Requesting a list of serializer formats populates the registry" all_formats = set(serializers.get_serializer_formats()) public_formats = set(serializers.get_public_serializer_formats()) self.assertIn('xml', all_formats), self.assertIn('xml', public_formats) self.assertIn('json2', all_formats) self.assertIn('json2', public_formats) self.assertIn('python', all_formats) self.assertNotIn('python', public_formats) def test_get_unknown_serializer(self): """ #15889: get_serializer('nonsense') raises a SerializerDoesNotExist """ with self.assertRaises(SerializerDoesNotExist): serializers.get_serializer("nonsense") with self.assertRaises(KeyError): serializers.get_serializer("nonsense") # SerializerDoesNotExist is instantiated with the nonexistent format with self.assertRaisesMessage(SerializerDoesNotExist, 'nonsense'): serializers.get_serializer("nonsense") def test_get_unknown_deserializer(self): with self.assertRaises(SerializerDoesNotExist): serializers.get_deserializer("nonsense") class SerializersTestBase: serializer_name = None # Set by subclasses to the serialization format name @classmethod def setUpTestData(cls): sports = Category.objects.create(name="Sports") music = Category.objects.create(name="Music") op_ed = Category.objects.create(name="Op-Ed") cls.joe = Author.objects.create(name='Joe') cls.jane = Author.objects.create(name='Jane') cls.a1 = Article( author=cls.jane, headline="Poker has no place on ESPN", pub_date=datetime(2006, 6, 16, 11, 00) ) cls.a1.save() cls.a1.categories.set([sports, op_ed]) cls.a2 = Article( author=cls.joe, headline="Time to reform copyright", 
pub_date=datetime(2006, 6, 16, 13, 00, 11, 345) ) cls.a2.save() cls.a2.categories.set([music, op_ed]) def test_serialize(self): """Basic serialization works.""" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) self.assertTrue(self._validate_output(serial_str)) def test_serializer_roundtrip(self): """Serialized content can be deserialized.""" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) models = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(len(models), 2) def test_serialize_to_stream(self): obj = ComplexModel(field1='first', field2='second', field3='third') obj.save_base(raw=True) # Serialize the test database to a stream for stream in (StringIO(), HttpResponse()): serializers.serialize(self.serializer_name, [obj], indent=2, stream=stream) # Serialize normally for a comparison string_data = serializers.serialize(self.serializer_name, [obj], indent=2) # The two are the same if isinstance(stream, StringIO): self.assertEqual(string_data, stream.getvalue()) else: self.assertEqual(string_data, stream.content.decode()) def test_serialize_specific_fields(self): obj = ComplexModel(field1='first', field2='second', field3='third') obj.save_base(raw=True) # Serialize then deserialize the test database serialized_data = serializers.serialize( self.serializer_name, [obj], indent=2, fields=('field1', 'field3') ) result = next(serializers.deserialize(self.serializer_name, serialized_data)) # The deserialized object contains data in only the serialized fields. self.assertEqual(result.object.field1, 'first') self.assertEqual(result.object.field2, '') self.assertEqual(result.object.field3, 'third') def test_altering_serialized_output(self): """ The ability to create new objects by modifying serialized content. """ old_headline = "Poker has no place on ESPN" new_headline = "Poker has no place on television" serial_str = serializers.serialize(self.serializer_name, Article.objects.all()) serial_str = serial_str.replace(old_headline, new_headline) models = list(serializers.deserialize(self.serializer_name, serial_str)) # Prior to saving, old headline is in place self.assertTrue(Article.objects.filter(headline=old_headline)) self.assertFalse(Article.objects.filter(headline=new_headline)) for model in models: model.save() # After saving, new headline is in place self.assertTrue(Article.objects.filter(headline=new_headline)) self.assertFalse(Article.objects.filter(headline=old_headline)) def test_one_to_one_as_pk(self): """ If you use your own primary key field (such as a OneToOneField), it doesn't appear in the serialized field list - it replaces the pk identifier. 
""" AuthorProfile.objects.create(author=self.joe, date_of_birth=datetime(1970, 1, 1)) serial_str = serializers.serialize(self.serializer_name, AuthorProfile.objects.all()) self.assertFalse(self._get_field_values(serial_str, 'author')) for obj in serializers.deserialize(self.serializer_name, serial_str): self.assertEqual(obj.object.pk, self.joe.pk) def test_serialize_field_subset(self): """Output can be restricted to a subset of fields""" valid_fields = ('headline', 'pub_date') invalid_fields = ("author", "categories") serial_str = serializers.serialize(self.serializer_name, Article.objects.all(), fields=valid_fields) for field_name in invalid_fields: self.assertFalse(self._get_field_values(serial_str, field_name)) for field_name in valid_fields: self.assertTrue(self._get_field_values(serial_str, field_name)) def test_serialize_unicode_roundtrip(self): """Unicode makes the roundtrip intact""" actor_name = "Za\u017c\u00f3\u0142\u0107" movie_title = 'G\u0119\u015bl\u0105 ja\u017a\u0144' ac = Actor(name=actor_name) mv = Movie(title=movie_title, actor=ac) ac.save() mv.save() serial_str = serializers.serialize(self.serializer_name, [mv]) self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title) self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name) obj_list = list(serializers.deserialize(self.serializer_name, serial_str)) mv_obj = obj_list[0].object self.assertEqual(mv_obj.title, movie_title) def test_unicode_serialization(self): unicode_name = 'יוניקוד' data = serializers.serialize(self.serializer_name, [Author(name=unicode_name)]) self.assertIn(unicode_name, data) objs = list(serializers.deserialize(self.serializer_name, data)) self.assertEqual(objs[0].object.name, unicode_name) def test_serialize_progressbar(self): fake_stdout = StringIO() serializers.serialize( self.serializer_name, Article.objects.all(), progress_output=fake_stdout, object_count=Article.objects.count() ) self.assertTrue( fake_stdout.getvalue().endswith('[' + '.' * ProgressBar.progress_width + ']\n') ) def test_serialize_superfluous_queries(self): """Ensure no superfluous queries are made when serializing ForeignKeys #17602 """ ac = Actor(name='Actor name') ac.save() mv = Movie(title='Movie title', actor_id=ac.pk) mv.save() with self.assertNumQueries(0): serializers.serialize(self.serializer_name, [mv]) def test_serialize_prefetch_related_m2m(self): # One query for the Article table and one for each prefetched m2m # field. with self.assertNumQueries(3): serializers.serialize( self.serializer_name, Article.objects.all().prefetch_related('categories', 'meta_data'), ) # One query for the Article table, and two m2m queries for each # article. 
with self.assertNumQueries(5): serializers.serialize(self.serializer_name, Article.objects.all()) def test_serialize_with_null_pk(self): """ Serialized data with no primary key results in a model instance with no id """ category = Category(name="Reference") serial_str = serializers.serialize(self.serializer_name, [category]) pk_value = self._get_pk_values(serial_str)[0] self.assertFalse(pk_value) cat_obj = list(serializers.deserialize(self.serializer_name, serial_str))[0].object self.assertIsNone(cat_obj.id) def test_float_serialization(self): """Float values serialize and deserialize intact""" sc = Score(score=3.4) sc.save() serial_str = serializers.serialize(self.serializer_name, [sc]) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1)) def test_deferred_field_serialization(self): author = Author.objects.create(name='Victor Hugo') author = Author.objects.defer('name').get(pk=author.pk) serial_str = serializers.serialize(self.serializer_name, [author]) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertIsInstance(deserial_objs[0].object, Author) def test_custom_field_serialization(self): """Custom fields serialize and deserialize intact""" team_str = "Spartak Moskva" player = Player() player.name = "Soslan Djanaev" player.rank = 1 player.team = Team(team_str) player.save() serial_str = serializers.serialize(self.serializer_name, Player.objects.all()) team = self._get_field_values(serial_str, "team") self.assertTrue(team) self.assertEqual(team[0], team_str) deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str)) self.assertEqual(deserial_objs[0].object.team.to_string(), player.team.to_string()) def test_pre_1000ad_date(self): """Year values before 1000AD are properly formatted""" # Regression for #12524 -- dates before 1000AD get prefixed # 0's on the year a = Article.objects.create( author=self.jane, headline="Nobody remembers the early years", pub_date=datetime(1, 2, 3, 4, 5, 6)) serial_str = serializers.serialize(self.serializer_name, [a]) date_values = self._get_field_values(serial_str, "pub_date") self.assertEqual(date_values[0].replace('T', ' '), "0001-02-03 04:05:06") def test_pkless_serialized_strings(self): """ Serialized strings without PKs can be turned into models """ deserial_objs = list(serializers.deserialize(self.serializer_name, self.pkless_str)) for obj in deserial_objs: self.assertFalse(obj.object.id) obj.save() self.assertEqual(Category.objects.all().count(), 5) def test_deterministic_mapping_ordering(self): """Mapping such as fields should be deterministically ordered. 
(#24558)""" output = serializers.serialize(self.serializer_name, [self.a1], indent=2) categories = self.a1.categories.values_list('pk', flat=True) self.assertEqual(output, self.mapping_ordering_str % { 'article_pk': self.a1.pk, 'author_pk': self.a1.author_id, 'first_category_pk': categories[0], 'second_category_pk': categories[1], }) def test_deserialize_force_insert(self): """Deserialized content can be saved with force_insert as a parameter.""" serial_str = serializers.serialize(self.serializer_name, [self.a1]) deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[0] with mock.patch('django.db.models.Model') as mock_model: deserial_obj.save(force_insert=False) mock_model.save_base.assert_called_with(deserial_obj.object, raw=True, using=None, force_insert=False) @skipUnlessDBFeature('can_defer_constraint_checks') def test_serialize_proxy_model(self): BaseModel.objects.create(parent_data=1) base_objects = BaseModel.objects.all() proxy_objects = ProxyBaseModel.objects.all() proxy_proxy_objects = ProxyProxyBaseModel.objects.all() base_data = serializers.serialize("json", base_objects) proxy_data = serializers.serialize("json", proxy_objects) proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects) self.assertEqual(base_data, proxy_data.replace('proxy', '')) self.assertEqual(base_data, proxy_proxy_data.replace('proxy', '')) def test_serialize_inherited_fields(self): child_1 = Child.objects.create(parent_data='a', child_data='b') child_2 = Child.objects.create(parent_data='c', child_data='d') child_1.parent_m2m.add(child_2) child_data = serializers.serialize(self.serializer_name, [child_1, child_2]) self.assertEqual(self._get_field_values(child_data, 'parent_m2m'), []) self.assertEqual(self._get_field_values(child_data, 'parent_data'), []) class SerializerAPITests(SimpleTestCase): def test_stream_class(self): class File: def __init__(self): self.lines = [] def write(self, line): self.lines.append(line) def getvalue(self): return ''.join(self.lines) class Serializer(serializers.json.Serializer): stream_class = File serializer = Serializer() data = serializer.serialize([Score(id=1, score=3.4)]) self.assertIs(serializer.stream_class, File) self.assertIsInstance(serializer.stream, File) self.assertEqual(data, '[{"model": "serializers.score", "pk": 1, "fields": {"score": 3.4}}]') class SerializersTransactionTestBase: available_apps = ['serializers'] @skipUnlessDBFeature('supports_forward_references') def test_forward_refs(self): """ Objects ids can be referenced before they are defined in the serialization data. """ # The deserialization process needs to run in a transaction in order # to test forward reference handling. 
with transaction.atomic(): objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str) with connection.constraint_checks_disabled(): for obj in objs: obj.save() for model_cls in (Category, Author, Article): self.assertEqual(model_cls.objects.all().count(), 1) art_obj = Article.objects.all()[0] self.assertEqual(art_obj.categories.all().count(), 1) self.assertEqual(art_obj.author.name, "Agnes") class PickleSerializerTests(SimpleTestCase): @ignore_warnings(category=RemovedInDjango50Warning) def test_serializer_protocol(self): serializer = PickleSerializer(protocol=3) self.assertEqual(serializer.protocol, 3) # If protocol is not provided, it defaults to pickle.HIGHEST_PROTOCOL serializer = PickleSerializer() self.assertEqual(serializer.protocol, pickle.HIGHEST_PROTOCOL) @ignore_warnings(category=RemovedInDjango50Warning) def test_serializer_loads_dumps(self): serializer = PickleSerializer() test_data = 'test data' dump = serializer.dumps(test_data) self.assertEqual(serializer.loads(dump), test_data) def test_serializer_warning(self): msg = ( 'PickleSerializer is deprecated due to its security risk. Use ' 'JSONSerializer instead.' ) with self.assertRaisesMessage(RemovedInDjango50Warning, msg): PickleSerializer() def register_tests(test_class, method_name, test_func, exclude=()): """ Dynamically create serializer tests to ensure that all registered serializers are automatically tested. """ for format_ in serializers.get_serializer_formats(): if format_ == 'geojson' or format_ in exclude: continue decorated_func = skipIf( isinstance(serializers.get_serializer(format_), serializers.BadSerializer), 'The Python library for the %s serializer is not installed.' % format_, )(test_func) setattr(test_class, method_name % format_, partialmethod(decorated_func, format_))
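# Hedged usage sketch (not part of the original file): register_tests() above
# attaches one test method per registered serializer format. The round-trip
# helper and the method-name template below are illustrative only.
def _example_roundtrip(self, format_):
    data = serializers.serialize(format_, [Author(name='example')])
    objs = list(serializers.deserialize(format_, data))
    self.assertEqual(objs[0].object.name, 'example')

# register_tests(SerializersTestBase, 'test_example_roundtrip_%s', _example_roundtrip)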
9885c10d6e728a90cc9b9ae977ffcf34ec37270e48f7f2dcf9eefc750d52f5bb
from datetime import date from django import forms from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry from django.contrib.admin.options import ( HORIZONTAL, VERTICAL, ModelAdmin, TabularInline, get_content_type_for_model, ) from django.contrib.admin.sites import AdminSite from django.contrib.admin.widgets import ( AdminDateWidget, AdminRadioSelect, AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth.models import User from django.db import models from django.forms.widgets import Select from django.test import SimpleTestCase, TestCase from django.test.utils import isolate_apps from .models import Band, Concert, Song class MockRequest: pass class MockSuperUser: def has_perm(self, perm, obj=None): return True request = MockRequest() request.user = MockSuperUser() class ModelAdminTests(TestCase): @classmethod def setUpTestData(cls): cls.band = Band.objects.create( name='The Doors', bio='', sign_date=date(1965, 1, 1), ) def setUp(self): self.site = AdminSite() def test_modeladmin_str(self): ma = ModelAdmin(Band, self.site) self.assertEqual(str(ma), 'modeladmin.ModelAdmin') def test_default_attributes(self): ma = ModelAdmin(Band, self.site) self.assertEqual(ma.actions, ()) self.assertEqual(ma.inlines, ()) # form/fields/fieldsets interaction ############################## def test_default_fields(self): ma = ModelAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'bio', 'sign_date']) self.assertEqual(list(ma.get_fields(request)), ['name', 'bio', 'sign_date']) self.assertEqual(list(ma.get_fields(request, self.band)), ['name', 'bio', 'sign_date']) self.assertIsNone(ma.get_exclude(request, self.band)) def test_default_fieldsets(self): # fieldsets_add and fieldsets_change should return a special data structure that # is used in the templates. They should generate the "right thing" whether we # have specified a custom form, the fields argument, or nothing at all. # # Here's the default case. There are no custom form_add/form_change methods, # no fields argument, and no fieldsets argument. ma = ModelAdmin(Band, self.site) self.assertEqual(ma.get_fieldsets(request), [(None, {'fields': ['name', 'bio', 'sign_date']})]) self.assertEqual(ma.get_fieldsets(request, self.band), [(None, {'fields': ['name', 'bio', 'sign_date']})]) def test_get_fieldsets(self): # get_fieldsets() is called when figuring out form fields (#18681). class BandAdmin(ModelAdmin): def get_fieldsets(self, request, obj=None): return [(None, {'fields': ['name', 'bio']})] ma = BandAdmin(Band, self.site) form = ma.get_form(None) self.assertEqual(form._meta.fields, ['name', 'bio']) class InlineBandAdmin(TabularInline): model = Concert fk_name = 'main_band' can_delete = False def get_fieldsets(self, request, obj=None): return [(None, {'fields': ['day', 'transport']})] ma = InlineBandAdmin(Band, self.site) form = ma.get_formset(None).form self.assertEqual(form._meta.fields, ['day', 'transport']) def test_lookup_allowed_allows_nonexistent_lookup(self): """ A lookup_allowed allows a parameter whose field lookup doesn't exist. (#21129). 
""" class BandAdmin(ModelAdmin): fields = ['name'] ma = BandAdmin(Band, self.site) self.assertTrue(ma.lookup_allowed('name__nonexistent', 'test_value')) @isolate_apps('modeladmin') def test_lookup_allowed_onetoone(self): class Department(models.Model): code = models.CharField(max_length=4, unique=True) class Employee(models.Model): department = models.ForeignKey(Department, models.CASCADE, to_field="code") class EmployeeProfile(models.Model): employee = models.OneToOneField(Employee, models.CASCADE) class EmployeeInfo(models.Model): employee = models.OneToOneField(Employee, models.CASCADE) description = models.CharField(max_length=100) class EmployeeProfileAdmin(ModelAdmin): list_filter = [ 'employee__employeeinfo__description', 'employee__department__code', ] ma = EmployeeProfileAdmin(EmployeeProfile, self.site) # Reverse OneToOneField self.assertIs(ma.lookup_allowed('employee__employeeinfo__description', 'test_value'), True) # OneToOneField and ForeignKey self.assertIs(ma.lookup_allowed('employee__department__code', 'test_value'), True) def test_field_arguments(self): # If fields is specified, fieldsets_add and fieldsets_change should # just stick the fields into a formsets structure and return it. class BandAdmin(ModelAdmin): fields = ['name'] ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_fields(request)), ['name']) self.assertEqual(list(ma.get_fields(request, self.band)), ['name']) self.assertEqual(ma.get_fieldsets(request), [(None, {'fields': ['name']})]) self.assertEqual(ma.get_fieldsets(request, self.band), [(None, {'fields': ['name']})]) def test_field_arguments_restricted_on_form(self): # If fields or fieldsets is specified, it should exclude fields on the # Form class to the fields specified. This may cause errors to be # raised in the db layer if required model fields aren't in fields/ # fieldsets, but that's preferable to ghost errors where a field in the # Form class isn't being displayed because it's not in fields/fieldsets. # Using `fields`. class BandAdmin(ModelAdmin): fields = ['name'] ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name']) self.assertEqual(list(ma.get_form(request, self.band).base_fields), ['name']) # Using `fieldsets`. class BandAdmin(ModelAdmin): fieldsets = [(None, {'fields': ['name']})] ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name']) self.assertEqual(list(ma.get_form(request, self.band).base_fields), ['name']) # Using `exclude`. class BandAdmin(ModelAdmin): exclude = ['bio'] ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date']) # You can also pass a tuple to `exclude`. class BandAdmin(ModelAdmin): exclude = ('bio',) ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date']) # Using `fields` and `exclude`. class BandAdmin(ModelAdmin): fields = ['name', 'bio'] exclude = ['bio'] ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name']) def test_custom_form_meta_exclude_with_readonly(self): """ The custom ModelForm's `Meta.exclude` is respected when used in conjunction with `ModelAdmin.readonly_fields` and when no `ModelAdmin.exclude` is defined (#14496). 
""" # With ModelAdmin class AdminBandForm(forms.ModelForm): class Meta: model = Band exclude = ['bio'] class BandAdmin(ModelAdmin): readonly_fields = ['name'] form = AdminBandForm ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['sign_date']) # With InlineModelAdmin class AdminConcertForm(forms.ModelForm): class Meta: model = Concert exclude = ['day'] class ConcertInline(TabularInline): readonly_fields = ['transport'] form = AdminConcertForm fk_name = 'main_band' model = Concert class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['main_band', 'opening_band', 'id', 'DELETE']) def test_custom_formfield_override_readonly(self): class AdminBandForm(forms.ModelForm): name = forms.CharField() class Meta: exclude = () model = Band class BandAdmin(ModelAdmin): form = AdminBandForm readonly_fields = ['name'] ma = BandAdmin(Band, self.site) # `name` shouldn't appear in base_fields because it's part of # readonly_fields. self.assertEqual( list(ma.get_form(request).base_fields), ['bio', 'sign_date'] ) # But it should appear in get_fields()/fieldsets() so it can be # displayed as read-only. self.assertEqual( list(ma.get_fields(request)), ['bio', 'sign_date', 'name'] ) self.assertEqual( list(ma.get_fieldsets(request)), [(None, {'fields': ['bio', 'sign_date', 'name']})] ) def test_custom_form_meta_exclude(self): """ The custom ModelForm's `Meta.exclude` is overridden if `ModelAdmin.exclude` or `InlineModelAdmin.exclude` are defined (#14496). """ # With ModelAdmin class AdminBandForm(forms.ModelForm): class Meta: model = Band exclude = ['bio'] class BandAdmin(ModelAdmin): exclude = ['name'] form = AdminBandForm ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['bio', 'sign_date']) # With InlineModelAdmin class AdminConcertForm(forms.ModelForm): class Meta: model = Concert exclude = ['day'] class ConcertInline(TabularInline): exclude = ['transport'] form = AdminConcertForm fk_name = 'main_band' model = Concert class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['main_band', 'opening_band', 'day', 'id', 'DELETE'] ) def test_overriding_get_exclude(self): class BandAdmin(ModelAdmin): def get_exclude(self, request, obj=None): return ['name'] self.assertEqual( list(BandAdmin(Band, self.site).get_form(request).base_fields), ['bio', 'sign_date'] ) def test_get_exclude_overrides_exclude(self): class BandAdmin(ModelAdmin): exclude = ['bio'] def get_exclude(self, request, obj=None): return ['name'] self.assertEqual( list(BandAdmin(Band, self.site).get_form(request).base_fields), ['bio', 'sign_date'] ) def test_get_exclude_takes_obj(self): class BandAdmin(ModelAdmin): def get_exclude(self, request, obj=None): if obj: return ['sign_date'] return ['name'] self.assertEqual( list(BandAdmin(Band, self.site).get_form(request, self.band).base_fields), ['name', 'bio'] ) def test_custom_form_validation(self): # If a form is specified, it should use it allowing custom validation # to work properly. This won't break any of the admin widgets or media. 
class AdminBandForm(forms.ModelForm): delete = forms.BooleanField() class BandAdmin(ModelAdmin): form = AdminBandForm ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'bio', 'sign_date', 'delete']) self.assertEqual(type(ma.get_form(request).base_fields['sign_date'].widget), AdminDateWidget) def test_form_exclude_kwarg_override(self): """ The `exclude` kwarg passed to `ModelAdmin.get_form()` overrides all other declarations (#8999). """ class AdminBandForm(forms.ModelForm): class Meta: model = Band exclude = ['name'] class BandAdmin(ModelAdmin): exclude = ['sign_date'] form = AdminBandForm def get_form(self, request, obj=None, **kwargs): kwargs['exclude'] = ['bio'] return super().get_form(request, obj, **kwargs) ma = BandAdmin(Band, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date']) def test_formset_exclude_kwarg_override(self): """ The `exclude` kwarg passed to `InlineModelAdmin.get_formset()` overrides all other declarations (#8999). """ class AdminConcertForm(forms.ModelForm): class Meta: model = Concert exclude = ['day'] class ConcertInline(TabularInline): exclude = ['transport'] form = AdminConcertForm fk_name = 'main_band' model = Concert def get_formset(self, request, obj=None, **kwargs): kwargs['exclude'] = ['opening_band'] return super().get_formset(request, obj, **kwargs) class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['main_band', 'day', 'transport', 'id', 'DELETE'] ) def test_formset_overriding_get_exclude_with_form_fields(self): class AdminConcertForm(forms.ModelForm): class Meta: model = Concert fields = ['main_band', 'opening_band', 'day', 'transport'] class ConcertInline(TabularInline): form = AdminConcertForm fk_name = 'main_band' model = Concert def get_exclude(self, request, obj=None): return ['opening_band'] class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['main_band', 'day', 'transport', 'id', 'DELETE'] ) def test_formset_overriding_get_exclude_with_form_exclude(self): class AdminConcertForm(forms.ModelForm): class Meta: model = Concert exclude = ['day'] class ConcertInline(TabularInline): form = AdminConcertForm fk_name = 'main_band' model = Concert def get_exclude(self, request, obj=None): return ['opening_band'] class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['main_band', 'day', 'transport', 'id', 'DELETE'] ) def test_raw_id_fields_widget_override(self): """ The autocomplete_fields, raw_id_fields, and radio_fields widgets may overridden by specifying a widget in get_formset(). """ class ConcertInline(TabularInline): model = Concert fk_name = 'main_band' raw_id_fields = ('opening_band',) def get_formset(self, request, obj=None, **kwargs): kwargs['widgets'] = {'opening_band': Select} return super().get_formset(request, obj, **kwargs) class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) band_widget = list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields['opening_band'].widget # Without the override this would be ForeignKeyRawIdWidget. 
self.assertIsInstance(band_widget, Select) def test_queryset_override(self): # If the queryset of a ModelChoiceField in a custom form is overridden, # RelatedFieldWidgetWrapper doesn't mess that up. band2 = Band.objects.create(name='The Beatles', bio='', sign_date=date(1962, 1, 1)) ma = ModelAdmin(Concert, self.site) form = ma.get_form(request)() self.assertHTMLEqual( str(form["main_band"]), '<div class="related-widget-wrapper">' '<select name="main_band" id="id_main_band" required>' '<option value="" selected>---------</option>' '<option value="%d">The Beatles</option>' '<option value="%d">The Doors</option>' '</select></div>' % (band2.id, self.band.id) ) class AdminConcertForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["main_band"].queryset = Band.objects.filter(name='The Doors') class ConcertAdminWithForm(ModelAdmin): form = AdminConcertForm ma = ConcertAdminWithForm(Concert, self.site) form = ma.get_form(request)() self.assertHTMLEqual( str(form["main_band"]), '<div class="related-widget-wrapper">' '<select name="main_band" id="id_main_band" required>' '<option value="" selected>---------</option>' '<option value="%d">The Doors</option>' '</select></div>' % self.band.id ) def test_regression_for_ticket_15820(self): """ `obj` is passed from `InlineModelAdmin.get_fieldsets()` to `InlineModelAdmin.get_formset()`. """ class CustomConcertForm(forms.ModelForm): class Meta: model = Concert fields = ['day'] class ConcertInline(TabularInline): model = Concert fk_name = 'main_band' def get_formset(self, request, obj=None, **kwargs): if obj: kwargs['form'] = CustomConcertForm return super().get_formset(request, obj, **kwargs) class BandAdmin(ModelAdmin): inlines = [ConcertInline] Concert.objects.create(main_band=self.band, opening_band=self.band, day=1) ma = BandAdmin(Band, self.site) inline_instances = ma.get_inline_instances(request) fieldsets = list(inline_instances[0].get_fieldsets(request)) self.assertEqual(fieldsets[0][1]['fields'], ['main_band', 'opening_band', 'day', 'transport']) fieldsets = list(inline_instances[0].get_fieldsets(request, inline_instances[0].model)) self.assertEqual(fieldsets[0][1]['fields'], ['day']) # radio_fields behavior ########################################### def test_default_foreign_key_widget(self): # First, without any radio_fields specified, the widgets for ForeignKey # and fields with choices specified ought to be a basic Select widget. # ForeignKey widgets in the admin are wrapped with RelatedFieldWidgetWrapper so # they need to be handled properly when type checking. For Select fields, all of # the choices lists have a first entry of dashes. 
cma = ModelAdmin(Concert, self.site) cmafa = cma.get_form(request) self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget), Select) self.assertEqual( list(cmafa.base_fields['main_band'].widget.choices), [('', '---------'), (self.band.id, 'The Doors')]) self.assertEqual(type(cmafa.base_fields['opening_band'].widget.widget), Select) self.assertEqual( list(cmafa.base_fields['opening_band'].widget.choices), [('', '---------'), (self.band.id, 'The Doors')] ) self.assertEqual(type(cmafa.base_fields['day'].widget), Select) self.assertEqual( list(cmafa.base_fields['day'].widget.choices), [('', '---------'), (1, 'Fri'), (2, 'Sat')] ) self.assertEqual(type(cmafa.base_fields['transport'].widget), Select) self.assertEqual( list(cmafa.base_fields['transport'].widget.choices), [('', '---------'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')]) def test_foreign_key_as_radio_field(self): # Now specify all the fields as radio_fields. Widgets should now be # RadioSelect, and the choices list should have a first entry of 'None' if # blank=True for the model field. Finally, the widget should have the # 'radiolist' attr, and 'inline' as well if the field is specified HORIZONTAL. class ConcertAdmin(ModelAdmin): radio_fields = { 'main_band': HORIZONTAL, 'opening_band': VERTICAL, 'day': VERTICAL, 'transport': HORIZONTAL, } cma = ConcertAdmin(Concert, self.site) cmafa = cma.get_form(request) self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget), AdminRadioSelect) self.assertEqual(cmafa.base_fields['main_band'].widget.attrs, {'class': 'radiolist inline'}) self.assertEqual( list(cmafa.base_fields['main_band'].widget.choices), [(self.band.id, 'The Doors')] ) self.assertEqual(type(cmafa.base_fields['opening_band'].widget.widget), AdminRadioSelect) self.assertEqual(cmafa.base_fields['opening_band'].widget.attrs, {'class': 'radiolist'}) self.assertEqual( list(cmafa.base_fields['opening_band'].widget.choices), [('', 'None'), (self.band.id, 'The Doors')] ) self.assertEqual(type(cmafa.base_fields['day'].widget), AdminRadioSelect) self.assertEqual(cmafa.base_fields['day'].widget.attrs, {'class': 'radiolist'}) self.assertEqual(list(cmafa.base_fields['day'].widget.choices), [(1, 'Fri'), (2, 'Sat')]) self.assertEqual(type(cmafa.base_fields['transport'].widget), AdminRadioSelect) self.assertEqual(cmafa.base_fields['transport'].widget.attrs, {'class': 'radiolist inline'}) self.assertEqual( list(cmafa.base_fields['transport'].widget.choices), [('', 'None'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')] ) class AdminConcertForm(forms.ModelForm): class Meta: model = Concert exclude = ('transport',) class ConcertAdmin(ModelAdmin): form = AdminConcertForm ma = ConcertAdmin(Concert, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['main_band', 'opening_band', 'day']) class AdminConcertForm(forms.ModelForm): extra = forms.CharField() class Meta: model = Concert fields = ['extra', 'transport'] class ConcertAdmin(ModelAdmin): form = AdminConcertForm ma = ConcertAdmin(Concert, self.site) self.assertEqual(list(ma.get_form(request).base_fields), ['extra', 'transport']) class ConcertInline(TabularInline): form = AdminConcertForm model = Concert fk_name = 'main_band' can_delete = True class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, self.site) self.assertEqual( list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields), ['extra', 'transport', 'id', 'DELETE', 'main_band'] ) def test_log_actions(self): ma = ModelAdmin(Band, self.site) mock_request = MockRequest() 
mock_request.user = User.objects.create(username='bill') content_type = get_content_type_for_model(self.band) tests = ( (ma.log_addition, ADDITION, {'added': {}}), (ma.log_change, CHANGE, {'changed': {'fields': ['name', 'bio']}}), (ma.log_deletion, DELETION, str(self.band)), ) for method, flag, message in tests: with self.subTest(name=method.__name__): created = method(mock_request, self.band, message) fetched = LogEntry.objects.filter(action_flag=flag).latest('id') self.assertEqual(created, fetched) self.assertEqual(fetched.action_flag, flag) self.assertEqual(fetched.content_type, content_type) self.assertEqual(fetched.object_id, str(self.band.pk)) self.assertEqual(fetched.user, mock_request.user) if flag == DELETION: self.assertEqual(fetched.change_message, '') self.assertEqual(fetched.object_repr, message) else: self.assertEqual(fetched.change_message, str(message)) self.assertEqual(fetched.object_repr, str(self.band)) def test_get_autocomplete_fields(self): class NameAdmin(ModelAdmin): search_fields = ['name'] class SongAdmin(ModelAdmin): autocomplete_fields = ['featuring'] fields = ['featuring', 'band'] class OtherSongAdmin(SongAdmin): def get_autocomplete_fields(self, request): return ['band'] self.site.register(Band, NameAdmin) try: # Uses autocomplete_fields if not overridden. model_admin = SongAdmin(Song, self.site) form = model_admin.get_form(request)() self.assertIsInstance(form.fields['featuring'].widget.widget, AutocompleteSelectMultiple) # Uses overridden get_autocomplete_fields model_admin = OtherSongAdmin(Song, self.site) form = model_admin.get_form(request)() self.assertIsInstance(form.fields['band'].widget.widget, AutocompleteSelect) finally: self.site.unregister(Band) def test_get_deleted_objects(self): mock_request = MockRequest() mock_request.user = User.objects.create_superuser(username='bob', email='[email protected]', password='test') self.site.register(Band, ModelAdmin) ma = self.site._registry[Band] deletable_objects, model_count, perms_needed, protected = ma.get_deleted_objects([self.band], request) self.assertEqual(deletable_objects, ['Band: The Doors']) self.assertEqual(model_count, {'bands': 1}) self.assertEqual(perms_needed, set()) self.assertEqual(protected, []) def test_get_deleted_objects_with_custom_has_delete_permission(self): """ ModelAdmin.get_deleted_objects() uses ModelAdmin.has_delete_permission() for permissions checking. 
""" mock_request = MockRequest() mock_request.user = User.objects.create_superuser(username='bob', email='[email protected]', password='test') class TestModelAdmin(ModelAdmin): def has_delete_permission(self, request, obj=None): return False self.site.register(Band, TestModelAdmin) ma = self.site._registry[Band] deletable_objects, model_count, perms_needed, protected = ma.get_deleted_objects([self.band], request) self.assertEqual(deletable_objects, ['Band: The Doors']) self.assertEqual(model_count, {'bands': 1}) self.assertEqual(perms_needed, {'band'}) self.assertEqual(protected, []) def test_modeladmin_repr(self): ma = ModelAdmin(Band, self.site) self.assertEqual( repr(ma), "<ModelAdmin: model=Band site=AdminSite(name='admin')>", ) class ModelAdminPermissionTests(SimpleTestCase): class MockUser: def has_module_perms(self, app_label): return app_label == 'modeladmin' class MockViewUser(MockUser): def has_perm(self, perm, obj=None): return perm == 'modeladmin.view_band' class MockAddUser(MockUser): def has_perm(self, perm, obj=None): return perm == 'modeladmin.add_band' class MockChangeUser(MockUser): def has_perm(self, perm, obj=None): return perm == 'modeladmin.change_band' class MockDeleteUser(MockUser): def has_perm(self, perm, obj=None): return perm == 'modeladmin.delete_band' def test_has_view_permission(self): """ has_view_permission() returns True for users who can view objects and False for users who can't. """ ma = ModelAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockViewUser() self.assertIs(ma.has_view_permission(request), True) request.user = self.MockAddUser() self.assertIs(ma.has_view_permission(request), False) request.user = self.MockChangeUser() self.assertIs(ma.has_view_permission(request), True) request.user = self.MockDeleteUser() self.assertIs(ma.has_view_permission(request), False) def test_has_add_permission(self): """ has_add_permission returns True for users who can add objects and False for users who can't. """ ma = ModelAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockViewUser() self.assertFalse(ma.has_add_permission(request)) request.user = self.MockAddUser() self.assertTrue(ma.has_add_permission(request)) request.user = self.MockChangeUser() self.assertFalse(ma.has_add_permission(request)) request.user = self.MockDeleteUser() self.assertFalse(ma.has_add_permission(request)) def test_inline_has_add_permission_uses_obj(self): class ConcertInline(TabularInline): model = Concert def has_add_permission(self, request, obj): return bool(obj) class BandAdmin(ModelAdmin): inlines = [ConcertInline] ma = BandAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockAddUser() self.assertEqual(ma.get_inline_instances(request), []) band = Band(name='The Doors', bio='', sign_date=date(1965, 1, 1)) inline_instances = ma.get_inline_instances(request, band) self.assertEqual(len(inline_instances), 1) self.assertIsInstance(inline_instances[0], ConcertInline) def test_has_change_permission(self): """ has_change_permission returns True for users who can edit objects and False for users who can't. 
""" ma = ModelAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockViewUser() self.assertIs(ma.has_change_permission(request), False) request.user = self.MockAddUser() self.assertFalse(ma.has_change_permission(request)) request.user = self.MockChangeUser() self.assertTrue(ma.has_change_permission(request)) request.user = self.MockDeleteUser() self.assertFalse(ma.has_change_permission(request)) def test_has_delete_permission(self): """ has_delete_permission returns True for users who can delete objects and False for users who can't. """ ma = ModelAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockViewUser() self.assertIs(ma.has_delete_permission(request), False) request.user = self.MockAddUser() self.assertFalse(ma.has_delete_permission(request)) request.user = self.MockChangeUser() self.assertFalse(ma.has_delete_permission(request)) request.user = self.MockDeleteUser() self.assertTrue(ma.has_delete_permission(request)) def test_has_module_permission(self): """ as_module_permission returns True for users who have any permission for the module and False for users who don't. """ ma = ModelAdmin(Band, AdminSite()) request = MockRequest() request.user = self.MockViewUser() self.assertIs(ma.has_module_permission(request), True) request.user = self.MockAddUser() self.assertTrue(ma.has_module_permission(request)) request.user = self.MockChangeUser() self.assertTrue(ma.has_module_permission(request)) request.user = self.MockDeleteUser() self.assertTrue(ma.has_module_permission(request)) original_app_label = ma.opts.app_label ma.opts.app_label = 'anotherapp' try: request.user = self.MockViewUser() self.assertIs(ma.has_module_permission(request), False) request.user = self.MockAddUser() self.assertFalse(ma.has_module_permission(request)) request.user = self.MockChangeUser() self.assertFalse(ma.has_module_permission(request)) request.user = self.MockDeleteUser() self.assertFalse(ma.has_module_permission(request)) finally: ma.opts.app_label = original_app_label
44cd7a93ce38ac0967b74dfa8e4a9238e0c860d397afe591b771fa27b8ede2b8
from django import forms from django.contrib import admin from django.contrib.admin import BooleanFieldListFilter, SimpleListFilter from django.contrib.admin.options import VERTICAL, ModelAdmin, TabularInline from django.contrib.admin.sites import AdminSite from django.core.checks import Error from django.db.models import CASCADE, F, Field, ForeignKey, Model from django.db.models.functions import Upper from django.forms.models import BaseModelFormSet from django.test import SimpleTestCase from .models import ( Band, Song, User, ValidationTestInlineModel, ValidationTestModel, ) class CheckTestCase(SimpleTestCase): def assertIsInvalid(self, model_admin, model, msg, id=None, hint=None, invalid_obj=None, admin_site=None): if admin_site is None: admin_site = AdminSite() invalid_obj = invalid_obj or model_admin admin_obj = model_admin(model, admin_site) self.assertEqual(admin_obj.check(), [Error(msg, hint=hint, obj=invalid_obj, id=id)]) def assertIsInvalidRegexp(self, model_admin, model, msg, id=None, hint=None, invalid_obj=None): """ Same as assertIsInvalid but treats the given msg as a regexp. """ invalid_obj = invalid_obj or model_admin admin_obj = model_admin(model, AdminSite()) errors = admin_obj.check() self.assertEqual(len(errors), 1) error = errors[0] self.assertEqual(error.hint, hint) self.assertEqual(error.obj, invalid_obj) self.assertEqual(error.id, id) self.assertRegex(error.msg, msg) def assertIsValid(self, model_admin, model, admin_site=None): if admin_site is None: admin_site = AdminSite() admin_obj = model_admin(model, admin_site) self.assertEqual(admin_obj.check(), []) class RawIdCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): raw_id_fields = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'raw_id_fields' must be a list or tuple.", 'admin.E001' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): raw_id_fields = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'raw_id_fields[0]' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E002' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): raw_id_fields = ('name',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'raw_id_fields[0]' must be a foreign key or a " "many-to-many field.", 'admin.E003' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): raw_id_fields = ('users',) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_field_attname(self): class TestModelAdmin(ModelAdmin): raw_id_fields = ['band_id'] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'raw_id_fields[0]' refers to 'band_id', which is " "not a field of 'modeladmin.ValidationTestModel'.", 'admin.E002', ) class FieldsetsCheckTests(CheckTestCase): def test_valid_case(self): class TestModelAdmin(ModelAdmin): fieldsets = (('General', {'fields': ('name',)}),) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_not_iterable(self): class TestModelAdmin(ModelAdmin): fieldsets = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fieldsets' must be a list or tuple.", 'admin.E007' ) def test_non_iterable_item(self): class TestModelAdmin(ModelAdmin): fieldsets = ({},) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fieldsets[0]' must be a list or tuple.", 'admin.E008' ) def test_item_not_a_pair(self): class TestModelAdmin(ModelAdmin): fieldsets = 
((),) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fieldsets[0]' must be of length 2.", 'admin.E009' ) def test_second_element_of_item_not_a_dict(self): class TestModelAdmin(ModelAdmin): fieldsets = (('General', ()),) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fieldsets[0][1]' must be a dictionary.", 'admin.E010' ) def test_missing_fields_key(self): class TestModelAdmin(ModelAdmin): fieldsets = (('General', {}),) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fieldsets[0][1]' must contain the key 'fields'.", 'admin.E011' ) class TestModelAdmin(ModelAdmin): fieldsets = (('General', {'fields': ('name',)}),) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_specified_both_fields_and_fieldsets(self): class TestModelAdmin(ModelAdmin): fieldsets = (('General', {'fields': ('name',)}),) fields = ['name'] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "Both 'fieldsets' and 'fields' are specified.", 'admin.E005' ) def test_duplicate_fields(self): class TestModelAdmin(ModelAdmin): fieldsets = [(None, {'fields': ['name', 'name']})] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "There are duplicate field(s) in 'fieldsets[0][1]'.", 'admin.E012' ) def test_duplicate_fields_in_fieldsets(self): class TestModelAdmin(ModelAdmin): fieldsets = [ (None, {'fields': ['name']}), (None, {'fields': ['name']}), ] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "There are duplicate field(s) in 'fieldsets[1][1]'.", 'admin.E012' ) def test_fieldsets_with_custom_form_validation(self): class BandAdmin(ModelAdmin): fieldsets = (('Band', {'fields': ('name',)}),) self.assertIsValid(BandAdmin, Band) class FieldsCheckTests(CheckTestCase): def test_duplicate_fields_in_fields(self): class TestModelAdmin(ModelAdmin): fields = ['name', 'name'] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fields' contains duplicate field(s).", 'admin.E006' ) def test_inline(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel fields = 10 class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'fields' must be a list or tuple.", 'admin.E004', invalid_obj=ValidationTestInline ) class FormCheckTests(CheckTestCase): def test_invalid_type(self): class FakeForm: pass class TestModelAdmin(ModelAdmin): form = FakeForm class TestModelAdminWithNoForm(ModelAdmin): form = 'not a form' for model_admin in (TestModelAdmin, TestModelAdminWithNoForm): with self.subTest(model_admin): self.assertIsInvalid( model_admin, ValidationTestModel, "The value of 'form' must inherit from 'BaseModelForm'.", 'admin.E016' ) def test_fieldsets_with_custom_form_validation(self): class BandAdmin(ModelAdmin): fieldsets = (('Band', {'fields': ('name',)}),) self.assertIsValid(BandAdmin, Band) def test_valid_case(self): class AdminBandForm(forms.ModelForm): delete = forms.BooleanField() class BandAdmin(ModelAdmin): form = AdminBandForm fieldsets = ( ('Band', { 'fields': ('name', 'bio', 'sign_date', 'delete') }), ) self.assertIsValid(BandAdmin, Band) class FilterVerticalCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): filter_vertical = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_vertical' must be a list or tuple.", 'admin.E017' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): filter_vertical = 
('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_vertical[0]' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E019' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): filter_vertical = ('name',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_vertical[0]' must be a many-to-many field.", 'admin.E020' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): filter_vertical = ('users',) self.assertIsValid(TestModelAdmin, ValidationTestModel) class FilterHorizontalCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): filter_horizontal = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_horizontal' must be a list or tuple.", 'admin.E018' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): filter_horizontal = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_horizontal[0]' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E019' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): filter_horizontal = ('name',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'filter_horizontal[0]' must be a many-to-many field.", 'admin.E020' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): filter_horizontal = ('users',) self.assertIsValid(TestModelAdmin, ValidationTestModel) class RadioFieldsCheckTests(CheckTestCase): def test_not_dictionary(self): class TestModelAdmin(ModelAdmin): radio_fields = () self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'radio_fields' must be a dictionary.", 'admin.E021' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): radio_fields = {'non_existent_field': VERTICAL} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'radio_fields' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E022' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): radio_fields = {'name': VERTICAL} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'radio_fields' refers to 'name', which is not an instance " "of ForeignKey, and does not have a 'choices' definition.", 'admin.E023' ) def test_invalid_value(self): class TestModelAdmin(ModelAdmin): radio_fields = {'state': None} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'radio_fields[\"state\"]' must be either admin.HORIZONTAL or admin.VERTICAL.", 'admin.E024' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): radio_fields = {'state': VERTICAL} self.assertIsValid(TestModelAdmin, ValidationTestModel) class PrepopulatedFieldsCheckTests(CheckTestCase): def test_not_list_or_tuple(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'slug': 'test'} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, 'The value of \'prepopulated_fields["slug"]\' must be a list ' 'or tuple.', 'admin.E029' ) def test_not_dictionary(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = () self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'prepopulated_fields' must be a dictionary.", 'admin.E026' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'non_existent_field': ('slug',)} self.assertIsInvalid( 
TestModelAdmin, ValidationTestModel, "The value of 'prepopulated_fields' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E027' ) def test_missing_field_again(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'slug': ('non_existent_field',)} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'prepopulated_fields[\"slug\"][0]' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E030' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'users': ('name',)} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'prepopulated_fields' refers to 'users', which must not be " "a DateTimeField, a ForeignKey, a OneToOneField, or a ManyToManyField.", 'admin.E028' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'slug': ('name',)} self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_one_to_one_field(self): class TestModelAdmin(ModelAdmin): prepopulated_fields = {'best_friend': ('name',)} self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'prepopulated_fields' refers to 'best_friend', which must not be " "a DateTimeField, a ForeignKey, a OneToOneField, or a ManyToManyField.", 'admin.E028' ) class ListDisplayTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): list_display = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display' must be a list or tuple.", 'admin.E107' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): list_display = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display[0]' refers to 'non_existent_field', " "which is not a callable, an attribute of 'TestModelAdmin', " "or an attribute or method on 'modeladmin.ValidationTestModel'.", 'admin.E108' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): list_display = ('users',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display[0]' must not be a ManyToManyField.", 'admin.E109' ) def test_valid_case(self): @admin.display def a_callable(obj): pass class TestModelAdmin(ModelAdmin): @admin.display def a_method(self, obj): pass list_display = ('name', 'decade_published_in', 'a_method', a_callable) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_valid_field_accessible_via_instance(self): class PositionField(Field): """Custom field accessible only via instance.""" def contribute_to_class(self, cls, name): super().contribute_to_class(cls, name) setattr(cls, self.name, self) def __get__(self, instance, owner): if instance is None: raise AttributeError() class TestModel(Model): field = PositionField() class TestModelAdmin(ModelAdmin): list_display = ('field',) self.assertIsValid(TestModelAdmin, TestModel) class ListDisplayLinksCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): list_display_links = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display_links' must be a list, a tuple, or None.", 'admin.E110' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): list_display_links = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, ( "The value of 'list_display_links[0]' refers to " "'non_existent_field', which is not defined in 'list_display'." 
), 'admin.E111' ) def test_missing_in_list_display(self): class TestModelAdmin(ModelAdmin): list_display_links = ('name',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display_links[0]' refers to 'name', which is not defined in 'list_display'.", 'admin.E111' ) def test_valid_case(self): @admin.display def a_callable(obj): pass class TestModelAdmin(ModelAdmin): @admin.display def a_method(self, obj): pass list_display = ('name', 'decade_published_in', 'a_method', a_callable) list_display_links = ('name', 'decade_published_in', 'a_method', a_callable) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_None_is_valid_case(self): class TestModelAdmin(ModelAdmin): list_display_links = None self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_list_display_links_check_skipped_if_get_list_display_overridden(self): """ list_display_links check is skipped if get_list_display() is overridden. """ class TestModelAdmin(ModelAdmin): list_display_links = ['name', 'subtitle'] def get_list_display(self, request): pass self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_list_display_link_checked_for_list_tuple_if_get_list_display_overridden(self): """ list_display_links is checked for list/tuple/None even if get_list_display() is overridden. """ class TestModelAdmin(ModelAdmin): list_display_links = 'non-list/tuple' def get_list_display(self, request): pass self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_display_links' must be a list, a tuple, or None.", 'admin.E110' ) class ListFilterTests(CheckTestCase): def test_list_filter_validation(self): class TestModelAdmin(ModelAdmin): list_filter = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter' must be a list or tuple.", 'admin.E112' ) def test_not_list_filter_class(self): class TestModelAdmin(ModelAdmin): list_filter = ['RandomClass'] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' refers to 'RandomClass', which " "does not refer to a Field.", 'admin.E116' ) def test_callable(self): def random_callable(): pass class TestModelAdmin(ModelAdmin): list_filter = [random_callable] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' must inherit from 'ListFilter'.", 'admin.E113' ) def test_not_callable(self): class TestModelAdmin(ModelAdmin): list_filter = [[42, 42]] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", 'admin.E115' ) def test_missing_field(self): class TestModelAdmin(ModelAdmin): list_filter = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' refers to 'non_existent_field', " "which does not refer to a Field.", 'admin.E116' ) def test_not_filter(self): class RandomClass: pass class TestModelAdmin(ModelAdmin): list_filter = (RandomClass,) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' must inherit from 'ListFilter'.", 'admin.E113' ) def test_not_filter_again(self): class RandomClass: pass class TestModelAdmin(ModelAdmin): list_filter = (('is_active', RandomClass),) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", 'admin.E115' ) def test_not_filter_again_again(self): class AwesomeFilter(SimpleListFilter): def get_title(self): return 'awesomeness' def get_choices(self, 
request): return (('bit', 'A bit awesome'), ('very', 'Very awesome')) def get_queryset(self, cl, qs): return qs class TestModelAdmin(ModelAdmin): list_filter = (('is_active', AwesomeFilter),) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0][1]' must inherit from 'FieldListFilter'.", 'admin.E115' ) def test_list_filter_is_func(self): def get_filter(): pass class TestModelAdmin(ModelAdmin): list_filter = [get_filter] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' must inherit from 'ListFilter'.", 'admin.E113' ) def test_not_associated_with_field_name(self): class TestModelAdmin(ModelAdmin): list_filter = (BooleanFieldListFilter,) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_filter[0]' must not inherit from 'FieldListFilter'.", 'admin.E114' ) def test_valid_case(self): class AwesomeFilter(SimpleListFilter): def get_title(self): return 'awesomeness' def get_choices(self, request): return (('bit', 'A bit awesome'), ('very', 'Very awesome')) def get_queryset(self, cl, qs): return qs class TestModelAdmin(ModelAdmin): list_filter = ('is_active', AwesomeFilter, ('is_active', BooleanFieldListFilter), 'no') self.assertIsValid(TestModelAdmin, ValidationTestModel) class ListPerPageCheckTests(CheckTestCase): def test_not_integer(self): class TestModelAdmin(ModelAdmin): list_per_page = 'hello' self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_per_page' must be an integer.", 'admin.E118' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): list_per_page = 100 self.assertIsValid(TestModelAdmin, ValidationTestModel) class ListMaxShowAllCheckTests(CheckTestCase): def test_not_integer(self): class TestModelAdmin(ModelAdmin): list_max_show_all = 'hello' self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_max_show_all' must be an integer.", 'admin.E119' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): list_max_show_all = 200 self.assertIsValid(TestModelAdmin, ValidationTestModel) class SearchFieldsCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): search_fields = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'search_fields' must be a list or tuple.", 'admin.E126' ) class DateHierarchyCheckTests(CheckTestCase): def test_missing_field(self): class TestModelAdmin(ModelAdmin): date_hierarchy = 'non_existent_field' self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'date_hierarchy' refers to 'non_existent_field', " "which does not refer to a Field.", 'admin.E127' ) def test_invalid_field_type(self): class TestModelAdmin(ModelAdmin): date_hierarchy = 'name' self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'date_hierarchy' must be a DateField or DateTimeField.", 'admin.E128' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): date_hierarchy = 'pub_date' self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_related_valid_case(self): class TestModelAdmin(ModelAdmin): date_hierarchy = 'band__sign_date' self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_related_invalid_field_type(self): class TestModelAdmin(ModelAdmin): date_hierarchy = 'band__name' self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'date_hierarchy' must be a DateField or DateTimeField.", 'admin.E128' ) class OrderingCheckTests(CheckTestCase): def test_not_iterable(self): class 
TestModelAdmin(ModelAdmin): ordering = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'ordering' must be a list or tuple.", 'admin.E031' ) class TestModelAdmin(ModelAdmin): ordering = ('non_existent_field',) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'ordering[0]' refers to 'non_existent_field', " "which is not a field of 'modeladmin.ValidationTestModel'.", 'admin.E033' ) def test_random_marker_not_alone(self): class TestModelAdmin(ModelAdmin): ordering = ('?', 'name') self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'ordering' has the random ordering marker '?', but contains " "other fields as well.", 'admin.E032', hint='Either remove the "?", or remove the other fields.' ) def test_valid_random_marker_case(self): class TestModelAdmin(ModelAdmin): ordering = ('?',) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_valid_complex_case(self): class TestModelAdmin(ModelAdmin): ordering = ('band__name',) self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_valid_case(self): class TestModelAdmin(ModelAdmin): ordering = ('name', 'pk') self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_invalid_expression(self): class TestModelAdmin(ModelAdmin): ordering = (F('nonexistent'), ) self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'ordering[0]' refers to 'nonexistent', which is not " "a field of 'modeladmin.ValidationTestModel'.", 'admin.E033' ) def test_valid_expression(self): class TestModelAdmin(ModelAdmin): ordering = (Upper('name'), Upper('band__name').desc()) self.assertIsValid(TestModelAdmin, ValidationTestModel) class ListSelectRelatedCheckTests(CheckTestCase): def test_invalid_type(self): class TestModelAdmin(ModelAdmin): list_select_related = 1 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'list_select_related' must be a boolean, tuple or list.", 'admin.E117' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): list_select_related = False self.assertIsValid(TestModelAdmin, ValidationTestModel) class SaveAsCheckTests(CheckTestCase): def test_not_boolean(self): class TestModelAdmin(ModelAdmin): save_as = 1 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'save_as' must be a boolean.", 'admin.E101' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): save_as = True self.assertIsValid(TestModelAdmin, ValidationTestModel) class SaveOnTopCheckTests(CheckTestCase): def test_not_boolean(self): class TestModelAdmin(ModelAdmin): save_on_top = 1 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'save_on_top' must be a boolean.", 'admin.E102' ) def test_valid_case(self): class TestModelAdmin(ModelAdmin): save_on_top = True self.assertIsValid(TestModelAdmin, ValidationTestModel) class InlinesCheckTests(CheckTestCase): def test_not_iterable(self): class TestModelAdmin(ModelAdmin): inlines = 10 self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'inlines' must be a list or tuple.", 'admin.E103' ) def test_not_correct_inline_field(self): class TestModelAdmin(ModelAdmin): inlines = [42] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"'.*\.TestModelAdmin' must inherit from 'InlineModelAdmin'\.", 'admin.E104' ) def test_not_model_admin(self): class ValidationTestInline: pass class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"'.*\.ValidationTestInline' 
must inherit from 'InlineModelAdmin'\.", 'admin.E104' ) def test_missing_model_field(self): class ValidationTestInline(TabularInline): pass class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"'.*\.ValidationTestInline' must have a 'model' attribute\.", 'admin.E105' ) def test_invalid_model_type(self): class SomethingBad: pass class ValidationTestInline(TabularInline): model = SomethingBad class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"The value of '.*\.ValidationTestInline.model' must be a Model\.", 'admin.E106' ) def test_invalid_model(self): class ValidationTestInline(TabularInline): model = 'Not a class' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"The value of '.*\.ValidationTestInline.model' must be a Model\.", 'admin.E106' ) def test_invalid_callable(self): def random_obj(): pass class TestModelAdmin(ModelAdmin): inlines = [random_obj] self.assertIsInvalidRegexp( TestModelAdmin, ValidationTestModel, r"'.*\.random_obj' must inherit from 'InlineModelAdmin'\.", 'admin.E104' ) def test_valid_case(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) class FkNameCheckTests(CheckTestCase): def test_missing_field(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel fk_name = 'non_existent_field' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "'modeladmin.ValidationTestInlineModel' has no field named 'non_existent_field'.", 'admin.E202', invalid_obj=ValidationTestInline ) def test_valid_case(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel fk_name = 'parent' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) def test_proxy_model_parent(self): class Parent(Model): pass class ProxyChild(Parent): class Meta: proxy = True class ProxyProxyChild(ProxyChild): class Meta: proxy = True class Related(Model): proxy_child = ForeignKey(ProxyChild, on_delete=CASCADE) class InlineFkName(admin.TabularInline): model = Related fk_name = 'proxy_child' class InlineNoFkName(admin.TabularInline): model = Related class ProxyProxyChildAdminFkName(admin.ModelAdmin): inlines = [InlineFkName, InlineNoFkName] self.assertIsValid(ProxyProxyChildAdminFkName, ProxyProxyChild) class ExtraCheckTests(CheckTestCase): def test_not_integer(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel extra = 'hello' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'extra' must be an integer.", 'admin.E203', invalid_obj=ValidationTestInline ) def test_valid_case(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel extra = 2 class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) class MaxNumCheckTests(CheckTestCase): def test_not_integer(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel max_num = 'hello' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] 
self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'max_num' must be an integer.", 'admin.E204', invalid_obj=ValidationTestInline ) def test_valid_case(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel max_num = 2 class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) class MinNumCheckTests(CheckTestCase): def test_not_integer(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel min_num = 'hello' class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'min_num' must be an integer.", 'admin.E205', invalid_obj=ValidationTestInline ) def test_valid_case(self): class ValidationTestInline(TabularInline): model = ValidationTestInlineModel min_num = 2 class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) class FormsetCheckTests(CheckTestCase): def test_invalid_type(self): class FakeFormSet: pass class ValidationTestInline(TabularInline): model = ValidationTestInlineModel formset = FakeFormSet class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsInvalid( TestModelAdmin, ValidationTestModel, "The value of 'formset' must inherit from 'BaseModelFormSet'.", 'admin.E206', invalid_obj=ValidationTestInline ) def test_inline_without_formset_class(self): class ValidationTestInlineWithoutFormsetClass(TabularInline): model = ValidationTestInlineModel formset = 'Not a FormSet Class' class TestModelAdminWithoutFormsetClass(ModelAdmin): inlines = [ValidationTestInlineWithoutFormsetClass] self.assertIsInvalid( TestModelAdminWithoutFormsetClass, ValidationTestModel, "The value of 'formset' must inherit from 'BaseModelFormSet'.", 'admin.E206', invalid_obj=ValidationTestInlineWithoutFormsetClass ) def test_valid_case(self): class RealModelFormSet(BaseModelFormSet): pass class ValidationTestInline(TabularInline): model = ValidationTestInlineModel formset = RealModelFormSet class TestModelAdmin(ModelAdmin): inlines = [ValidationTestInline] self.assertIsValid(TestModelAdmin, ValidationTestModel) class ListDisplayEditableTests(CheckTestCase): def test_list_display_links_is_none(self): """ list_display and list_editable can contain the same values when list_display_links is None """ class ProductAdmin(ModelAdmin): list_display = ['name', 'slug', 'pub_date'] list_editable = list_display list_display_links = None self.assertIsValid(ProductAdmin, ValidationTestModel) def test_list_display_first_item_same_as_list_editable_first_item(self): """ The first item in list_display can be the same as the first in list_editable. """ class ProductAdmin(ModelAdmin): list_display = ['name', 'slug', 'pub_date'] list_editable = ['name', 'slug'] list_display_links = ['pub_date'] self.assertIsValid(ProductAdmin, ValidationTestModel) def test_list_display_first_item_in_list_editable(self): """ The first item in list_display can be in list_editable as long as list_display_links is defined. """ class ProductAdmin(ModelAdmin): list_display = ['name', 'slug', 'pub_date'] list_editable = ['slug', 'name'] list_display_links = ['pub_date'] self.assertIsValid(ProductAdmin, ValidationTestModel) def test_list_display_first_item_same_as_list_editable_no_list_display_links(self): """ The first item in list_display cannot be the same as the first item in list_editable if list_display_links is not defined. 
""" class ProductAdmin(ModelAdmin): list_display = ['name'] list_editable = ['name'] self.assertIsInvalid( ProductAdmin, ValidationTestModel, "The value of 'list_editable[0]' refers to the first field " "in 'list_display' ('name'), which cannot be used unless " "'list_display_links' is set.", id='admin.E124', ) def test_list_display_first_item_in_list_editable_no_list_display_links(self): """ The first item in list_display cannot be in list_editable if list_display_links isn't defined. """ class ProductAdmin(ModelAdmin): list_display = ['name', 'slug', 'pub_date'] list_editable = ['slug', 'name'] self.assertIsInvalid( ProductAdmin, ValidationTestModel, "The value of 'list_editable[1]' refers to the first field " "in 'list_display' ('name'), which cannot be used unless " "'list_display_links' is set.", id='admin.E124', ) def test_both_list_editable_and_list_display_links(self): class ProductAdmin(ModelAdmin): list_editable = ('name',) list_display = ('name',) list_display_links = ('name',) self.assertIsInvalid( ProductAdmin, ValidationTestModel, "The value of 'name' cannot be in both 'list_editable' and " "'list_display_links'.", id='admin.E123', ) class AutocompleteFieldsTests(CheckTestCase): def test_autocomplete_e036(self): class Admin(ModelAdmin): autocomplete_fields = 'name' self.assertIsInvalid( Admin, Band, msg="The value of 'autocomplete_fields' must be a list or tuple.", id='admin.E036', invalid_obj=Admin, ) def test_autocomplete_e037(self): class Admin(ModelAdmin): autocomplete_fields = ('nonexistent',) self.assertIsInvalid( Admin, ValidationTestModel, msg=( "The value of 'autocomplete_fields[0]' refers to 'nonexistent', " "which is not a field of 'modeladmin.ValidationTestModel'." ), id='admin.E037', invalid_obj=Admin, ) def test_autocomplete_e38(self): class Admin(ModelAdmin): autocomplete_fields = ('name',) self.assertIsInvalid( Admin, ValidationTestModel, msg=( "The value of 'autocomplete_fields[0]' must be a foreign " "key or a many-to-many field." ), id='admin.E038', invalid_obj=Admin, ) def test_autocomplete_e039(self): class Admin(ModelAdmin): autocomplete_fields = ('band',) self.assertIsInvalid( Admin, Song, msg=( 'An admin for model "Band" has to be registered ' 'to be referenced by Admin.autocomplete_fields.' ), id='admin.E039', invalid_obj=Admin, ) def test_autocomplete_e040(self): class NoSearchFieldsAdmin(ModelAdmin): pass class AutocompleteAdmin(ModelAdmin): autocomplete_fields = ('featuring',) site = AdminSite() site.register(Band, NoSearchFieldsAdmin) self.assertIsInvalid( AutocompleteAdmin, Song, msg=( 'NoSearchFieldsAdmin must define "search_fields", because ' 'it\'s referenced by AutocompleteAdmin.autocomplete_fields.' 
            ),
            id='admin.E040',
            invalid_obj=AutocompleteAdmin,
            admin_site=site,
        )

    def test_autocomplete_is_valid(self):
        class SearchFieldsAdmin(ModelAdmin):
            search_fields = 'name'

        class AutocompleteAdmin(ModelAdmin):
            autocomplete_fields = ('featuring',)

        site = AdminSite()
        site.register(Band, SearchFieldsAdmin)
        self.assertIsValid(AutocompleteAdmin, Song, admin_site=site)

    def test_autocomplete_is_onetoone(self):
        class UserAdmin(ModelAdmin):
            search_fields = ('name',)

        class Admin(ModelAdmin):
            autocomplete_fields = ('best_friend',)

        site = AdminSite()
        site.register(User, UserAdmin)
        self.assertIsValid(Admin, ValidationTestModel, admin_site=site)


class ActionsCheckTests(CheckTestCase):
    def test_custom_permissions_require_matching_has_method(self):
        @admin.action(permissions=['custom'])
        def custom_permission_action(modeladmin, request, queryset):
            pass

        class BandAdmin(ModelAdmin):
            actions = (custom_permission_action,)

        self.assertIsInvalid(
            BandAdmin, Band,
            'BandAdmin must define a has_custom_permission() method for the '
            'custom_permission_action action.',
            id='admin.E129',
        )

    def test_actions_not_unique(self):
        @admin.action
        def action(modeladmin, request, queryset):
            pass

        class BandAdmin(ModelAdmin):
            actions = (action, action)

        self.assertIsInvalid(
            BandAdmin, Band,
            "__name__ attributes of actions defined in BandAdmin must be "
            "unique. Name 'action' is not unique.",
            id='admin.E130',
        )

    def test_actions_unique(self):
        @admin.action
        def action1(modeladmin, request, queryset):
            pass

        @admin.action
        def action2(modeladmin, request, queryset):
            pass

        class BandAdmin(ModelAdmin):
            actions = (action1, action2)

        self.assertIsValid(BandAdmin, Band)
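

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the Django test suite above). The tests
# above drive ModelAdmin.check() indirectly through the assertIsInvalid()/
# assertIsValid() helpers; this standalone script shows roughly how the same
# checks can be triggered directly: instantiate a ModelAdmin against a model
# and an AdminSite, call .check(), and inspect the returned
# django.core.checks.Error objects, whose .id values are the 'admin.EXXX'
# codes asserted throughout the file above. ExampleModel, BrokenAdmin, and
# the minimal settings used here are made-up assumptions for the sketch.
import django
from django.conf import settings

if not settings.configured:
    settings.configure(
        INSTALLED_APPS=[
            'django.contrib.contenttypes',
            'django.contrib.auth',
            # SimpleAdminConfig skips admin autodiscovery, which this
            # standalone sketch does not need.
            'django.contrib.admin.apps.SimpleAdminConfig',
        ],
    )
    django.setup()

from django.contrib.admin import AdminSite, ModelAdmin
from django.db import models


class ExampleModel(models.Model):
    name = models.CharField(max_length=100)

    class Meta:
        # Reuse an installed app label so the model registers without a
        # real app package; acceptable for an illustrative check run.
        app_label = 'admin'


class BrokenAdmin(ModelAdmin):
    list_display = 10                  # not a list/tuple -> admin.E107
    list_filter = ('does_not_exist',)  # unknown field name -> admin.E116


if __name__ == '__main__':
    for error in BrokenAdmin(ExampleModel, AdminSite()).check():
        print(error.id, '-', error.msg)
    # Expected ids, per the assertions above: admin.E107 and admin.E116.
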
""" Tests for django test runner """ import collections.abc import multiprocessing import os import sys import unittest from unittest import mock from admin_scripts.tests import AdminScriptTestCase from django import db from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.management import call_command from django.core.management.base import SystemCheckError from django.test import ( SimpleTestCase, TransactionTestCase, skipUnlessDBFeature, ) from django.test.runner import ( DiscoverRunner, Shuffler, reorder_test_bin, reorder_tests, shuffle_tests, ) from django.test.testcases import connections_support_transactions from django.test.utils import ( captured_stderr, dependency_ordered, get_unique_databases_and_mirrors, iter_test_cases, ) from django.utils.deprecation import RemovedInDjango50Warning from .models import B, Person, Through class MySuite: def __init__(self): self.tests = [] def addTest(self, test): self.tests.append(test) def __iter__(self): yield from self.tests class TestSuiteTests(SimpleTestCase): def build_test_suite(self, test_classes, suite=None, suite_class=None): if suite_class is None: suite_class = unittest.TestSuite if suite is None: suite = suite_class() loader = unittest.defaultTestLoader for test_class in test_classes: tests = loader.loadTestsFromTestCase(test_class) subsuite = suite_class() # Only use addTest() to simplify testing a custom TestSuite. for test in tests: subsuite.addTest(test) suite.addTest(subsuite) return suite def make_test_suite(self, suite=None, suite_class=None): class Tests1(unittest.TestCase): def test1(self): pass def test2(self): pass class Tests2(unittest.TestCase): def test1(self): pass def test2(self): pass return self.build_test_suite( (Tests1, Tests2), suite=suite, suite_class=suite_class, ) def assertTestNames(self, tests, expected): # Each test.id() has a form like the following: # "test_runner.tests.IterTestCasesTests.test_iter_test_cases.<locals>.Tests1.test1". # It suffices to check only the last two parts. names = ['.'.join(test.id().split('.')[-2:]) for test in tests] self.assertEqual(names, expected) def test_iter_test_cases_basic(self): suite = self.make_test_suite() tests = iter_test_cases(suite) self.assertTestNames(tests, expected=[ 'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2', ]) def test_iter_test_cases_string_input(self): msg = ( "Test 'a' must be a test case or test suite not string (was found " "in 'abc')." 
) with self.assertRaisesMessage(TypeError, msg): list(iter_test_cases('abc')) def test_iter_test_cases_iterable_of_tests(self): class Tests(unittest.TestCase): def test1(self): pass def test2(self): pass tests = list(unittest.defaultTestLoader.loadTestsFromTestCase(Tests)) actual_tests = iter_test_cases(tests) self.assertTestNames(actual_tests, expected=[ 'Tests.test1', 'Tests.test2', ]) def test_iter_test_cases_custom_test_suite_class(self): suite = self.make_test_suite(suite_class=MySuite) tests = iter_test_cases(suite) self.assertTestNames(tests, expected=[ 'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2', ]) def test_iter_test_cases_mixed_test_suite_classes(self): suite = self.make_test_suite(suite=MySuite()) child_suite = list(suite)[0] self.assertNotIsInstance(child_suite, MySuite) tests = list(iter_test_cases(suite)) self.assertEqual(len(tests), 4) self.assertNotIsInstance(tests[0], unittest.TestSuite) def make_tests(self): """Return an iterable of tests.""" suite = self.make_test_suite() tests = list(iter_test_cases(suite)) return tests def test_shuffle_tests(self): tests = self.make_tests() # Choose a seed that shuffles both the classes and methods. shuffler = Shuffler(seed=9) shuffled_tests = shuffle_tests(tests, shuffler) self.assertIsInstance(shuffled_tests, collections.abc.Iterator) self.assertTestNames(shuffled_tests, expected=[ 'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1', ]) def test_reorder_test_bin_no_arguments(self): tests = self.make_tests() reordered_tests = reorder_test_bin(tests) self.assertIsInstance(reordered_tests, collections.abc.Iterator) self.assertTestNames(reordered_tests, expected=[ 'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2', ]) def test_reorder_test_bin_reverse(self): tests = self.make_tests() reordered_tests = reorder_test_bin(tests, reverse=True) self.assertIsInstance(reordered_tests, collections.abc.Iterator) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test2', 'Tests2.test1', 'Tests1.test2', 'Tests1.test1', ]) def test_reorder_test_bin_random(self): tests = self.make_tests() # Choose a seed that shuffles both the classes and methods. shuffler = Shuffler(seed=9) reordered_tests = reorder_test_bin(tests, shuffler=shuffler) self.assertIsInstance(reordered_tests, collections.abc.Iterator) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1', ]) def test_reorder_test_bin_random_and_reverse(self): tests = self.make_tests() # Choose a seed that shuffles both the classes and methods. shuffler = Shuffler(seed=9) reordered_tests = reorder_test_bin(tests, shuffler=shuffler, reverse=True) self.assertIsInstance(reordered_tests, collections.abc.Iterator) self.assertTestNames(reordered_tests, expected=[ 'Tests1.test1', 'Tests1.test2', 'Tests2.test2', 'Tests2.test1', ]) def test_reorder_tests_same_type_consecutive(self): """Tests of the same type are made consecutive.""" tests = self.make_tests() # Move the last item to the front. tests.insert(0, tests.pop()) self.assertTestNames(tests, expected=[ 'Tests2.test2', 'Tests1.test1', 'Tests1.test2', 'Tests2.test1', ]) reordered_tests = reorder_tests(tests, classes=[]) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test2', 'Tests2.test1', 'Tests1.test1', 'Tests1.test2', ]) def test_reorder_tests_random(self): tests = self.make_tests() # Choose a seed that shuffles both the classes and methods. 
shuffler = Shuffler(seed=9) reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler) self.assertIsInstance(reordered_tests, collections.abc.Iterator) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1', ]) def test_reorder_tests_random_mixed_classes(self): tests = self.make_tests() # Move the last item to the front. tests.insert(0, tests.pop()) shuffler = Shuffler(seed=9) self.assertTestNames(tests, expected=[ 'Tests2.test2', 'Tests1.test1', 'Tests1.test2', 'Tests2.test1', ]) reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1', ]) def test_reorder_tests_reverse_with_duplicates(self): class Tests1(unittest.TestCase): def test1(self): pass class Tests2(unittest.TestCase): def test2(self): pass def test3(self): pass suite = self.build_test_suite((Tests1, Tests2)) subsuite = list(suite)[0] suite.addTest(subsuite) tests = list(iter_test_cases(suite)) self.assertTestNames(tests, expected=[ 'Tests1.test1', 'Tests2.test2', 'Tests2.test3', 'Tests1.test1', ]) reordered_tests = reorder_tests(tests, classes=[]) self.assertTestNames(reordered_tests, expected=[ 'Tests1.test1', 'Tests2.test2', 'Tests2.test3', ]) reordered_tests = reorder_tests(tests, classes=[], reverse=True) self.assertTestNames(reordered_tests, expected=[ 'Tests2.test3', 'Tests2.test2', 'Tests1.test1', ]) class DependencyOrderingTests(unittest.TestCase): def test_simple_dependencies(self): raw = [ ('s1', ('s1_db', ['alpha'])), ('s2', ('s2_db', ['bravo'])), ('s3', ('s3_db', ['charlie'])), ] dependencies = { 'alpha': ['charlie'], 'bravo': ['charlie'], } ordered = dependency_ordered(raw, dependencies=dependencies) ordered_sigs = [sig for sig, value in ordered] self.assertIn('s1', ordered_sigs) self.assertIn('s2', ordered_sigs) self.assertIn('s3', ordered_sigs) self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1')) self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2')) def test_chained_dependencies(self): raw = [ ('s1', ('s1_db', ['alpha'])), ('s2', ('s2_db', ['bravo'])), ('s3', ('s3_db', ['charlie'])), ] dependencies = { 'alpha': ['bravo'], 'bravo': ['charlie'], } ordered = dependency_ordered(raw, dependencies=dependencies) ordered_sigs = [sig for sig, value in ordered] self.assertIn('s1', ordered_sigs) self.assertIn('s2', ordered_sigs) self.assertIn('s3', ordered_sigs) # Explicit dependencies self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1')) self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2')) # Implied dependencies self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1')) def test_multiple_dependencies(self): raw = [ ('s1', ('s1_db', ['alpha'])), ('s2', ('s2_db', ['bravo'])), ('s3', ('s3_db', ['charlie'])), ('s4', ('s4_db', ['delta'])), ] dependencies = { 'alpha': ['bravo', 'delta'], 'bravo': ['charlie'], 'delta': ['charlie'], } ordered = dependency_ordered(raw, dependencies=dependencies) ordered_sigs = [sig for sig, aliases in ordered] self.assertIn('s1', ordered_sigs) self.assertIn('s2', ordered_sigs) self.assertIn('s3', ordered_sigs) self.assertIn('s4', ordered_sigs) # Explicit dependencies self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1')) self.assertLess(ordered_sigs.index('s4'), ordered_sigs.index('s1')) self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2')) self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s4')) # 
Implicit dependencies self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1')) def test_circular_dependencies(self): raw = [ ('s1', ('s1_db', ['alpha'])), ('s2', ('s2_db', ['bravo'])), ] dependencies = { 'bravo': ['alpha'], 'alpha': ['bravo'], } with self.assertRaises(ImproperlyConfigured): dependency_ordered(raw, dependencies=dependencies) def test_own_alias_dependency(self): raw = [ ('s1', ('s1_db', ['alpha', 'bravo'])) ] dependencies = { 'alpha': ['bravo'] } with self.assertRaises(ImproperlyConfigured): dependency_ordered(raw, dependencies=dependencies) # reordering aliases shouldn't matter raw = [ ('s1', ('s1_db', ['bravo', 'alpha'])) ] with self.assertRaises(ImproperlyConfigured): dependency_ordered(raw, dependencies=dependencies) class MockTestRunner: def __init__(self, *args, **kwargs): if parallel := kwargs.get('parallel'): sys.stderr.write(f'parallel={parallel}') MockTestRunner.run_tests = mock.Mock(return_value=[]) class ManageCommandTests(unittest.TestCase): def test_custom_test_runner(self): call_command('test', 'sites', testrunner='test_runner.tests.MockTestRunner') MockTestRunner.run_tests.assert_called_with(('sites',)) def test_bad_test_runner(self): with self.assertRaises(AttributeError): call_command('test', 'sites', testrunner='test_runner.NonexistentRunner') def test_time_recorded(self): with captured_stderr() as stderr: call_command('test', '--timing', 'sites', testrunner='test_runner.tests.MockTestRunner') self.assertIn('Total run took', stderr.getvalue()) # Isolate from the real environment. @mock.patch.dict(os.environ, {}, clear=True) @mock.patch.object(multiprocessing, 'cpu_count', return_value=12) # Python 3.8 on macOS defaults to 'spawn' mode. @mock.patch.object(multiprocessing, 'get_start_method', return_value='fork') class ManageCommandParallelTests(SimpleTestCase): def test_parallel_default(self, *mocked_objects): with captured_stderr() as stderr: call_command( 'test', '--parallel', testrunner='test_runner.tests.MockTestRunner', ) self.assertIn('parallel=12', stderr.getvalue()) def test_parallel_auto(self, *mocked_objects): with captured_stderr() as stderr: call_command( 'test', '--parallel=auto', testrunner='test_runner.tests.MockTestRunner', ) self.assertIn('parallel=12', stderr.getvalue()) def test_no_parallel(self, *mocked_objects): with captured_stderr() as stderr: call_command('test', testrunner='test_runner.tests.MockTestRunner') # Parallel is disabled by default. 
self.assertEqual(stderr.getvalue(), '') def test_parallel_spawn(self, mocked_get_start_method, mocked_cpu_count): mocked_get_start_method.return_value = 'spawn' with captured_stderr() as stderr: call_command( 'test', '--parallel=auto', testrunner='test_runner.tests.MockTestRunner', ) self.assertIn('parallel=1', stderr.getvalue()) def test_no_parallel_spawn(self, mocked_get_start_method, mocked_cpu_count): mocked_get_start_method.return_value = 'spawn' with captured_stderr() as stderr: call_command( 'test', testrunner='test_runner.tests.MockTestRunner', ) self.assertEqual(stderr.getvalue(), '') @mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': '7'}) def test_no_parallel_django_test_processes_env(self, *mocked_objects): with captured_stderr() as stderr: call_command('test', testrunner='test_runner.tests.MockTestRunner') self.assertEqual(stderr.getvalue(), '') @mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': 'invalid'}) def test_django_test_processes_env_non_int(self, *mocked_objects): with self.assertRaises(ValueError): call_command( 'test', '--parallel', testrunner='test_runner.tests.MockTestRunner', ) @mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': '7'}) def test_django_test_processes_parallel_default(self, *mocked_objects): for parallel in ['--parallel', '--parallel=auto']: with self.subTest(parallel=parallel): with captured_stderr() as stderr: call_command( 'test', parallel, testrunner='test_runner.tests.MockTestRunner', ) self.assertIn('parallel=7', stderr.getvalue()) class CustomTestRunnerOptionsSettingsTests(AdminScriptTestCase): """ Custom runners can add command line arguments. The runner is specified through a settings file. """ def setUp(self): super().setUp() settings = { 'TEST_RUNNER': '\'test_runner.runner.CustomOptionsTestRunner\'', } self.write_settings('settings.py', sdict=settings) def test_default_options(self): args = ['test', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, '1:2:3') def test_default_and_given_options(self): args = ['test', '--settings=test_project.settings', '--option_b=foo'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, '1:foo:3') def test_option_name_and_value_separated(self): args = ['test', '--settings=test_project.settings', '--option_b', 'foo'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, '1:foo:3') def test_all_options_given(self): args = ['test', '--settings=test_project.settings', '--option_a=bar', '--option_b=foo', '--option_c=31337'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, 'bar:foo:31337') class CustomTestRunnerOptionsCmdlineTests(AdminScriptTestCase): """ Custom runners can add command line arguments when the runner is specified using --testrunner. 
""" def setUp(self): super().setUp() self.write_settings('settings.py') def test_testrunner_option(self): args = [ 'test', '--testrunner', 'test_runner.runner.CustomOptionsTestRunner', '--option_a=bar', '--option_b=foo', '--option_c=31337' ] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, 'bar:foo:31337') def test_testrunner_equals(self): args = [ 'test', '--testrunner=test_runner.runner.CustomOptionsTestRunner', '--option_a=bar', '--option_b=foo', '--option_c=31337' ] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, 'bar:foo:31337') def test_no_testrunner(self): args = ['test', '--testrunner'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertIn('usage', err) self.assertNotIn('Traceback', err) self.assertNoOutput(out) class Ticket17477RegressionTests(AdminScriptTestCase): def setUp(self): super().setUp() self.write_settings('settings.py') def test_ticket_17477(self): """'manage.py help test' works after r16352.""" args = ['help', 'test'] out, err = self.run_manage(args) self.assertNoOutput(err) class SQLiteInMemoryTestDbs(TransactionTestCase): available_apps = ['test_runner'] databases = {'default', 'other'} @unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections), "This is an sqlite-specific issue") def test_transaction_support(self): # Assert connections mocking is appropriately applied by preventing # any attempts at calling create_test_db on the global connection # objects. for connection in db.connections.all(): create_test_db = mock.patch.object( connection.creation, 'create_test_db', side_effect=AssertionError("Global connection object shouldn't be manipulated.") ) create_test_db.start() self.addCleanup(create_test_db.stop) for option_key, option_value in ( ('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})): tested_connections = db.ConnectionHandler({ 'default': { 'ENGINE': 'django.db.backends.sqlite3', option_key: option_value, }, 'other': { 'ENGINE': 'django.db.backends.sqlite3', option_key: option_value, }, }) with mock.patch('django.test.utils.connections', new=tested_connections): other = tested_connections['other'] DiscoverRunner(verbosity=0).setup_databases() msg = ( "DATABASES setting '%s' option set to sqlite3's ':memory:' value " "shouldn't interfere with transaction support detection." % option_key ) # Transaction support is properly initialized for the 'other' DB. self.assertTrue(other.features.supports_transactions, msg) # And all the DBs report that they support transactions. self.assertTrue(connections_support_transactions(), msg) class DummyBackendTest(unittest.TestCase): def test_setup_databases(self): """ setup_databases() doesn't fail with dummy database backend. 
""" tested_connections = db.ConnectionHandler({}) with mock.patch('django.test.utils.connections', new=tested_connections): runner_instance = DiscoverRunner(verbosity=0) old_config = runner_instance.setup_databases() runner_instance.teardown_databases(old_config) class AliasedDefaultTestSetupTest(unittest.TestCase): def test_setup_aliased_default_database(self): """ setup_databases() doesn't fail when 'default' is aliased """ tested_connections = db.ConnectionHandler({ 'default': { 'NAME': 'dummy' }, 'aliased': { 'NAME': 'dummy' } }) with mock.patch('django.test.utils.connections', new=tested_connections): runner_instance = DiscoverRunner(verbosity=0) old_config = runner_instance.setup_databases() runner_instance.teardown_databases(old_config) class SetupDatabasesTests(SimpleTestCase): def setUp(self): self.runner_instance = DiscoverRunner(verbosity=0) def test_setup_aliased_databases(self): tested_connections = db.ConnectionHandler({ 'default': { 'ENGINE': 'django.db.backends.dummy', 'NAME': 'dbname', }, 'other': { 'ENGINE': 'django.db.backends.dummy', 'NAME': 'dbname', } }) with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation: with mock.patch('django.test.utils.connections', new=tested_connections): old_config = self.runner_instance.setup_databases() self.runner_instance.teardown_databases(old_config) mocked_db_creation.return_value.destroy_test_db.assert_called_once_with('dbname', 0, False) def test_setup_test_database_aliases(self): """ The default database must be the first because data migrations use the default alias by default. """ tested_connections = db.ConnectionHandler({ 'other': { 'ENGINE': 'django.db.backends.dummy', 'NAME': 'dbname', }, 'default': { 'ENGINE': 'django.db.backends.dummy', 'NAME': 'dbname', } }) with mock.patch('django.test.utils.connections', new=tested_connections): test_databases, _ = get_unique_databases_and_mirrors() self.assertEqual( test_databases, { ('', '', 'django.db.backends.dummy', 'test_dbname'): ( 'dbname', ['default', 'other'], ), }, ) def test_destroy_test_db_restores_db_name(self): tested_connections = db.ConnectionHandler({ 'default': { 'ENGINE': settings.DATABASES[db.DEFAULT_DB_ALIAS]["ENGINE"], 'NAME': 'xxx_test_database', }, }) # Using the real current name as old_name to not mess with the test suite. old_name = settings.DATABASES[db.DEFAULT_DB_ALIAS]["NAME"] with mock.patch('django.db.connections', new=tested_connections): tested_connections['default'].creation.destroy_test_db(old_name, verbosity=0, keepdb=True) self.assertEqual(tested_connections['default'].settings_dict["NAME"], old_name) def test_serialization(self): tested_connections = db.ConnectionHandler({ 'default': { 'ENGINE': 'django.db.backends.dummy', }, }) with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation: with mock.patch('django.test.utils.connections', new=tested_connections): self.runner_instance.setup_databases() mocked_db_creation.return_value.create_test_db.assert_called_once_with( verbosity=0, autoclobber=False, serialize=True, keepdb=False ) def test_serialized_off(self): tested_connections = db.ConnectionHandler({ 'default': { 'ENGINE': 'django.db.backends.dummy', 'TEST': {'SERIALIZE': False}, }, }) msg = ( 'The SERIALIZE test database setting is deprecated as it can be ' 'inferred from the TestCase/TransactionTestCase.databases that ' 'enable the serialized_rollback feature.' 
) with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation: with mock.patch('django.test.utils.connections', new=tested_connections): with self.assertWarnsMessage(RemovedInDjango50Warning, msg): self.runner_instance.setup_databases() mocked_db_creation.return_value.create_test_db.assert_called_once_with( verbosity=0, autoclobber=False, serialize=False, keepdb=False ) @skipUnlessDBFeature('supports_sequence_reset') class AutoIncrementResetTest(TransactionTestCase): """ Creating the same models in different test methods receive the same PK values since the sequences are reset before each test method. """ available_apps = ['test_runner'] reset_sequences = True def _test(self): # Regular model p = Person.objects.create(first_name='Jack', last_name='Smith') self.assertEqual(p.pk, 1) # Auto-created many-to-many through model p.friends.add(Person.objects.create(first_name='Jacky', last_name='Smith')) self.assertEqual(p.friends.through.objects.first().pk, 1) # Many-to-many through model b = B.objects.create() t = Through.objects.create(person=p, b=b) self.assertEqual(t.pk, 1) def test_autoincrement_reset1(self): self._test() def test_autoincrement_reset2(self): self._test() class EmptyDefaultDatabaseTest(unittest.TestCase): def test_empty_default_database(self): """ An empty default database in settings does not raise an ImproperlyConfigured error when running a unit test that does not use a database. """ tested_connections = db.ConnectionHandler({'default': {}}) with mock.patch('django.db.connections', new=tested_connections): connection = tested_connections[db.utils.DEFAULT_DB_ALIAS] self.assertEqual(connection.settings_dict['ENGINE'], 'django.db.backends.dummy') connections_support_transactions() class RunTestsExceptionHandlingTests(unittest.TestCase): def test_run_checks_raises(self): """ Teardown functions are run when run_checks() raises SystemCheckError. """ with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \ mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \ mock.patch('django.test.runner.DiscoverRunner.build_suite'), \ mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \ mock.patch('django.test.runner.DiscoverRunner.teardown_databases') as teardown_databases, \ mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment: runner = DiscoverRunner(verbosity=0, interactive=False) with self.assertRaises(SystemCheckError): runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase']) self.assertTrue(teardown_databases.called) self.assertTrue(teardown_test_environment.called) def test_run_checks_raises_and_teardown_raises(self): """ SystemCheckError is surfaced when run_checks() raises SystemCheckError and teardown databases() raises ValueError. 
""" with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \ mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \ mock.patch('django.test.runner.DiscoverRunner.build_suite'), \ mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \ mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \ as teardown_databases, \ mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment: runner = DiscoverRunner(verbosity=0, interactive=False) with self.assertRaises(SystemCheckError): runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase']) self.assertTrue(teardown_databases.called) self.assertFalse(teardown_test_environment.called) def test_run_checks_passes_and_teardown_raises(self): """ Exceptions on teardown are surfaced if no exceptions happen during run_checks(). """ with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \ mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \ mock.patch('django.test.runner.DiscoverRunner.build_suite'), \ mock.patch('django.test.runner.DiscoverRunner.run_checks'), \ mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \ as teardown_databases, \ mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment: runner = DiscoverRunner(verbosity=0, interactive=False) with self.assertRaises(ValueError): # Suppress the output when running TestDjangoTestCase. with mock.patch('sys.stderr'): runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase']) self.assertTrue(teardown_databases.called) self.assertFalse(teardown_test_environment.called) # RemovedInDjango50Warning class NoOpTestRunner(DiscoverRunner): def setup_test_environment(self, **kwargs): return def setup_databases(self, **kwargs): return def run_checks(self, databases): return def teardown_databases(self, old_config, **kwargs): return def teardown_test_environment(self, **kwargs): return class DiscoverRunnerExtraTestsDeprecationTests(SimpleTestCase): msg = 'The extra_tests argument is deprecated.' def get_runner(self): return NoOpTestRunner(verbosity=0, interactive=False) def test_extra_tests_build_suite(self): runner = self.get_runner() with self.assertWarnsMessage(RemovedInDjango50Warning, self.msg): runner.build_suite(extra_tests=[]) def test_extra_tests_run_tests(self): runner = self.get_runner() with captured_stderr(): with self.assertWarnsMessage(RemovedInDjango50Warning, self.msg): runner.run_tests( test_labels=['test_runner_apps.sample.tests_sample.EmptyTestCase'], extra_tests=[], )
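
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the Django test suites above or below).
# The test runner suite above exercises Django's test-ordering helpers; this
# standalone snippet shows, under minimal assumptions, how reorder_tests()
# makes tests of the same TestCase class consecutive (keeping first-seen
# class order) and how a seeded Shuffler gives a reproducible shuffled order,
# which is what lets those tests assert exact orderings. The Alpha/Beta test
# classes are made up for illustration.
import unittest

from django.test.runner import Shuffler, reorder_tests


class Alpha(unittest.TestCase):
    def test_a(self):
        pass

    def test_b(self):
        pass


class Beta(unittest.TestCase):
    def test_c(self):
        pass


def _load(test_case_class):
    return list(unittest.defaultTestLoader.loadTestsFromTestCase(test_case_class))


def _names(tests):
    # Same naming scheme as assertTestNames() in the suite above: "Class.method".
    return ['.'.join(test.id().split('.')[-2:]) for test in tests]


if __name__ == '__main__':
    alpha, beta = _load(Alpha), _load(Beta)
    # Interleave the classes on purpose; reorder_tests() groups each class's
    # tests together again while preserving their relative order.
    tests = [alpha[0], beta[0], alpha[1]]
    print(_names(reorder_tests(tests, classes=[])))
    # -> ['Alpha.test_a', 'Alpha.test_b', 'Beta.test_c']

    # A fixed seed gives a deterministic (but shuffled) class/method order.
    print(_names(reorder_tests(tests, classes=[], shuffler=Shuffler(seed=9))))
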
import logging import multiprocessing import os import unittest.loader from argparse import ArgumentParser from contextlib import contextmanager from importlib import import_module from unittest import TestSuite, TextTestRunner, defaultTestLoader, mock from django.db import connections from django.test import SimpleTestCase from django.test.runner import DiscoverRunner, get_max_test_processes from django.test.utils import ( NullTimeKeeper, TimeKeeper, captured_stderr, captured_stdout, ) @contextmanager def change_cwd(directory): current_dir = os.path.abspath(os.path.dirname(__file__)) new_dir = os.path.join(current_dir, directory) old_cwd = os.getcwd() os.chdir(new_dir) try: yield finally: os.chdir(old_cwd) @contextmanager def change_loader_patterns(patterns): original_patterns = DiscoverRunner.test_loader.testNamePatterns DiscoverRunner.test_loader.testNamePatterns = patterns try: yield finally: DiscoverRunner.test_loader.testNamePatterns = original_patterns # Isolate from the real environment. @mock.patch.dict(os.environ, {}, clear=True) @mock.patch.object(multiprocessing, 'cpu_count', return_value=12) # Python 3.8 on macOS defaults to 'spawn' mode. @mock.patch.object(multiprocessing, 'get_start_method', return_value='fork') class DiscoverRunnerParallelArgumentTests(SimpleTestCase): def get_parser(self): parser = ArgumentParser() DiscoverRunner.add_arguments(parser) return parser def test_parallel_default(self, *mocked_objects): result = self.get_parser().parse_args([]) self.assertEqual(result.parallel, 0) def test_parallel_flag(self, *mocked_objects): result = self.get_parser().parse_args(['--parallel']) self.assertEqual(result.parallel, 'auto') def test_parallel_auto(self, *mocked_objects): result = self.get_parser().parse_args(['--parallel', 'auto']) self.assertEqual(result.parallel, 'auto') def test_parallel_count(self, *mocked_objects): result = self.get_parser().parse_args(['--parallel', '17']) self.assertEqual(result.parallel, 17) def test_parallel_invalid(self, *mocked_objects): with self.assertRaises(SystemExit), captured_stderr() as stderr: self.get_parser().parse_args(['--parallel', 'unaccepted']) msg = "argument --parallel: 'unaccepted' is not an integer or the string 'auto'" self.assertIn(msg, stderr.getvalue()) def test_get_max_test_processes(self, *mocked_objects): self.assertEqual(get_max_test_processes(), 12) @mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': '7'}) def test_get_max_test_processes_env_var(self, *mocked_objects): self.assertEqual(get_max_test_processes(), 7) def test_get_max_test_processes_spawn( self, mocked_get_start_method, mocked_cpu_count, ): mocked_get_start_method.return_value = 'spawn' self.assertEqual(get_max_test_processes(), 1) with mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': '7'}): self.assertEqual(get_max_test_processes(), 1) class DiscoverRunnerTests(SimpleTestCase): @staticmethod def get_test_methods_names(suite): return [ t.__class__.__name__ + '.' 
+ t._testMethodName for t in suite._tests ] def test_init_debug_mode(self): runner = DiscoverRunner() self.assertFalse(runner.debug_mode) def test_add_arguments_shuffle(self): parser = ArgumentParser() DiscoverRunner.add_arguments(parser) ns = parser.parse_args([]) self.assertIs(ns.shuffle, False) ns = parser.parse_args(['--shuffle']) self.assertIsNone(ns.shuffle) ns = parser.parse_args(['--shuffle', '5']) self.assertEqual(ns.shuffle, 5) def test_add_arguments_debug_mode(self): parser = ArgumentParser() DiscoverRunner.add_arguments(parser) ns = parser.parse_args([]) self.assertFalse(ns.debug_mode) ns = parser.parse_args(["--debug-mode"]) self.assertTrue(ns.debug_mode) def test_setup_shuffler_no_shuffle_argument(self): runner = DiscoverRunner() self.assertIs(runner.shuffle, False) runner.setup_shuffler() self.assertIsNone(runner.shuffle_seed) def test_setup_shuffler_shuffle_none(self): runner = DiscoverRunner(shuffle=None) self.assertIsNone(runner.shuffle) with mock.patch('random.randint', return_value=1): with captured_stdout() as stdout: runner.setup_shuffler() self.assertEqual(stdout.getvalue(), 'Using shuffle seed: 1 (generated)\n') self.assertEqual(runner.shuffle_seed, 1) def test_setup_shuffler_shuffle_int(self): runner = DiscoverRunner(shuffle=2) self.assertEqual(runner.shuffle, 2) with captured_stdout() as stdout: runner.setup_shuffler() expected_out = 'Using shuffle seed: 2 (given)\n' self.assertEqual(stdout.getvalue(), expected_out) self.assertEqual(runner.shuffle_seed, 2) def test_load_tests_for_label_file_path(self): with change_cwd('.'): msg = ( "One of the test labels is a path to a file: " "'test_discover_runner.py', which is not supported. Use a " "dotted module name or path to a directory instead." ) with self.assertRaisesMessage(RuntimeError, msg): DiscoverRunner().load_tests_for_label('test_discover_runner.py', {}) def test_dotted_test_module(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample'], ).countTestCases() self.assertEqual(count, 4) def test_dotted_test_class_vanilla_unittest(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestVanillaUnittest'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_class_django_testcase(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase'], ).countTestCases() self.assertEqual(count, 1) def test_dotted_test_method_django_testcase(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample'], ).countTestCases() self.assertEqual(count, 1) def test_pattern(self): count = DiscoverRunner( pattern="*_tests.py", verbosity=0, ).build_suite(['test_runner_apps.sample']).countTestCases() self.assertEqual(count, 1) def test_name_patterns(self): all_test_1 = [ 'DjangoCase1.test_1', 'DjangoCase2.test_1', 'SimpleCase1.test_1', 'SimpleCase2.test_1', 'UnittestCase1.test_1', 'UnittestCase2.test_1', ] all_test_2 = [ 'DjangoCase1.test_2', 'DjangoCase2.test_2', 'SimpleCase1.test_2', 'SimpleCase2.test_2', 'UnittestCase1.test_2', 'UnittestCase2.test_2', ] all_tests = sorted([*all_test_1, *all_test_2, 'UnittestCase2.test_3_test']) for pattern, expected in [ [['test_1'], all_test_1], [['UnittestCase1'], ['UnittestCase1.test_1', 'UnittestCase1.test_2']], [['*test'], ['UnittestCase2.test_3_test']], [['test*'], all_tests], [['test'], all_tests], [['test_1', 'test_2'], sorted([*all_test_1, *all_test_2])], [['test*1'], all_test_1], 
]: with self.subTest(pattern): suite = DiscoverRunner( test_name_patterns=pattern, verbosity=0, ).build_suite(['test_runner_apps.simple']) self.assertEqual(expected, self.get_test_methods_names(suite)) def test_loader_patterns_not_mutated(self): runner = DiscoverRunner(test_name_patterns=['test_sample'], verbosity=0) tests = [ ('test_runner_apps.sample.tests', 1), ('test_runner_apps.sample.tests.Test.test_sample', 1), ('test_runner_apps.sample.empty', 0), ('test_runner_apps.sample.tests_sample.EmptyTestCase', 0), ] for test_labels, tests_count in tests: with self.subTest(test_labels=test_labels): with change_loader_patterns(['UnittestCase1']): count = runner.build_suite([test_labels]).countTestCases() self.assertEqual(count, tests_count) self.assertEqual(runner.test_loader.testNamePatterns, ['UnittestCase1']) def test_loader_patterns_not_mutated_when_test_label_is_file_path(self): runner = DiscoverRunner(test_name_patterns=['test_sample'], verbosity=0) with change_cwd('.'), change_loader_patterns(['UnittestCase1']): with self.assertRaises(RuntimeError): runner.build_suite(['test_discover_runner.py']) self.assertEqual(runner.test_loader.testNamePatterns, ['UnittestCase1']) def test_file_path(self): with change_cwd(".."): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps/sample/'], ).countTestCases() self.assertEqual(count, 5) def test_empty_label(self): """ If the test label is empty, discovery should happen on the current working directory. """ with change_cwd("."): suite = DiscoverRunner(verbosity=0).build_suite([]) self.assertEqual( suite._tests[0].id().split(".")[0], os.path.basename(os.getcwd()), ) def test_empty_test_case(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests_sample.EmptyTestCase'], ).countTestCases() self.assertEqual(count, 0) def test_discovery_on_package(self): count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.tests'], ).countTestCases() self.assertEqual(count, 1) def test_ignore_adjacent(self): """ When given a dotted path to a module, unittest discovery searches not just the module, but also the directory containing the module. This results in tests from adjacent modules being run when they should not. The discover runner avoids this behavior. """ count = DiscoverRunner(verbosity=0).build_suite( ['test_runner_apps.sample.empty'], ).countTestCases() self.assertEqual(count, 0) def test_testcase_ordering(self): with change_cwd(".."): suite = DiscoverRunner(verbosity=0).build_suite(['test_runner_apps/sample/']) self.assertEqual( suite._tests[0].__class__.__name__, 'TestDjangoTestCase', msg="TestDjangoTestCase should be the first test case") self.assertEqual( suite._tests[1].__class__.__name__, 'TestZimpleTestCase', msg="TestZimpleTestCase should be the second test case") # All others can follow in unspecified order, including doctests self.assertIn('DocTestCase', [t.__class__.__name__ for t in suite._tests[2:]]) def test_duplicates_ignored(self): """ Tests shouldn't be discovered twice when discovering on overlapping paths. """ base_app = 'forms_tests' sub_app = 'forms_tests.field_tests' runner = DiscoverRunner(verbosity=0) with self.modify_settings(INSTALLED_APPS={'append': sub_app}): single = runner.build_suite([base_app]).countTestCases() dups = runner.build_suite([base_app, sub_app]).countTestCases() self.assertEqual(single, dups) def test_reverse(self): """ Reverse should reorder tests while maintaining the grouping specified by ``DiscoverRunner.reorder_by``. 
""" runner = DiscoverRunner(reverse=True, verbosity=0) suite = runner.build_suite( test_labels=('test_runner_apps.sample', 'test_runner_apps.simple')) self.assertIn('test_runner_apps.simple', next(iter(suite)).id(), msg="Test labels should be reversed.") suite = runner.build_suite(test_labels=('test_runner_apps.simple',)) suite = tuple(suite) self.assertIn('DjangoCase', suite[0].id(), msg="Test groups should not be reversed.") self.assertIn('SimpleCase', suite[4].id(), msg="Test groups order should be preserved.") self.assertIn('DjangoCase2', suite[0].id(), msg="Django test cases should be reversed.") self.assertIn('SimpleCase2', suite[4].id(), msg="Simple test cases should be reversed.") self.assertIn('UnittestCase2', suite[8].id(), msg="Unittest test cases should be reversed.") self.assertIn('test_2', suite[0].id(), msg="Methods of Django cases should be reversed.") self.assertIn('test_2', suite[4].id(), msg="Methods of simple cases should be reversed.") self.assertIn('test_2', suite[9].id(), msg="Methods of unittest cases should be reversed.") def test_build_suite_failed_tests_first(self): # The "doesnotexist" label results in a _FailedTest instance. suite = DiscoverRunner(verbosity=0).build_suite( test_labels=['test_runner_apps.sample', 'doesnotexist'], ) tests = list(suite) self.assertIsInstance(tests[0], unittest.loader._FailedTest) self.assertNotIsInstance(tests[-1], unittest.loader._FailedTest) def test_build_suite_shuffling(self): # These will result in unittest.loader._FailedTest instances rather # than TestCase objects, but they are sufficient for testing. labels = ['label1', 'label2', 'label3', 'label4'] cases = [ ({}, ['label1', 'label2', 'label3', 'label4']), ({'reverse': True}, ['label4', 'label3', 'label2', 'label1']), ({'shuffle': 8}, ['label4', 'label1', 'label3', 'label2']), ({'shuffle': 8, 'reverse': True}, ['label2', 'label3', 'label1', 'label4']), ] for kwargs, expected in cases: with self.subTest(kwargs=kwargs): # Prevent writing the seed to stdout. runner = DiscoverRunner(**kwargs, verbosity=0) tests = runner.build_suite(test_labels=labels) # The ids have the form "unittest.loader._FailedTest.label1". 
names = [test.id().split('.')[-1] for test in tests] self.assertEqual(names, expected) def test_overridable_get_test_runner_kwargs(self): self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict) def test_overridable_test_suite(self): self.assertEqual(DiscoverRunner().test_suite, TestSuite) def test_overridable_test_runner(self): self.assertEqual(DiscoverRunner().test_runner, TextTestRunner) def test_overridable_test_loader(self): self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader) def test_tags(self): runner = DiscoverRunner(tags=['core'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) runner = DiscoverRunner(tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2) def test_exclude_tags(self): runner = DiscoverRunner(tags=['fast'], exclude_tags=['core'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1) runner = DiscoverRunner(tags=['fast'], exclude_tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) runner = DiscoverRunner(exclude_tags=['slow'], verbosity=0) self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0) def test_tag_inheritance(self): def count_tests(**kwargs): kwargs.setdefault('verbosity', 0) suite = DiscoverRunner(**kwargs).build_suite(['test_runner_apps.tagged.tests_inheritance']) return suite.countTestCases() self.assertEqual(count_tests(tags=['foo']), 4) self.assertEqual(count_tests(tags=['bar']), 2) self.assertEqual(count_tests(tags=['baz']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar']), 2) self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar', 'baz']), 1) self.assertEqual(count_tests(exclude_tags=['foo']), 0) def test_tag_fail_to_load(self): with self.assertRaises(SyntaxError): import_module('test_runner_apps.tagged.tests_syntax_error') runner = DiscoverRunner(tags=['syntax_error'], verbosity=0) # A label that doesn't exist or cannot be loaded due to syntax errors # is always considered matching. suite = runner.build_suite(['doesnotexist', 'test_runner_apps.tagged']) self.assertEqual([test.id() for test in suite], [ 'unittest.loader._FailedTest.doesnotexist', 'unittest.loader._FailedTest.test_runner_apps.tagged.tests_syntax_error', ]) def test_included_tags_displayed(self): runner = DiscoverRunner(tags=['foo', 'bar'], verbosity=2) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Including test tag(s): bar, foo.\n', stdout.getvalue()) def test_excluded_tags_displayed(self): runner = DiscoverRunner(exclude_tags=['foo', 'bar'], verbosity=3) with captured_stdout() as stdout: runner.build_suite(['test_runner_apps.tagged.tests']) self.assertIn('Excluding test tag(s): bar, foo.\n', stdout.getvalue()) def test_number_of_tests_found_displayed(self): runner = DiscoverRunner() with captured_stdout() as stdout: runner.build_suite([ 'test_runner_apps.sample.tests_sample.TestDjangoTestCase', 'test_runner_apps.simple', ]) self.assertIn('Found 14 test(s).\n', stdout.getvalue()) def test_pdb_with_parallel(self): msg = ( 'You cannot use --pdb with parallel tests; pass --parallel=1 to ' 'use it.' 
) with self.assertRaisesMessage(ValueError, msg): DiscoverRunner(pdb=True, parallel=2) def test_number_of_parallel_workers(self): """Number of processes doesn't exceed the number of TestCases.""" runner = DiscoverRunner(parallel=5, verbosity=0) suite = runner.build_suite(['test_runner_apps.tagged']) self.assertEqual(suite.processes, len(suite.subsuites)) def test_number_of_databases_parallel_test_suite(self): """ Number of databases doesn't exceed the number of TestCases with parallel tests. """ runner = DiscoverRunner(parallel=8, verbosity=0) suite = runner.build_suite(['test_runner_apps.tagged']) self.assertEqual(suite.processes, len(suite.subsuites)) self.assertEqual(runner.parallel, suite.processes) def test_number_of_databases_no_parallel_test_suite(self): """ Number of databases doesn't exceed the number of TestCases with non-parallel tests. """ runner = DiscoverRunner(parallel=8, verbosity=0) suite = runner.build_suite(['test_runner_apps.simple.tests.DjangoCase1']) self.assertEqual(runner.parallel, 1) self.assertIsInstance(suite, TestSuite) def test_buffer_mode_test_pass(self): runner = DiscoverRunner(buffer=True, verbosity=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_pass', ]) runner.run_suite(suite) self.assertNotIn('Write to stderr.', stderr.getvalue()) self.assertNotIn('Write to stdout.', stdout.getvalue()) def test_buffer_mode_test_fail(self): runner = DiscoverRunner(buffer=True, verbosity=0) with captured_stdout() as stdout, captured_stderr() as stderr: suite = runner.build_suite([ 'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_fail', ]) runner.run_suite(suite) self.assertIn('Write to stderr.', stderr.getvalue()) self.assertIn('Write to stdout.', stdout.getvalue()) def run_suite_with_runner(self, runner_class, **kwargs): class MyRunner(DiscoverRunner): def test_runner(self, *args, **kwargs): return runner_class() runner = MyRunner(**kwargs) # Suppress logging "Using shuffle seed" to the console. with captured_stdout(): runner.setup_shuffler() with captured_stdout() as stdout: try: result = runner.run_suite(None) except RuntimeError as exc: result = str(exc) output = stdout.getvalue() return result, output def test_run_suite_logs_seed(self): class TestRunner: def run(self, suite): return '<fake-result>' expected_prefix = 'Used shuffle seed' # Test with and without shuffling enabled. result, output = self.run_suite_with_runner(TestRunner) self.assertEqual(result, '<fake-result>') self.assertNotIn(expected_prefix, output) result, output = self.run_suite_with_runner(TestRunner, shuffle=2) self.assertEqual(result, '<fake-result>') expected_output = f'{expected_prefix}: 2 (given)\n' self.assertEqual(output, expected_output) def test_run_suite_logs_seed_exception(self): """ run_suite() logs the seed when TestRunner.run() raises an exception. 
""" class TestRunner: def run(self, suite): raise RuntimeError('my exception') result, output = self.run_suite_with_runner(TestRunner, shuffle=2) self.assertEqual(result, 'my exception') expected_output = 'Used shuffle seed: 2 (given)\n' self.assertEqual(output, expected_output) @mock.patch('faulthandler.enable') def test_faulthandler_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_already_enabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=True): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_not_called() @mock.patch('faulthandler.enable') def test_faulthandler_enabled_fileno(self, mocked_enable): # sys.stderr that is not an actual file. with mock.patch('faulthandler.is_enabled', return_value=False), captured_stderr(): DiscoverRunner(enable_faulthandler=True) mocked_enable.assert_called() @mock.patch('faulthandler.enable') def test_faulthandler_disabled(self, mocked_enable): with mock.patch('faulthandler.is_enabled', return_value=False): DiscoverRunner(enable_faulthandler=False) mocked_enable.assert_not_called() def test_timings_not_captured(self): runner = DiscoverRunner(timing=False) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertIsInstance(runner.time_keeper, NullTimeKeeper) self.assertNotIn('test', stderr.getvalue()) def test_timings_captured(self): runner = DiscoverRunner(timing=True) with captured_stderr() as stderr: with runner.time_keeper.timed('test'): pass runner.time_keeper.print_results() self.assertIsInstance(runner.time_keeper, TimeKeeper) self.assertIn('test', stderr.getvalue()) def test_log(self): custom_low_level = 5 custom_high_level = 45 msg = 'logging message' cases = [ (0, None, False), (0, custom_low_level, False), (0, logging.DEBUG, False), (0, logging.INFO, False), (0, logging.WARNING, False), (0, custom_high_level, False), (1, None, True), (1, custom_low_level, False), (1, logging.DEBUG, False), (1, logging.INFO, True), (1, logging.WARNING, True), (1, custom_high_level, True), (2, None, True), (2, custom_low_level, True), (2, logging.DEBUG, True), (2, logging.INFO, True), (2, logging.WARNING, True), (2, custom_high_level, True), (3, None, True), (3, custom_low_level, True), (3, logging.DEBUG, True), (3, logging.INFO, True), (3, logging.WARNING, True), (3, custom_high_level, True), ] for verbosity, level, output in cases: with self.subTest(verbosity=verbosity, level=level): with captured_stdout() as stdout: runner = DiscoverRunner(verbosity=verbosity) runner.log(msg, level) self.assertEqual(stdout.getvalue(), f'{msg}\n' if output else '') def test_log_logger(self): logger = logging.getLogger('test.logging') cases = [ (None, 'INFO:test.logging:log message'), # Test a low custom logging level. (5, 'Level 5:test.logging:log message'), (logging.DEBUG, 'DEBUG:test.logging:log message'), (logging.INFO, 'INFO:test.logging:log message'), (logging.WARNING, 'WARNING:test.logging:log message'), # Test a high custom logging level. (45, 'Level 45:test.logging:log message'), ] for level, expected in cases: with self.subTest(level=level): runner = DiscoverRunner(logger=logger) # Pass a logging level smaller than the smallest level in cases # in order to capture all messages. 
with self.assertLogs('test.logging', level=1) as cm: runner.log('log message', level) self.assertEqual(cm.output, [expected]) def test_suite_result_with_failure(self): cases = [ (1, 'FailureTestCase'), (1, 'ErrorTestCase'), (0, 'ExpectedFailureTestCase'), (1, 'UnexpectedSuccessTestCase'), ] runner = DiscoverRunner(verbosity=0) for expected_failures, testcase in cases: with self.subTest(testcase=testcase): suite = runner.build_suite([ f'test_runner_apps.failures.tests_failures.{testcase}', ]) with captured_stderr(): result = runner.run_suite(suite) failures = runner.suite_result(suite, result) self.assertEqual(failures, expected_failures) class DiscoverRunnerGetDatabasesTests(SimpleTestCase): runner = DiscoverRunner(verbosity=2) skip_msg = 'Skipping setup of unused database(s): ' def get_databases(self, test_labels): with captured_stdout() as stdout: suite = self.runner.build_suite(test_labels) databases = self.runner.get_databases(suite) return databases, stdout.getvalue() def assertSkippedDatabases(self, test_labels, expected_databases): databases, output = self.get_databases(test_labels) self.assertEqual(databases, expected_databases) skipped_databases = set(connections) - set(expected_databases) if skipped_databases: self.assertIn(self.skip_msg + ', '.join(sorted(skipped_databases)), output) else: self.assertNotIn(self.skip_msg, output) def test_mixed(self): databases, output = self.get_databases(['test_runner_apps.databases.tests']) self.assertEqual(databases, {'default': True, 'other': False}) self.assertNotIn(self.skip_msg, output) def test_all(self): databases, output = self.get_databases(['test_runner_apps.databases.tests.AllDatabasesTests']) self.assertEqual(databases, {alias: False for alias in connections}) self.assertNotIn(self.skip_msg, output) def test_default_and_other(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', 'test_runner_apps.databases.tests.OtherDatabaseTests', ], {'default': False, 'other': False}) def test_default_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.DefaultDatabaseTests', ], {'default': False}) def test_other_only(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.OtherDatabaseTests' ], {'other': False}) def test_no_databases_required(self): self.assertSkippedDatabases([ 'test_runner_apps.databases.tests.NoDatabaseTests' ], {}) def test_serialize(self): databases, _ = self.get_databases([ 'test_runner_apps.databases.tests.DefaultDatabaseSerializedTests' ]) self.assertEqual(databases, {'default': True})
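
# --- Illustrative usage sketch (not part of the original test module) ---
# A minimal example of the DiscoverRunner API exercised by the tests above:
# build_suite() turns dotted test labels into a unittest suite, honouring
# tags/exclude_tags, and countTestCases() reports its size. The settings
# module and the default 'myapp.tests' label are assumptions for
# illustration only.
import django
from django.test.runner import DiscoverRunner


def count_matching_tests(label='myapp.tests', tags=None, exclude_tags=None):
    """Return how many tests under ``label`` match the given tags."""
    django.setup()  # assumes DJANGO_SETTINGS_MODULE is already configured
    runner = DiscoverRunner(
        verbosity=0,  # verbosity=0 keeps runner output off stdout
        tags=tags or [],
        exclude_tags=exclude_tags or [],
    )
    return runner.build_suite([label]).countTestCases()

# e.g. count_matching_tests('test_runner_apps.tagged.tests', tags=['slow'])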
274a2df02ef43be0d3868007d9313dee5ac86ecb1ee17577950f4d880bd8f0ff
from datetime import datetime from decimal import Decimal from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import helpers from django.contrib.admin.utils import ( NestedObjects, display_for_field, display_for_value, flatten, flatten_fieldsets, help_text_for_field, label_for_field, lookup_field, quote, ) from django.db import DEFAULT_DB_ALIAS, models from django.test import SimpleTestCase, TestCase, override_settings from django.utils.formats import localize from django.utils.safestring import mark_safe from .models import ( Article, Car, Count, Event, EventGuide, Location, Site, Vehicle, ) class NestedObjectsTests(TestCase): """ Tests for ``NestedObject`` utility collection. """ @classmethod def setUpTestData(cls): cls.n = NestedObjects(using=DEFAULT_DB_ALIAS) cls.objs = [Count.objects.create(num=i) for i in range(5)] def _check(self, target): self.assertEqual(self.n.nested(lambda obj: obj.num), target) def _connect(self, i, j): self.objs[i].parent = self.objs[j] self.objs[i].save() def _collect(self, *indices): self.n.collect([self.objs[i] for i in indices]) def test_unrelated_roots(self): self._connect(2, 1) self._collect(0) self._collect(1) self._check([0, 1, [2]]) def test_siblings(self): self._connect(1, 0) self._connect(2, 0) self._collect(0) self._check([0, [1, 2]]) def test_non_added_parent(self): self._connect(0, 1) self._collect(0) self._check([0]) def test_cyclic(self): self._connect(0, 2) self._connect(1, 0) self._connect(2, 1) self._collect(0) self._check([0, [1, [2]]]) def test_queries(self): self._connect(1, 0) self._connect(2, 0) # 1 query to fetch all children of 0 (1 and 2) # 1 query to fetch all children of 1 and 2 (none) # Should not require additional queries to populate the nested graph. self.assertNumQueries(2, self._collect, 0) def test_on_delete_do_nothing(self): """ The nested collector doesn't query for DO_NOTHING objects. """ n = NestedObjects(using=DEFAULT_DB_ALIAS) objs = [Event.objects.create()] EventGuide.objects.create(event=objs[0]) with self.assertNumQueries(2): # One for Location, one for Guest, and no query for EventGuide n.collect(objs) def test_relation_on_abstract(self): """ NestedObjects.collect() doesn't trip (AttributeError) on the special notation for relations on abstract models (related_name that contains %(app_label)s and/or %(class)s) (#21846). 
""" n = NestedObjects(using=DEFAULT_DB_ALIAS) Car.objects.create() n.collect([Vehicle.objects.first()]) class UtilsTests(SimpleTestCase): empty_value = '-empty-' def test_values_from_lookup_field(self): """ Regression test for #12654: lookup_field """ SITE_NAME = 'example.com' TITLE_TEXT = 'Some title' CREATED_DATE = datetime.min ADMIN_METHOD = 'admin method' SIMPLE_FUNCTION = 'function' INSTANCE_ATTRIBUTE = 'attr' class MockModelAdmin: def get_admin_value(self, obj): return ADMIN_METHOD def simple_function(obj): return SIMPLE_FUNCTION site_obj = Site(domain=SITE_NAME) article = Article( site=site_obj, title=TITLE_TEXT, created=CREATED_DATE, ) article.non_field = INSTANCE_ATTRIBUTE verifications = ( ('site', SITE_NAME), ('created', localize(CREATED_DATE)), ('title', TITLE_TEXT), ('get_admin_value', ADMIN_METHOD), (simple_function, SIMPLE_FUNCTION), ('test_from_model', article.test_from_model()), ('non_field', INSTANCE_ATTRIBUTE) ) mock_admin = MockModelAdmin() for name, value in verifications: field, attr, resolved_value = lookup_field(name, article, mock_admin) if field is not None: resolved_value = display_for_field(resolved_value, field, self.empty_value) self.assertEqual(value, resolved_value) def test_null_display_for_field(self): """ Regression test for #12550: display_for_field should handle None value. """ display_value = display_for_field(None, models.CharField(), self.empty_value) self.assertEqual(display_value, self.empty_value) display_value = display_for_field(None, models.CharField( choices=( (None, "test_none"), ) ), self.empty_value) self.assertEqual(display_value, "test_none") display_value = display_for_field(None, models.DateField(), self.empty_value) self.assertEqual(display_value, self.empty_value) display_value = display_for_field(None, models.TimeField(), self.empty_value) self.assertEqual(display_value, self.empty_value) display_value = display_for_field(None, models.BooleanField(null=True), self.empty_value) expected = '<img src="%sadmin/img/icon-unknown.svg" alt="None" />' % settings.STATIC_URL self.assertHTMLEqual(display_value, expected) display_value = display_for_field(None, models.DecimalField(), self.empty_value) self.assertEqual(display_value, self.empty_value) display_value = display_for_field(None, models.FloatField(), self.empty_value) self.assertEqual(display_value, self.empty_value) display_value = display_for_field(None, models.JSONField(), self.empty_value) self.assertEqual(display_value, self.empty_value) def test_json_display_for_field(self): tests = [ ({'a': {'b': 'c'}}, '{"a": {"b": "c"}}'), (['a', 'b'], '["a", "b"]'), ('a', '"a"'), ({'a': '你好 世界'}, '{"a": "你好 世界"}'), ({('a', 'b'): 'c'}, "{('a', 'b'): 'c'}"), # Invalid JSON. 
] for value, display_value in tests: with self.subTest(value=value): self.assertEqual( display_for_field(value, models.JSONField(), self.empty_value), display_value, ) def test_number_formats_display_for_field(self): display_value = display_for_field(12345.6789, models.FloatField(), self.empty_value) self.assertEqual(display_value, '12345.6789') display_value = display_for_field(Decimal('12345.6789'), models.DecimalField(), self.empty_value) self.assertEqual(display_value, '12345.6789') display_value = display_for_field(12345, models.IntegerField(), self.empty_value) self.assertEqual(display_value, '12345') @override_settings(USE_THOUSAND_SEPARATOR=True) def test_number_formats_with_thousand_separator_display_for_field(self): display_value = display_for_field(12345.6789, models.FloatField(), self.empty_value) self.assertEqual(display_value, '12,345.6789') display_value = display_for_field(Decimal('12345.6789'), models.DecimalField(), self.empty_value) self.assertEqual(display_value, '12,345.6789') display_value = display_for_field(12345, models.IntegerField(), self.empty_value) self.assertEqual(display_value, '12,345') def test_list_display_for_value(self): display_value = display_for_value([1, 2, 3], self.empty_value) self.assertEqual(display_value, '1, 2, 3') display_value = display_for_value([1, 2, 'buckle', 'my', 'shoe'], self.empty_value) self.assertEqual(display_value, '1, 2, buckle, my, shoe') @override_settings(USE_THOUSAND_SEPARATOR=True) def test_list_display_for_value_boolean(self): self.assertEqual( display_for_value(True, '', boolean=True), '<img src="/static/admin/img/icon-yes.svg" alt="True">' ) self.assertEqual( display_for_value(False, '', boolean=True), '<img src="/static/admin/img/icon-no.svg" alt="False">' ) self.assertEqual(display_for_value(True, ''), 'True') self.assertEqual(display_for_value(False, ''), 'False') def test_label_for_field(self): """ Tests for label_for_field """ self.assertEqual( label_for_field("title", Article), "title" ) self.assertEqual( label_for_field("hist", Article), "History" ) self.assertEqual( label_for_field("hist", Article, return_attr=True), ("History", None) ) self.assertEqual( label_for_field("__str__", Article), "article" ) with self.assertRaisesMessage(AttributeError, "Unable to lookup 'unknown' on Article"): label_for_field("unknown", Article) def test_callable(obj): return "nothing" self.assertEqual( label_for_field(test_callable, Article), "Test callable" ) self.assertEqual( label_for_field(test_callable, Article, return_attr=True), ("Test callable", test_callable) ) self.assertEqual( label_for_field("test_from_model", Article), "Test from model" ) self.assertEqual( label_for_field("test_from_model", Article, return_attr=True), ("Test from model", Article.test_from_model) ) self.assertEqual( label_for_field("test_from_model_with_override", Article), "not What you Expect" ) self.assertEqual( label_for_field(lambda x: "nothing", Article), "--" ) self.assertEqual(label_for_field('site_id', Article), 'Site id') class MockModelAdmin: @admin.display(description='not Really the Model') def test_from_model(self, obj): return "nothing" self.assertEqual( label_for_field("test_from_model", Article, model_admin=MockModelAdmin), "not Really the Model" ) self.assertEqual( label_for_field("test_from_model", Article, model_admin=MockModelAdmin, return_attr=True), ("not Really the Model", MockModelAdmin.test_from_model) ) def test_label_for_field_form_argument(self): class ArticleForm(forms.ModelForm): extra_form_field = forms.BooleanField() class 
Meta: fields = '__all__' model = Article self.assertEqual( label_for_field('extra_form_field', Article, form=ArticleForm()), 'Extra form field' ) msg = "Unable to lookup 'nonexistent' on Article or ArticleForm" with self.assertRaisesMessage(AttributeError, msg): label_for_field('nonexistent', Article, form=ArticleForm()), def test_label_for_property(self): class MockModelAdmin: @property @admin.display(description='property short description') def test_from_property(self): return "this if from property" self.assertEqual( label_for_field("test_from_property", Article, model_admin=MockModelAdmin), 'property short description' ) def test_help_text_for_field(self): tests = [ ('article', ''), ('unknown', ''), ('hist', 'History help text'), ] for name, help_text in tests: with self.subTest(name=name): self.assertEqual(help_text_for_field(name, Article), help_text) def test_related_name(self): """ Regression test for #13963 """ self.assertEqual( label_for_field('location', Event, return_attr=True), ('location', None), ) self.assertEqual( label_for_field('event', Location, return_attr=True), ('awesome event', None), ) self.assertEqual( label_for_field('guest', Event, return_attr=True), ('awesome guest', None), ) def test_safestring_in_field_label(self): # safestring should not be escaped class MyForm(forms.Form): text = forms.CharField(label=mark_safe('<i>text</i>')) cb = forms.BooleanField(label=mark_safe('<i>cb</i>')) form = MyForm() self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(), '<label for="id_text" class="required inline"><i>text</i>:</label>') self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(), '<label for="id_cb" class="vCheckboxLabel required inline"><i>cb</i></label>') # normal strings needs to be escaped class MyForm(forms.Form): text = forms.CharField(label='&text') cb = forms.BooleanField(label='&cb') form = MyForm() self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(), '<label for="id_text" class="required inline">&amp;text:</label>') self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(), '<label for="id_cb" class="vCheckboxLabel required inline">&amp;cb</label>') def test_flatten(self): flat_all = ['url', 'title', 'content', 'sites'] inputs = ( ((), []), (('url', 'title', ('content', 'sites')), flat_all), (('url', 'title', 'content', 'sites'), flat_all), ((('url', 'title'), ('content', 'sites')), flat_all) ) for orig, expected in inputs: self.assertEqual(flatten(orig), expected) def test_flatten_fieldsets(self): """ Regression test for #18051 """ fieldsets = ( (None, { 'fields': ('url', 'title', ('content', 'sites')) }), ) self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites']) fieldsets = ( (None, { 'fields': ('url', 'title', ['content', 'sites']) }), ) self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites']) def test_quote(self): self.assertEqual(quote('something\nor\nother'), 'something_0Aor_0Aother')
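
# --- Illustrative usage sketch (not part of the original test module) ---
# A minimal example, under stated assumptions, of how the admin utilities
# verified above combine: lookup_field() resolves a field name against an
# instance and display_for_field() renders the value the way the admin
# changelist does. ``Article`` is the test model used throughout this file;
# the helper name ``changelist_cell`` is hypothetical.
from django.contrib.admin.utils import (
    display_for_field, label_for_field, lookup_field,
)


def changelist_cell(obj, name, empty_value='-empty-'):
    """Return (column header, rendered value) for one model field."""
    header = label_for_field(name, obj.__class__)
    field, attr, value = lookup_field(name, obj, model_admin=None)
    if field is not None:
        value = display_for_field(value, field, empty_value)
    return header, value

# e.g. changelist_cell(Article(title='Some title'), 'title')
# -> ('title', 'Some title')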
57a26959ec40551b9a51afb89d783761296788ec387c24b1c4c48a717e2f23a9
import json from datetime import datetime from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry from django.contrib.admin.utils import quote from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.test import TestCase, override_settings from django.urls import reverse from django.utils import translation from django.utils.html import escape from .models import Article, ArticleProxy, Site @override_settings(ROOT_URLCONF='admin_utils.urls') class LogEntryTests(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_superuser(username='super', password='secret', email='[email protected]') cls.site = Site.objects.create(domain='example.org') cls.a1 = Article.objects.create( site=cls.site, title="Title", created=datetime(2008, 3, 12, 11, 54), ) content_type_pk = ContentType.objects.get_for_model(Article).pk LogEntry.objects.log_action( cls.user.pk, content_type_pk, cls.a1.pk, repr(cls.a1), CHANGE, change_message='Changed something' ) def setUp(self): self.client.force_login(self.user) def test_logentry_save(self): """ LogEntry.action_time is a timestamp of the date when the entry was created. It shouldn't be updated on a subsequent save(). """ logentry = LogEntry.objects.get(content_type__model__iexact="article") action_time = logentry.action_time logentry.save() self.assertEqual(logentry.action_time, action_time) def test_logentry_change_message(self): """ LogEntry.change_message is stored as a dumped JSON structure to be able to get the message dynamically translated at display time. """ post_data = { 'site': self.site.pk, 'title': 'Changed', 'hist': 'Some content', 'created_0': '2008-03-12', 'created_1': '11:54', } change_url = reverse('admin:admin_utils_article_change', args=[quote(self.a1.pk)]) response = self.client.post(change_url, post_data) self.assertRedirects(response, reverse('admin:admin_utils_article_changelist')) logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id') self.assertEqual(logentry.get_change_message(), 'Changed Title and History.') with translation.override('fr'): self.assertEqual(logentry.get_change_message(), 'Modification de Title et Historique.') add_url = reverse('admin:admin_utils_article_add') post_data['title'] = 'New' response = self.client.post(add_url, post_data) self.assertRedirects(response, reverse('admin:admin_utils_article_changelist')) logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id') self.assertEqual(logentry.get_change_message(), 'Added.') with translation.override('fr'): self.assertEqual(logentry.get_change_message(), 'Ajout.') def test_logentry_change_message_not_json(self): """LogEntry.change_message was a string before Django 1.10.""" logentry = LogEntry(change_message='non-JSON string') self.assertEqual(logentry.get_change_message(), logentry.change_message) def test_logentry_change_message_localized_datetime_input(self): """ Localized date/time inputs shouldn't affect changed form data detection. 
""" post_data = { 'site': self.site.pk, 'title': 'Changed', 'hist': 'Some content', 'created_0': '12/03/2008', 'created_1': '11:54', } with translation.override('fr'): change_url = reverse('admin:admin_utils_article_change', args=[quote(self.a1.pk)]) response = self.client.post(change_url, post_data) self.assertRedirects(response, reverse('admin:admin_utils_article_changelist')) logentry = LogEntry.objects.filter(content_type__model__iexact='article').latest('id') self.assertEqual(logentry.get_change_message(), 'Changed Title and History.') def test_logentry_change_message_formsets(self): """ All messages for changed formsets are logged in a change message. """ a2 = Article.objects.create( site=self.site, title="Title second article", created=datetime(2012, 3, 18, 11, 54), ) post_data = { 'domain': 'example.com', # domain changed 'admin_articles-TOTAL_FORMS': '5', 'admin_articles-INITIAL_FORMS': '2', 'admin_articles-MIN_NUM_FORMS': '0', 'admin_articles-MAX_NUM_FORMS': '1000', # Changed title for 1st article 'admin_articles-0-id': str(self.a1.pk), 'admin_articles-0-site': str(self.site.pk), 'admin_articles-0-title': 'Changed Title', # Second article is deleted 'admin_articles-1-id': str(a2.pk), 'admin_articles-1-site': str(self.site.pk), 'admin_articles-1-title': 'Title second article', 'admin_articles-1-DELETE': 'on', # A new article is added 'admin_articles-2-site': str(self.site.pk), 'admin_articles-2-title': 'Added article', } change_url = reverse('admin:admin_utils_site_change', args=[quote(self.site.pk)]) response = self.client.post(change_url, post_data) self.assertRedirects(response, reverse('admin:admin_utils_site_changelist')) self.assertQuerysetEqual(Article.objects.filter(pk=a2.pk), []) logentry = LogEntry.objects.filter(content_type__model__iexact='site').latest('action_time') self.assertEqual( json.loads(logentry.change_message), [ {"changed": {"fields": ["Domain"]}}, {"added": {"object": "Added article", "name": "article"}}, {"changed": {"fields": ["Title", "not_a_form_field"], "object": "Changed Title", "name": "article"}}, {"deleted": {"object": "Title second article", "name": "article"}}, ] ) self.assertEqual( logentry.get_change_message(), 'Changed Domain. Added article “Added article”. ' 'Changed Title and not_a_form_field for article “Changed Title”. ' 'Deleted article “Title second article”.' ) with translation.override('fr'): self.assertEqual( logentry.get_change_message(), "Modification de Domain. Ajout de article « Added article ». " "Modification de Title et not_a_form_field pour l'objet " "article « Changed Title ». " "Suppression de article « Title second article »." ) def test_logentry_get_edited_object(self): """ LogEntry.get_edited_object() returns the edited object of a LogEntry object. """ logentry = LogEntry.objects.get(content_type__model__iexact="article") edited_obj = logentry.get_edited_object() self.assertEqual(logentry.object_id, str(edited_obj.pk)) def test_logentry_get_admin_url(self): """ LogEntry.get_admin_url returns a URL to edit the entry's object or None for nonexistent (possibly deleted) models. 
""" logentry = LogEntry.objects.get(content_type__model__iexact='article') expected_url = reverse('admin:admin_utils_article_change', args=(quote(self.a1.pk),)) self.assertEqual(logentry.get_admin_url(), expected_url) self.assertIn('article/%d/change/' % self.a1.pk, logentry.get_admin_url()) logentry.content_type.model = "nonexistent" self.assertIsNone(logentry.get_admin_url()) def test_logentry_unicode(self): log_entry = LogEntry() log_entry.action_flag = ADDITION self.assertTrue(str(log_entry).startswith('Added ')) log_entry.action_flag = CHANGE self.assertTrue(str(log_entry).startswith('Changed ')) log_entry.action_flag = DELETION self.assertTrue(str(log_entry).startswith('Deleted ')) # Make sure custom action_flags works log_entry.action_flag = 4 self.assertEqual(str(log_entry), 'LogEntry Object') def test_logentry_repr(self): logentry = LogEntry.objects.first() self.assertEqual(repr(logentry), str(logentry.action_time)) def test_log_action(self): content_type_pk = ContentType.objects.get_for_model(Article).pk log_entry = LogEntry.objects.log_action( self.user.pk, content_type_pk, self.a1.pk, repr(self.a1), CHANGE, change_message='Changed something else', ) self.assertEqual(log_entry, LogEntry.objects.latest('id')) def test_recentactions_without_content_type(self): """ If a LogEntry is missing content_type it will not display it in span tag under the hyperlink. """ response = self.client.get(reverse('admin:index')) link = reverse('admin:admin_utils_article_change', args=(quote(self.a1.pk),)) should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(repr(self.a1))) self.assertContains(response, should_contain) should_contain = "Article" self.assertContains(response, should_contain) logentry = LogEntry.objects.get(content_type__model__iexact='article') # If the log entry doesn't have a content type it should still be # possible to view the Recent Actions part (#10275). logentry.content_type = None logentry.save() should_contain = should_contain.encode() counted_presence_before = response.content.count(should_contain) response = self.client.get(reverse('admin:index')) counted_presence_after = response.content.count(should_contain) self.assertEqual(counted_presence_before - 1, counted_presence_after) def test_proxy_model_content_type_is_used_for_log_entries(self): """ Log entries for proxy models should have the proxy model's contenttype (#21084). 
""" proxy_content_type = ContentType.objects.get_for_model(ArticleProxy, for_concrete_model=False) post_data = { 'site': self.site.pk, 'title': "Foo", 'hist': "Bar", 'created_0': '2015-12-25', 'created_1': '00:00', } changelist_url = reverse('admin:admin_utils_articleproxy_changelist') # add proxy_add_url = reverse('admin:admin_utils_articleproxy_add') response = self.client.post(proxy_add_url, post_data) self.assertRedirects(response, changelist_url) proxy_addition_log = LogEntry.objects.latest('id') self.assertEqual(proxy_addition_log.action_flag, ADDITION) self.assertEqual(proxy_addition_log.content_type, proxy_content_type) # change article_id = proxy_addition_log.object_id proxy_change_url = reverse('admin:admin_utils_articleproxy_change', args=(article_id,)) post_data['title'] = 'New' response = self.client.post(proxy_change_url, post_data) self.assertRedirects(response, changelist_url) proxy_change_log = LogEntry.objects.latest('id') self.assertEqual(proxy_change_log.action_flag, CHANGE) self.assertEqual(proxy_change_log.content_type, proxy_content_type) # delete proxy_delete_url = reverse('admin:admin_utils_articleproxy_delete', args=(article_id,)) response = self.client.post(proxy_delete_url, {'post': 'yes'}) self.assertRedirects(response, changelist_url) proxy_delete_log = LogEntry.objects.latest('id') self.assertEqual(proxy_delete_log.action_flag, DELETION) self.assertEqual(proxy_delete_log.content_type, proxy_content_type) def test_action_flag_choices(self): tests = ((1, 'Addition'), (2, 'Change'), (3, 'Deletion')) for action_flag, display_name in tests: with self.subTest(action_flag=action_flag): log = LogEntry(action_flag=action_flag) self.assertEqual(log.get_action_flag_display(), display_name)
4d560cc14b199a0719e91d380dfd6f29570f07ca32061fd4e477d9aea023b520
import datetime import pickle import sys import unittest from operator import attrgetter from threading import Lock from django.core.exceptions import EmptyResultSet, FieldError from django.db import DEFAULT_DB_ALIAS, connection from django.db.models import Count, Exists, F, Max, OuterRef, Q from django.db.models.expressions import RawSQL from django.db.models.sql.constants import LOUTER from django.db.models.sql.where import NothingNode, WhereNode from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import ( FK1, Annotation, Article, Author, BaseA, BaseUser, Book, CategoryItem, CategoryRelationship, Celebrity, Channel, Chapter, Child, ChildObjectA, Classroom, CommonMixedCaseForeignKeys, Company, Cover, CustomPk, CustomPkTag, DateTimePK, Detail, DumbCategory, Eaten, Employment, ExtraInfo, Fan, Food, Identifier, Individual, Item, Job, JobResponsibilities, Join, LeafA, LeafB, LoopX, LoopZ, ManagedModel, Member, MixedCaseDbColumnCategoryItem, MixedCaseFieldCategoryItem, ModelA, ModelB, ModelC, ModelD, MyObject, NamedCategory, Node, Note, NullableName, Number, ObjectA, ObjectB, ObjectC, OneToOneCategory, Order, OrderItem, Page, Paragraph, Person, Plaything, PointerA, Program, ProxyCategory, ProxyObjectA, ProxyObjectB, Ranking, Related, RelatedIndividual, RelatedObject, Report, ReportComment, ReservedName, Responsibility, School, SharedConnection, SimpleCategory, SingleObject, SpecialCategory, Staff, StaffUser, Student, Tag, Task, Teacher, Ticket21203Child, Ticket21203Parent, Ticket23605A, Ticket23605B, Ticket23605C, TvChef, Valid, X, ) class Queries1Tests(TestCase): @classmethod def setUpTestData(cls): cls.nc1 = generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic) cls.t3 = Tag.objects.create(name='t3', parent=cls.t1) cls.t4 = Tag.objects.create(name='t4', parent=cls.t3) cls.t5 = Tag.objects.create(name='t5', parent=cls.t3) cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) cls.n3 = Note.objects.create(note='n3', misc='foo', id=3, negate=False) cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1) cls.ann1.notes.add(cls.n1) ann2 = Annotation.objects.create(name='a2', tag=cls.t4) ann2.notes.add(cls.n2, cls.n3) # Create these out of order so that sorting by 'id' will be different to sorting # by 'info'. Helps detect some problems later. 
cls.e2 = ExtraInfo.objects.create(info='e2', note=cls.n2, value=41, filterable=False) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2) cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2) cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0) cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0) time3 = datetime.datetime(2007, 12, 20, 22, 25, 0) time4 = datetime.datetime(2007, 12, 20, 21, 0, 0) cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3) cls.i1.tags.set([cls.t1, cls.t2]) cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=cls.n2) cls.i2.tags.set([cls.t1, cls.t3]) cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3) cls.i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3) cls.i4.tags.set([cls.t4]) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering # will be rank3, rank2, rank1. cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2) cls.c1 = Cover.objects.create(title="first", item=cls.i4) cls.c2 = Cover.objects.create(title="second", item=cls.i2) def test_subquery_condition(self): qs1 = Tag.objects.filter(pk__lte=0) qs2 = Tag.objects.filter(parent__in=qs1) qs3 = Tag.objects.filter(parent__in=qs2) self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'}) self.assertIn('v0', str(qs3.query).lower()) qs4 = qs3.filter(parent__in=qs1) self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'}) # It is possible to reuse U for the second subquery, no need to use W. self.assertNotIn('w0', str(qs4.query).lower()) # So, 'U0."id"' is referenced in SELECT and WHERE twice. self.assertEqual(str(qs4.query).lower().count('u0.'), 4) def test_ticket1050(self): self.assertSequenceEqual( Item.objects.filter(tags__isnull=True), [self.i3], ) self.assertSequenceEqual( Item.objects.filter(tags__id__isnull=True), [self.i3], ) def test_ticket1801(self): self.assertSequenceEqual( Author.objects.filter(item=self.i2), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i3), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3), [self.a2], ) def test_ticket2306(self): # Checking that no join types are "left outer" joins. query = Item.objects.filter(tags=self.t2).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).order_by('name'), [self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)), [self.i1], ) # Each filter call is processed "at once" against a single table, so this is # different from the previous example as it tries to find tags that are two # things at once (rather than two tags). 
self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)), [] ) self.assertSequenceEqual( Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)), [] ) qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id) self.assertSequenceEqual(list(qs), [self.a2]) self.assertEqual(2, qs.query.count_active_tables(), 2) qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id) self.assertEqual(qs.query.count_active_tables(), 3) def test_ticket4464(self): self.assertSequenceEqual( Item.objects.filter(tags=self.t1).filter(tags=self.t2), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'), [self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3), [self.i2], ) # Make sure .distinct() works with slicing (this was broken in Oracle). self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3], [self.i1, self.i1, self.i2], ) self.assertSequenceEqual( Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3], [self.i1, self.i2], ) def test_tickets_2080_3592(self): self.assertSequenceEqual( Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(item__name='one') | Q(name='a3')), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(name='a3') | Q(item__name='one')), [self.a1, self.a3], ) self.assertSequenceEqual( Author.objects.filter(Q(item__name='three') | Q(report__name='r3')), [self.a2], ) def test_ticket6074(self): # Merging two empty result sets shouldn't leave a queryset with no constraints # (which would match everything). self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), []) self.assertSequenceEqual( Author.objects.filter(Q(id__in=[]) | Q(id__in=[])), [] ) def test_tickets_1878_2939(self): self.assertEqual(Item.objects.values('creator').distinct().count(), 3) # Create something with a duplicate 'name' so that we can test multi-column # cases (which require some tricky SQL transformations under the covers). 
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1) xx.save() self.assertEqual( Item.objects.exclude(name='two').values('creator', 'name').distinct().count(), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name', 'foo') .distinct() .count() ), 4 ) self.assertEqual( ( Item.objects .exclude(name='two') .extra(select={'foo': '%s'}, select_params=(1,)) .values('creator', 'name') .distinct() .count() ), 4 ) xx.delete() def test_ticket7323(self): self.assertEqual(Item.objects.values('creator', 'name').count(), 4) def test_ticket2253(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2]) self.assertSequenceEqual(q2, [self.i1]) self.assertSequenceEqual( (q1 | q2).order_by('name'), [self.i4, self.i1, self.i3, self.i2], ) self.assertSequenceEqual((q1 & q2).order_by('name'), [self.i1]) q1 = Item.objects.filter(tags=self.t1) q2 = Item.objects.filter(note=self.n3, tags=self.t2) q3 = Item.objects.filter(creator=self.a4) self.assertSequenceEqual( ((q1 & q2) | q3).order_by('name'), [self.i4, self.i1], ) def test_order_by_tables(self): q1 = Item.objects.order_by('name') q2 = Item.objects.filter(id=self.i1.id) list(q2) combined_query = (q1 & q2).order_by('name').query self.assertEqual(len([ t for t in combined_query.alias_map if combined_query.alias_refcount[t] ]), 1) def test_order_by_join_unref(self): """ This test is related to the above one, testing that there aren't old JOINs in the query. """ qs = Celebrity.objects.order_by('greatest_fan__fan_of') self.assertIn('OUTER JOIN', str(qs.query)) qs = qs.order_by('id') self.assertNotIn('OUTER JOIN', str(qs.query)) def test_get_clears_ordering(self): """ get() should clear ordering for optimization purposes. """ with CaptureQueriesContext(connection) as captured_queries: Author.objects.order_by('name').get(pk=self.a1.pk) self.assertNotIn('order by', captured_queries[0]['sql'].lower()) def test_tickets_4088_4306(self): self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1]) self.assertSequenceEqual( Report.objects.filter(creator__num=1001), [self.r1] ) self.assertSequenceEqual(Report.objects.filter(creator__id=1001), []) self.assertSequenceEqual( Report.objects.filter(creator__id=self.a1.id), [self.r1] ) self.assertSequenceEqual( Report.objects.filter(creator__name='a1'), [self.r1] ) def test_ticket4510(self): self.assertSequenceEqual( Author.objects.filter(report__name='r1'), [self.a1], ) def test_ticket7378(self): self.assertSequenceEqual(self.a1.report_set.all(), [self.r1]) def test_tickets_5324_6704(self): self.assertSequenceEqual( Item.objects.filter(tags__name='t4'), [self.i4], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct(), [self.i1, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(), [self.i2, self.i3, self.i1], ) self.assertSequenceEqual( Author.objects.exclude(item__name='one').distinct().order_by('name'), [self.a2, self.a3, self.a4], ) # Excluding across a m2m relation when there is more than one related # object associated was problematic. self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'), [self.i3], ) # Excluding from a relation that cannot be NULL should not use outer joins. 
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()]) # Similarly, when one of the joins cannot possibly, ever, involve NULL # values (Author -> ExtraInfo, in the following), it should never be # promoted to a left outer join. So the following query should only # involve one "left outer" join (Author -> Item is 0-to-many). qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)) self.assertEqual( len([ x for x in qs.query.alias_map.values() if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias] ]), 1 ) # The previous changes shouldn't affect nullable foreign key joins. self.assertSequenceEqual( Tag.objects.filter(parent__isnull=True).order_by('name'), [self.t1] ) self.assertSequenceEqual( Tag.objects.exclude(parent__isnull=True).order_by('name'), [self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'), [self.t4, self.t5], ) def test_ticket2091(self): t = Tag.objects.get(name='t4') self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4]) def test_avoid_infinite_loop_on_too_many_subqueries(self): x = Tag.objects.filter(pk=1) local_recursion_limit = sys.getrecursionlimit() // 16 msg = 'Maximum recursion depth exceeded: too many subqueries.' with self.assertRaisesMessage(RecursionError, msg): for i in range(local_recursion_limit + 2): x = Tag.objects.filter(pk__in=x) def test_reasonable_number_of_subq_aliases(self): x = Tag.objects.filter(pk=1) for _ in range(20): x = Tag.objects.filter(pk__in=x) self.assertEqual( x.query.subq_aliases, { 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN', } ) def test_heterogeneous_qs_combination(self): # Combining querysets built on different models should behave in a well-defined # fashion. We raise an error. msg = 'Cannot combine queries on two different base models.' with self.assertRaisesMessage(TypeError, msg): Author.objects.all() & Tag.objects.all() with self.assertRaisesMessage(TypeError, msg): Author.objects.all() | Tag.objects.all() def test_ticket3141(self): self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4) self.assertEqual( Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(), 4 ) def test_ticket2400(self): self.assertSequenceEqual( Author.objects.filter(item__isnull=True), [self.a3], ) self.assertSequenceEqual( Tag.objects.filter(item__isnull=True), [self.t5], ) def test_ticket2496(self): self.assertSequenceEqual( Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1], [self.i4], ) def test_error_raised_on_filter_with_dictionary(self): with self.assertRaisesMessage(FieldError, 'Cannot parse keyword query as dict'): Note.objects.filter({'note': 'n1', 'misc': 'foo'}) def test_tickets_2076_7256(self): # Ordering on related tables should be possible, even if the table is # not otherwise involved. 
self.assertSequenceEqual( Item.objects.order_by('note__note', 'name'), [self.i2, self.i4, self.i1, self.i3], ) # Ordering on a related field should use the remote model's default # ordering as a final step. self.assertSequenceEqual( Author.objects.order_by('extra', '-name'), [self.a2, self.a1, self.a4, self.a3], ) # Using remote model default ordering can span multiple models (in this # case, Cover is ordered by Item's default, which uses Note's default). self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2]) # If the remote model does not have a default ordering, we order by its 'id' # field. self.assertSequenceEqual( Item.objects.order_by('creator', 'name'), [self.i1, self.i3, self.i2, self.i4], ) # Ordering by a many-valued attribute (e.g. a many-to-many or reverse # ForeignKey) is legal, but the results might not make sense. That # isn't Django's problem. Garbage in, garbage out. self.assertSequenceEqual( Item.objects.filter(tags__isnull=False).order_by('tags', 'id'), [self.i1, self.i2, self.i1, self.i2, self.i4], ) # If we replace the default ordering, Django adjusts the required # tables automatically. Item normally requires a join with Note to do # the default ordering, but that isn't needed here. qs = Item.objects.order_by('name') self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2]) self.assertEqual(len(qs.query.alias_map), 1) def test_tickets_2874_3002(self): qs = Item.objects.select_related().order_by('note__note', 'name') self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3]) # This is also a good select_related() test because there are multiple # Note entries in the SQL. The two Note items should be different. self.assertEqual(repr(qs[0].note), '<Note: n2>') self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>') def test_ticket3037(self): self.assertSequenceEqual( Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')), [self.i4], ) def test_tickets_5321_7070(self): # Ordering columns must be included in the output columns. Note that # this means results that might otherwise be distinct are not (if there # are multiple values in the ordering cols), as in this example. This # isn't a bug; it's a warning to be careful with the selection of # ordering columns. self.assertSequenceEqual( Note.objects.values('misc').distinct().order_by('note', '-misc'), [{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}] ) def test_ticket4358(self): # If you don't pass any fields to values(), relation fields are # returned as "foo_id" keys, not "foo". For consistency, you should be # able to pass "foo_id" in the fields list and have it work, too. We # actually allow both "foo" and "foo_id". # The *_id version is returned by default. self.assertIn('note_id', ExtraInfo.objects.values()[0]) # You can also pass it in explicitly. self.assertSequenceEqual(ExtraInfo.objects.values('note_id'), [{'note_id': 1}, {'note_id': 2}]) # ...or use the field name. self.assertSequenceEqual(ExtraInfo.objects.values('note'), [{'note': 1}, {'note': 2}]) def test_ticket6154(self): # Multiple filter statements are joined using "AND" all the time. 
self.assertSequenceEqual( Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)), [self.a1], ) self.assertSequenceEqual( Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id), [self.a1], ) def test_ticket6981(self): self.assertSequenceEqual( Tag.objects.select_related('parent').order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_ticket9926(self): self.assertSequenceEqual( Tag.objects.select_related("parent", "category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) self.assertSequenceEqual( Tag.objects.select_related('parent', "parent__category").order_by('name'), [self.t1, self.t2, self.t3, self.t4, self.t5], ) def test_tickets_6180_6203(self): # Dates with limits and/or counts self.assertEqual(Item.objects.count(), 4) self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1) self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2) self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2) self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0)) def test_tickets_7087_12242(self): # Dates with extra select columns self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(select={'a': 1}), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(select={'a': 1}).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)], ) name = "one" self.assertSequenceEqual( Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]), [datetime.datetime(2007, 12, 19, 0, 0)], ) self.assertSequenceEqual( Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_ticket7155(self): # Nullable dates self.assertSequenceEqual( Item.objects.datetimes('modified', 'day'), [datetime.datetime(2007, 12, 19, 0, 0)], ) def test_order_by_rawsql(self): self.assertSequenceEqual( Item.objects.values('note__note').order_by( RawSQL('queries_note.note', ()), 'id', ), [ {'note__note': 'n2'}, {'note__note': 'n3'}, {'note__note': 'n3'}, {'note__note': 'n3'}, ], ) def test_ticket7096(self): # Make sure exclude() with multiple conditions continues to work. self.assertSequenceEqual( Tag.objects.filter(parent=self.t1, name='t3').order_by('name'), [self.t3], ) self.assertSequenceEqual( Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'), [self.t1, self.t2, self.t4, self.t5], ) self.assertSequenceEqual( Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'), [self.i4, self.i3], ) # More twisted cases, involving nested negations. self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one')), [self.i1], ) self.assertSequenceEqual( Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'), [self.i2], ) self.assertSequenceEqual( Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'), [self.i4, self.i1, self.i3], ) def test_tickets_7204_7506(self): # Make sure querysets with related fields can be pickled. If this # doesn't crash, it's a Good Thing. pickle.dumps(Item.objects.all()) def test_ticket7813(self): # We should also be able to pickle things that use select_related(). 
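        # (The select_related() configuration lives on the underlying Query
        # object, which is what gets pickled below.)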
# The only tricky thing here is to ensure that we do the related # selections properly after unpickling. qs = Item.objects.select_related() query = qs.query.get_compiler(qs.db).as_sql()[0] query2 = pickle.loads(pickle.dumps(qs.query)) self.assertEqual( query2.get_compiler(qs.db).as_sql()[0], query ) def test_deferred_load_qs_pickling(self): # Check pickling of deferred-loading querysets qs = Item.objects.defer('name', 'creator') q2 = pickle.loads(pickle.dumps(qs)) self.assertEqual(list(qs), list(q2)) q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL)) self.assertEqual(list(qs), list(q3)) def test_ticket7277(self): self.assertSequenceEqual( self.n1.annotation_set.filter( Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5) ), [self.ann1], ) def test_tickets_7448_7707(self): # Complex objects should be converted to strings before being used in # lookups. self.assertSequenceEqual( Item.objects.filter(created__in=[self.time1, self.time2]), [self.i1, self.i2], ) def test_ticket7235(self): # An EmptyQuerySet should not raise exceptions if it is filtered. Eaten.objects.create(meal='m') q = Eaten.objects.none() with self.assertNumQueries(0): self.assertQuerysetEqual(q.all(), []) self.assertQuerysetEqual(q.filter(meal='m'), []) self.assertQuerysetEqual(q.exclude(meal='m'), []) self.assertQuerysetEqual(q.complex_filter({'pk': 1}), []) self.assertQuerysetEqual(q.select_related('food'), []) self.assertQuerysetEqual(q.annotate(Count('food')), []) self.assertQuerysetEqual(q.order_by('meal', 'food'), []) self.assertQuerysetEqual(q.distinct(), []) self.assertQuerysetEqual( q.extra(select={'foo': "1"}), [] ) self.assertQuerysetEqual(q.reverse(), []) q.query.low_mark = 1 msg = 'Cannot change a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): q.extra(select={'foo': "1"}) self.assertQuerysetEqual(q.defer('meal'), []) self.assertQuerysetEqual(q.only('meal'), []) def test_ticket7791(self): # There were "issues" when ordering and distinct-ing on fields related # via ForeignKeys. self.assertEqual( len(Note.objects.order_by('extrainfo__info').distinct()), 3 ) # Pickling of QuerySets using datetimes() should work. qs = Item.objects.datetimes('created', 'month') pickle.loads(pickle.dumps(qs)) def test_ticket9997(self): # If a ValuesList or Values queryset is passed as an inner query, we # make sure it's only requesting a single value and use that as the # thing to select. self.assertSequenceEqual( Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')), [self.t2, self.t3], ) # Multi-valued values() and values_list() querysets should raise errors. with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id')) with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'): Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id')) def test_ticket9985(self): # qs.values_list(...).values(...) combinations should work. 
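        # The trailing call wins: it determines what is selected, both when
        # the queryset is evaluated directly and when it is used as a
        # subquery value.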
self.assertSequenceEqual( Note.objects.values_list("note", flat=True).values("id").order_by("id"), [{'id': 1}, {'id': 2}, {'id': 3}] ) self.assertSequenceEqual( Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')), [self.ann1], ) def test_ticket10205(self): # When bailing out early because of an empty "__in" filter, we need # to set things up correctly internally so that subqueries can continue properly. self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0) def test_ticket10432(self): # Testing an empty "__in" filter with a generator as the value. def f(): return iter([]) n_obj = Note.objects.all()[0] def g(): yield n_obj.pk self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), []) self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj]) def test_ticket10742(self): # Queries used in an __in clause don't execute subqueries subq = Author.objects.filter(num__lt=3000) qs = Author.objects.filter(pk__in=subq) self.assertSequenceEqual(qs, [self.a1, self.a2]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) qs = Author.objects.exclude(pk__in=subq) self.assertSequenceEqual(qs, [self.a3, self.a4]) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) subq = Author.objects.filter(num__lt=3000) self.assertSequenceEqual( Author.objects.filter(Q(pk__in=subq) & Q(name='a1')), [self.a1], ) # The subquery result cache should not be populated self.assertIsNone(subq._result_cache) def test_ticket7076(self): # Excluding shouldn't eliminate NULL entries. self.assertSequenceEqual( Item.objects.exclude(modified=self.time1).order_by('name'), [self.i4, self.i3, self.i2], ) self.assertSequenceEqual( Tag.objects.exclude(parent__name=self.t1.name), [self.t1, self.t4, self.t5], ) def test_ticket7181(self): # Ordering by related tables should accommodate nullable fields (this # test is a little tricky, since NULL ordering is database dependent. # Instead, we just count the number of results). self.assertEqual(len(Tag.objects.order_by('parent__name')), 5) # Empty querysets can be merged with others. self.assertSequenceEqual( Note.objects.none() | Note.objects.all(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.all() | Note.objects.none(), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), []) self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), []) def test_ticket8439(self): # Complex combinations of conjunctions, disjunctions and nullable # relations. 
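        # The alias_map check at the end of this test asserts that only one
        # referenced LEFT OUTER JOIN survives in the final query.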
self.assertSequenceEqual( Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')), [self.a2], ) self.assertSequenceEqual( Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)), [self.a2], ) self.assertSequenceEqual( Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')), [self.ann1], ) xx = ExtraInfo.objects.create(info='xx', note=self.n3) self.assertSequenceEqual( Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)), [self.n1, self.n3], ) q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query self.assertEqual( len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]), 1 ) def test_ticket17429(self): """ Meta.ordering=None works the same as Meta.ordering=[] """ original_ordering = Tag._meta.ordering Tag._meta.ordering = None try: self.assertCountEqual( Tag.objects.all(), [self.t1, self.t2, self.t3, self.t4, self.t5], ) finally: Tag._meta.ordering = original_ordering def test_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(tags__name='t4'), Item.objects.filter(~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3')))) self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')), Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3')))) def test_nested_exclude(self): self.assertQuerysetEqual( Item.objects.exclude(~Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) def test_double_exclude(self): self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~~Q(tags__name='t4'))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name='t4')), Item.objects.filter(~Q(~Q(tags__name='t4')))) def test_exclude_in(self): self.assertQuerysetEqual( Item.objects.exclude(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~Q(tags__name__in=['t4', 't3']))) self.assertQuerysetEqual( Item.objects.filter(Q(tags__name__in=['t4', 't3'])), Item.objects.filter(~~Q(tags__name__in=['t4', 't3']))) def test_ticket_10790_1(self): # Querying direct fields with isnull should trim the left outer join. # It also should not create INNER JOIN. q = Tag.objects.filter(parent__isnull=True) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.filter(parent__isnull=False) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=True) self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__isnull=False) self.assertSequenceEqual(q, [self.t1]) self.assertNotIn('JOIN', str(q.query)) q = Tag.objects.exclude(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t1, self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_2(self): # Querying across several tables should strip only the last outer join, # while preserving the preceding inner joins. q = Tag.objects.filter(parent__parent__isnull=False) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) # Querying without isnull should not convert anything to left outer join. 
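        # An equality filter can never match a NULL parent, so the nullable
        # foreign key join below stays a plain INNER JOIN.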
q = Tag.objects.filter(parent__parent=self.t1) self.assertSequenceEqual(q, [self.t4, self.t5]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_3(self): # Querying via indirect fields should populate the left outer join q = NamedCategory.objects.filter(tag__isnull=True) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) # join to dumbcategory ptr_id self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertSequenceEqual(q, []) # Querying across several tables should strip only the last join, while # preserving the preceding left outer joins. q = NamedCategory.objects.filter(tag__parent__isnull=True) self.assertEqual(str(q.query).count('INNER JOIN'), 1) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertSequenceEqual(q, [self.nc1]) def test_ticket_10790_4(self): # Querying across m2m field should not strip the m2m table from join. q = Author.objects.filter(item__tags__isnull=True) self.assertSequenceEqual(q, [self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2) self.assertNotIn('INNER JOIN', str(q.query)) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertNotIn('INNER JOIN', str(q.query)) def test_ticket_10790_5(self): # Querying with isnull=False across m2m field should not create outer joins q = Author.objects.filter(item__tags__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 2) q = Author.objects.filter(item__tags__parent__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 3) q = Author.objects.filter(item__tags__parent__parent__isnull=False) self.assertSequenceEqual(q, [self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 4) def test_ticket_10790_6(self): # Querying with isnull=True across m2m field should not create inner joins # and strip last outer join q = Author.objects.filter(item__tags__parent__parent__isnull=True) self.assertSequenceEqual( q, [self.a1, self.a1, self.a2, self.a2, self.a2, self.a3], ) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__tags__parent__isnull=True) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_7(self): # Reverse querying with isnull should not strip the join q = Author.objects.filter(item__isnull=True) self.assertSequenceEqual(q, [self.a3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q.query).count('INNER JOIN'), 0) q = Author.objects.filter(item__isnull=False) self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 1) def test_ticket_10790_8(self): # Querying with combined q-objects should also strip the left outer join q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1)) self.assertSequenceEqual(q, [self.t1, 
self.t2, self.t3]) self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q.query).count('INNER JOIN'), 0) def test_ticket_10790_combine(self): # Combining queries should not re-populate the left outer join q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__isnull=False) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q1 & q2 self.assertSequenceEqual(q3, []) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q2 = Tag.objects.filter(parent=self.t1) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q1 = Tag.objects.filter(parent__isnull=True) q2 = Tag.objects.filter(parent__parent__isnull=True) q3 = q1 | q2 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) q3 = q2 | q1 self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3]) self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(q3.query).count('INNER JOIN'), 0) def test_ticket19672(self): self.assertSequenceEqual( Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)), [self.r1], ) def test_ticket_20250(self): # A negated Q along with an annotated queryset failed in Django 1.4 qs = Author.objects.annotate(Count('item')) qs = qs.filter(~Q(extra__value=0)).order_by('name') self.assertIn('SELECT', str(qs.query)) self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4]) def test_lookup_constraint_fielderror(self): msg = ( "Cannot resolve keyword 'unknown_field' into field. Choices are: " "annotation, category, category_id, children, id, item, " "managedmodel, name, note, parent, parent_id" ) with self.assertRaisesMessage(FieldError, msg): Tag.objects.filter(unknown_field__name='generic') def test_common_mixed_case_foreign_keys(self): """ Valid query should be generated when fields fetched from joined tables include FKs whose names only differ by case. 
""" c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') c3 = SimpleCategory.objects.create(name='c3') category = CategoryItem.objects.create(category=c1) mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(CaTeGoRy=c2) mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(category=c3) CommonMixedCaseForeignKeys.objects.create( category=category, mixed_case_field_category=mixed_case_field_category, mixed_case_db_column_category=mixed_case_db_column_category, ) qs = CommonMixedCaseForeignKeys.objects.values( 'category', 'mixed_case_field_category', 'mixed_case_db_column_category', 'category__category', 'mixed_case_field_category__CaTeGoRy', 'mixed_case_db_column_category__category', ) self.assertTrue(qs.first()) def test_excluded_intermediary_m2m_table_joined(self): self.assertSequenceEqual( Note.objects.filter(~Q(tag__annotation__name=F('note'))), [self.n1, self.n2, self.n3], ) self.assertSequenceEqual( Note.objects.filter(tag__annotation__name='a1').filter(~Q(tag__annotation__name=F('note'))), [], ) def test_field_with_filterable(self): self.assertSequenceEqual( Author.objects.filter(extra=self.e2), [self.a3, self.a4], ) def test_negate_field(self): self.assertSequenceEqual( Note.objects.filter(negate=True), [self.n1, self.n2], ) self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3]) class Queries2Tests(TestCase): @classmethod def setUpTestData(cls): cls.num4 = Number.objects.create(num=4) cls.num8 = Number.objects.create(num=8) cls.num12 = Number.objects.create(num=12) def test_ticket4289(self): # A slight variation on the restricting the filtering choices by the # lookup constraints. self.assertSequenceEqual(Number.objects.filter(num__lt=4), []) self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), []) self.assertSequenceEqual( Number.objects.filter(num__gt=8, num__lt=13), [self.num12], ) self.assertSequenceEqual( Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), [] ) self.assertSequenceEqual( Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)), [self.num8], ) def test_ticket12239(self): # Custom lookups are registered to round float values correctly on gte # and lt IntegerField queries. 
self.assertSequenceEqual( Number.objects.filter(num__gt=11.9), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gt=12), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), []) self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), []) self.assertCountEqual( Number.objects.filter(num__lt=12), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.0), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lt=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=11.9), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12), [self.num12], ) self.assertCountEqual( Number.objects.filter(num__gte=12.0), [self.num12], ) self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), []) self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), []) self.assertCountEqual( Number.objects.filter(num__lte=11.9), [self.num4, self.num8], ) self.assertCountEqual( Number.objects.filter(num__lte=12), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.0), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.1), [self.num4, self.num8, self.num12], ) self.assertCountEqual( Number.objects.filter(num__lte=12.9), [self.num4, self.num8, self.num12], ) def test_ticket7759(self): # Count should work with a partially read result set. count = Number.objects.count() qs = Number.objects.all() def run(): for obj in qs: return qs.count() == count self.assertTrue(run()) class Queries3Tests(TestCase): def test_ticket7107(self): # This shouldn't create an infinite loop. self.assertQuerysetEqual(Valid.objects.all(), []) def test_datetimes_invalid_field(self): # An error should be raised when QuerySet.datetimes() is passed the # wrong type of field. msg = "'name' isn't a DateField, TimeField, or DateTimeField." 
with self.assertRaisesMessage(TypeError, msg): Item.objects.datetimes('name', 'month') def test_ticket22023(self): with self.assertRaisesMessage(TypeError, "Cannot call only() after .values() or .values_list()"): Valid.objects.values().only() with self.assertRaisesMessage(TypeError, "Cannot call defer() after .values() or .values_list()"): Valid.objects.values().defer() class Queries4Tests(TestCase): @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") cls.t1 = Tag.objects.create(name='t1', category=generic) n1 = Note.objects.create(note='n1', misc='foo') n2 = Note.objects.create(note='n2', misc='bar') e1 = ExtraInfo.objects.create(info='e1', note=n1) e2 = ExtraInfo.objects.create(info='e2', note=n2) cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1) cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.r1 = Report.objects.create(name='r1', creator=cls.a1) cls.r2 = Report.objects.create(name='r2', creator=cls.a3) cls.r3 = Report.objects.create(name='r3') cls.i1 = Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1) cls.i2 = Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3) def test_ticket24525(self): tag = Tag.objects.create() anth100 = tag.note_set.create(note='ANTH', misc='100') math101 = tag.note_set.create(note='MATH', misc='101') s1 = tag.annotation_set.create(name='1') s2 = tag.annotation_set.create(name='2') s1.notes.set([math101, anth100]) s2.notes.set([math101]) result = math101.annotation_set.all() & tag.annotation_set.exclude(notes__in=[anth100]) self.assertEqual(list(result), [s2]) def test_ticket11811(self): unsaved_category = NamedCategory(name="Other") msg = 'Unsaved model instance <NamedCategory: Other> cannot be used in an ORM query.' with self.assertRaisesMessage(ValueError, msg): Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category) def test_ticket14876(self): # Note: when combining the query we need to have information available # about the join type of the trimmed "creator__isnull" join. If we # don't have that information, then the join is created as INNER JOIN # and results will be incorrect. q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1')) q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1')) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True)) q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True)) self.assertCountEqual(q1, [self.r1, self.r3]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by() q2 = ( Item.objects .filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1')) .order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by() q2 = ( Item.objects.filter(Q(creator__report__name='e1')).order_by() | Item.objects.filter(Q(creator=self.a1)).order_by() ) self.assertCountEqual(q1, [self.i1]) self.assertEqual(str(q1.query), str(q2.query)) def test_combine_join_reuse(self): # Joins having identical connections are correctly recreated in the # rhs query, in case the query is ORed together (#18748). 
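        # The ORed queryset below should compile to exactly two JOINs and
        # return only the 'a1' author.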
Report.objects.create(name='r4', creator=self.a1) q1 = Author.objects.filter(report__name='r5') q2 = Author.objects.filter(report__name='r4').filter(report__name='r1') combined = q1 | q2 self.assertEqual(str(combined.query).count('JOIN'), 2) self.assertEqual(len(combined), 1) self.assertEqual(combined[0].name, 'a1') def test_combine_or_filter_reuse(self): combined = Author.objects.filter(name='a1') | Author.objects.filter(name='a3') self.assertEqual(combined.get(name='a1'), self.a1) def test_join_reuse_order(self): # Join aliases are reused in order. This shouldn't raise AssertionError # because change_map contains a circular reference (#26522). s1 = School.objects.create() s2 = School.objects.create() s3 = School.objects.create() t1 = Teacher.objects.create() otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1) qs1 = otherteachers.filter(schools=s1).filter(schools=s2) qs2 = otherteachers.filter(schools=s1).filter(schools=s3) self.assertQuerysetEqual(qs1 | qs2, []) def test_ticket7095(self): # Updates that are filtered on the model being updated are somewhat # tricky in MySQL. ManagedModel.objects.create(data='mm1', tag=self.t1, public=True) self.assertEqual(ManagedModel.objects.update(data='mm'), 1) # A values() or values_list() query across joined models must use outer # joins appropriately. # Note: In Oracle, we expect a null CharField to return '' instead of # None. if connection.features.interprets_empty_strings_as_nulls: expected_null_charfield_repr = '' else: expected_null_charfield_repr = None self.assertSequenceEqual( Report.objects.values_list("creator__extra__info", flat=True).order_by("name"), ['e1', 'e2', expected_null_charfield_repr], ) # Similarly for select_related(), joins beyond an initial nullable join # must use outer joins so that all results are included. self.assertSequenceEqual( Report.objects.select_related("creator", "creator__extra").order_by("name"), [self.r1, self.r2, self.r3] ) # When there are multiple paths to a table from another table, we have # to be careful not to accidentally reuse an inappropriate join when # using select_related(). We used to return the parent's Detail record # here by mistake. d1 = Detail.objects.create(data="d1") d2 = Detail.objects.create(data="d2") m1 = Member.objects.create(name="m1", details=d1) m2 = Member.objects.create(name="m2", details=d2) Child.objects.create(person=m2, parent=m1) obj = m1.children.select_related("person__details")[0] self.assertEqual(obj.person.details.data, 'd2') def test_order_by_resetting(self): # Calling order_by() with no parameters removes any existing ordering on the # model. But it should still be possible to add new ordering after that. qs = Author.objects.order_by().order_by('name') self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0]) def test_order_by_reverse_fk(self): # It is possible to order by reverse of foreign key, although that can lead # to duplicate results. 
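        # category1 is referenced by two of the items created below, so it
        # appears twice in the ordered result.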
c1 = SimpleCategory.objects.create(name="category1") c2 = SimpleCategory.objects.create(name="category2") CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c1) self.assertSequenceEqual(SimpleCategory.objects.order_by('categoryitem', 'pk'), [c1, c2, c1]) def test_filter_reverse_non_integer_pk(self): date_obj = DateTimePK.objects.create() extra_obj = ExtraInfo.objects.create(info='extra', date=date_obj) self.assertEqual( DateTimePK.objects.filter(extrainfo=extra_obj).get(), date_obj, ) def test_ticket10181(self): # Avoid raising an EmptyResultSet if an inner query is probably # empty (and hence, not executed). self.assertQuerysetEqual( Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), [] ) def test_ticket15316_filter_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_exclude_false(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_filter_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") ci1 = CategoryItem.objects.create(category=c1) CategoryItem.objects.create(category=c2) CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.filter(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_exclude_true(self): c1 = SimpleCategory.objects.create(name="category1") c2 = SpecialCategory.objects.create(name="named category1", special_name="special1") c3 = SpecialCategory.objects.create(name="named category2", special_name="special2") CategoryItem.objects.create(category=c1) ci2 = CategoryItem.objects.create(category=c2) ci3 = CategoryItem.objects.create(category=c3) qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True) self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) def test_ticket15316_one2one_filter_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) 
def test_ticket15316_one2one_exclude_false(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_filter_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") ci1 = CategoryItem.objects.create(category=c) CategoryItem.objects.create(category=c0) CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True) self.assertEqual(qs.count(), 1) self.assertSequenceEqual(qs, [ci1]) def test_ticket15316_one2one_exclude_true(self): c = SimpleCategory.objects.create(name="cat") c0 = SimpleCategory.objects.create(name="cat0") c1 = SimpleCategory.objects.create(name="category1") OneToOneCategory.objects.create(category=c1, new_name="new1") OneToOneCategory.objects.create(category=c0, new_name="new2") CategoryItem.objects.create(category=c) ci2 = CategoryItem.objects.create(category=c0) ci3 = CategoryItem.objects.create(category=c1) qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True).order_by('pk') self.assertEqual(qs.count(), 2) self.assertSequenceEqual(qs, [ci2, ci3]) class Queries5Tests(TestCase): @classmethod def setUpTestData(cls): # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the # Meta.ordering will be rank3, rank2, rank1. cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) cls.n2 = Note.objects.create(note='n2', misc='bar', id=2) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) e2 = ExtraInfo.objects.create(info='e2', note=cls.n2) a1 = Author.objects.create(name='a1', num=1001, extra=e1) a2 = Author.objects.create(name='a2', num=2002, extra=e1) a3 = Author.objects.create(name='a3', num=3003, extra=e2) cls.rank2 = Ranking.objects.create(rank=2, author=a2) cls.rank1 = Ranking.objects.create(rank=1, author=a3) cls.rank3 = Ranking.objects.create(rank=3, author=a1) def test_ordering(self): # Cross model ordering is possible in Meta, too. self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) self.assertSequenceEqual( Ranking.objects.all().order_by('rank'), [self.rank1, self.rank2, self.rank3], ) # Ordering of extra() pieces is possible, too and you can mix extra # fields and model fields in the ordering. self.assertSequenceEqual( Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']), [self.rank1, self.rank2, self.rank3], ) sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) self.assertEqual( [o.good for o in qs.extra(order_by=('-good',))], [True, False, False] ) self.assertSequenceEqual( qs.extra(order_by=('-good', 'id')), [self.rank3, self.rank2, self.rank1], ) # Despite having some extra aliases in the query, we can still omit # them in a values() query. 
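        # Only 'id' and 'rank' should appear in the resulting dicts; the
        # extra 'good' alias is dropped.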
dicts = qs.values('id', 'rank').order_by('id') self.assertEqual( [d['rank'] for d in dicts], [2, 1, 3] ) def test_ticket7256(self): # An empty values() call includes all aliases, including those from an # extra() sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank') qs = Ranking.objects.extra(select={'good': sql}) dicts = qs.values().order_by('id') for d in dicts: del d['id'] del d['author_id'] self.assertEqual( [sorted(d.items()) for d in dicts], [[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]] ) def test_ticket7045(self): # Extra tables used to crash SQL construction on the second use. qs = Ranking.objects.extra(tables=['django_site']) qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception. qs.query.get_compiler(qs.db).as_sql() def test_ticket9848(self): # Make sure that updates which only filter on sub-tables don't # inadvertently update the wrong records (bug #9848). author_start = Author.objects.get(name='a1') ranking_start = Ranking.objects.get(author__name='a1') # Make sure that the IDs from different tables don't happen to match. self.assertSequenceEqual( Ranking.objects.filter(author__name='a1'), [self.rank3], ) self.assertEqual( Ranking.objects.filter(author__name='a1').update(rank=4636), 1 ) r = Ranking.objects.get(author__name='a1') self.assertEqual(r.id, ranking_start.id) self.assertEqual(r.author.id, author_start.id) self.assertEqual(r.rank, 4636) r.rank = 3 r.save() self.assertSequenceEqual( Ranking.objects.all(), [self.rank3, self.rank2, self.rank1], ) def test_ticket5261(self): # Test different empty excludes. self.assertSequenceEqual( Note.objects.exclude(Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.filter(~Q() | ~Q()), [self.n1, self.n2], ) self.assertSequenceEqual( Note.objects.exclude(~Q() & ~Q()), [self.n1, self.n2], ) def test_extra_select_literal_percent_s(self): # Allow %%s to escape select clauses self.assertEqual( Note.objects.extra(select={'foo': "'%%s'"})[0].foo, '%s' ) self.assertEqual( Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo, '%s bar %s' ) self.assertEqual( Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo, 'bar %s' ) def test_queryset_reuse(self): # Using querysets doesn't mutate aliases. authors = Author.objects.filter(Q(name='a1') | Q(name='nonexistent')) self.assertEqual(Ranking.objects.filter(author__in=authors).get(), self.rank3) self.assertEqual(authors.count(), 1) class SelectRelatedTests(TestCase): def test_tickets_3045_3288(self): # Once upon a time, select_related() with circular relations would loop # infinitely if you forgot to specify "depth". Now we set an arbitrary # default upper bound. self.assertQuerysetEqual(X.objects.all(), []) self.assertQuerysetEqual(X.objects.select_related(), []) class SubclassFKTests(TestCase): def test_ticket7778(self): # Model subclasses could not be deleted if a nullable foreign key # relates to a model that relates back. num_celebs = Celebrity.objects.count() tvc = TvChef.objects.create(name="Huey") self.assertEqual(Celebrity.objects.count(), num_celebs + 1) Fan.objects.create(fan_of=tvc) Fan.objects.create(fan_of=tvc) tvc.delete() # The parent object should have been deleted as well. 
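        # With multi-table inheritance, deleting the TvChef child also removes
        # its Celebrity parent row, so the count drops back to the original.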
        self.assertEqual(Celebrity.objects.count(), num_celebs)


class CustomPkTests(TestCase):
    def test_ticket7371(self):
        self.assertQuerysetEqual(Related.objects.order_by('custom'), [])


class NullableRelOrderingTests(TestCase):
    def test_ticket10028(self):
        # Ordering by model related to nullable relations(!) should use outer
        # joins, so that all results are included.
        p1 = Plaything.objects.create(name="p1")
        self.assertSequenceEqual(Plaything.objects.all(), [p1])

    def test_join_already_in_query(self):
        # Ordering by model related to nullable relations should not change
        # the join type of already existing joins.
        Plaything.objects.create(name="p1")
        s = SingleObject.objects.create(name='s')
        r = RelatedObject.objects.create(single=s, f=1)
        p2 = Plaything.objects.create(name="p2", others=r)
        qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk')
        self.assertNotIn('JOIN', str(qs.query))
        qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk')
        self.assertIn('INNER', str(qs.query))
        qs = qs.order_by('others__single__name')
        # The ordering by others__single__name will add one new join (to
        # single) and that join must be a LEFT join. The already existing join
        # to related objects must be kept INNER. So, we have both an INNER and
        # a LEFT join in the query.
        self.assertEqual(str(qs.query).count('LEFT'), 1)
        self.assertEqual(str(qs.query).count('INNER'), 1)
        self.assertSequenceEqual(qs, [p2])


class DisjunctiveFilterTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
        cls.e1 = ExtraInfo.objects.create(info='e1', note=cls.n1)

    def test_ticket7872(self):
        # Another variation on the disjunctive filtering theme.
        # For the purposes of this regression test, it's important that there
        # is no Join object related to the LeafA we create.
        l1 = LeafA.objects.create(data='first')
        self.assertSequenceEqual(LeafA.objects.all(), [l1])
        self.assertSequenceEqual(
            LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')),
            [l1],
        )

    def test_ticket8283(self):
        # Checking that applying filters after a disjunction works correctly.
        self.assertSequenceEqual(
            (ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1),
            [self.e1],
        )
        self.assertSequenceEqual(
            (ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1),
            [self.e1],
        )


class Queries6Tests(TestCase):
    @classmethod
    def setUpTestData(cls):
        generic = NamedCategory.objects.create(name="Generic")
        cls.t1 = Tag.objects.create(name='t1', category=generic)
        cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
        cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
        cls.t4 = Tag.objects.create(name='t4', parent=cls.t3)
        cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
        n1 = Note.objects.create(note='n1', misc='foo', id=1)
        cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
        cls.ann1.notes.add(n1)
        cls.ann2 = Annotation.objects.create(name='a2', tag=cls.t4)

    def test_parallel_iterators(self):
        # Parallel iterators work.
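        # Both iterators below advance independently while sharing the
        # queryset's single result cache.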
        qs = Tag.objects.all()
        i1, i2 = iter(qs), iter(qs)
        self.assertEqual(repr(next(i1)), '<Tag: t1>')
        self.assertEqual(repr(next(i1)), '<Tag: t2>')
        self.assertEqual(repr(next(i2)), '<Tag: t1>')
        self.assertEqual(repr(next(i2)), '<Tag: t2>')
        self.assertEqual(repr(next(i2)), '<Tag: t3>')
        self.assertEqual(repr(next(i1)), '<Tag: t3>')

        qs = X.objects.all()
        self.assertFalse(qs)
        self.assertFalse(qs)

    def test_nested_queries_sql(self):
        # Nested queries should not evaluate the inner query as part of constructing the
        # SQL (so we should see a nested query here, indicated by two "SELECT" calls).
        qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
        self.assertEqual(
            qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'),
            2
        )

    def test_tickets_8921_9188(self):
        # Incorrect SQL was being generated for certain types of exclude()
        # queries that crossed multi-valued relations (#8921, #9188 and some
        # preemptively discovered cases).
        self.assertSequenceEqual(
            PointerA.objects.filter(connection__pointerb__id=1),
            []
        )
        self.assertSequenceEqual(
            PointerA.objects.exclude(connection__pointerb__id=1),
            []
        )
        self.assertSequenceEqual(
            Tag.objects.exclude(children=None),
            [self.t1, self.t3],
        )
        # This example is tricky because the parent could be NULL, so only checking
        # parents with annotations omits some results (tag t1, in this case).
        self.assertSequenceEqual(
            Tag.objects.exclude(parent__annotation__name="a1"),
            [self.t1, self.t4, self.t5],
        )
        # The annotation->tag link is single-valued and the tag->children link
        # is multi-valued. So we have to split the exclude filter in the middle
        # and then optimize the inner query without losing results.
        self.assertSequenceEqual(
            Annotation.objects.exclude(tag__children__name="t2"),
            [self.ann2],
        )
        # Nested queries are possible (although they should be used with care,
        # since they have performance problems on backends like MySQL).
        self.assertSequenceEqual(
            Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
            [self.ann1],
        )

    def test_ticket3739(self):
        # The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by('name') self.assertIsNot(q1, q1.all()) def test_ticket_11320(self): qs = Tag.objects.exclude(category=None).exclude(category__name='foo') self.assertEqual(str(qs.query).count(' INNER JOIN '), 1) def test_distinct_ordered_sliced_subquery_aggregation(self): self.assertEqual(Tag.objects.distinct().order_by('category__name')[:3].count(), 3) def test_multiple_columns_with_the_same_name_slice(self): self.assertEqual( list(Tag.objects.order_by('name').values_list('name', 'category__name')[:2]), [('t1', 'Generic'), ('t2', 'Generic')], ) self.assertSequenceEqual( Tag.objects.order_by('name').select_related('category')[:2], [self.t1, self.t2], ) self.assertEqual( list(Tag.objects.order_by('-name').values_list('name', 'parent__name')[:2]), [('t5', 't3'), ('t4', 't3')], ) self.assertSequenceEqual( Tag.objects.order_by('-name').select_related('parent')[:2], [self.t5, self.t4], ) def test_col_alias_quoted(self): with CaptureQueriesContext(connection) as captured_queries: self.assertEqual( Tag.objects.values('parent').annotate( tag_per_parent=Count('pk'), ).aggregate(Max('tag_per_parent')), {'tag_per_parent__max': 2}, ) sql = captured_queries[0]['sql'] self.assertIn('AS %s' % connection.ops.quote_name('col1'), sql) class RawQueriesTests(TestCase): @classmethod def setUpTestData(cls): Note.objects.create(note='n1', misc='foo', id=1) def test_ticket14729(self): # Test representation of raw query with one or few parameters passed as list query = "SELECT * FROM queries_note WHERE note = %s" params = ['n1'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>") query = "SELECT * FROM queries_note WHERE note = %s and misc = %s" params = ['n1', 'foo'] qs = Note.objects.raw(query, params=params) self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>") class GeneratorExpressionTests(SimpleTestCase): def test_ticket10432(self): # Using an empty iterator as the rvalue for an "__in" # lookup is legal. self.assertCountEqual(Note.objects.filter(pk__in=iter(())), []) class ComparisonTests(TestCase): @classmethod def setUpTestData(cls): cls.n1 = Note.objects.create(note='n1', misc='foo', id=1) e1 = ExtraInfo.objects.create(info='e1', note=cls.n1) cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1) def test_ticket8597(self): # Regression tests for case-insensitive comparisons item_ab = Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1) item_xy = Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1) self.assertSequenceEqual( Item.objects.filter(name__iexact="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iexact="x%Y"), [item_xy], ) self.assertSequenceEqual( Item.objects.filter(name__istartswith="A_b"), [item_ab], ) self.assertSequenceEqual( Item.objects.filter(name__iendswith="A_b"), [item_ab], ) class ExistsSql(TestCase): def test_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertFalse(Tag.objects.exists()) # Ok - so the exist query worked - but did it include too many columns? 
self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'] id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name') self.assertNotIn(id, qstr) self.assertNotIn(name, qstr) def test_ticket_18414(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.exists()) self.assertTrue(Article.objects.distinct().exists()) self.assertTrue(Article.objects.distinct()[1:3].exists()) self.assertFalse(Article.objects.distinct()[1:1].exists()) @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_18414_distinct_on(self): Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) self.assertTrue(Article.objects.distinct('name').exists()) self.assertTrue(Article.objects.distinct('name')[1:2].exists()) self.assertFalse(Article.objects.distinct('name')[2:3].exists()) class QuerysetOrderedTests(unittest.TestCase): """ Tests for the Queryset.ordered attribute. """ def test_no_default_or_explicit_ordering(self): self.assertIs(Annotation.objects.all().ordered, False) def test_cleared_default_ordering(self): self.assertIs(Tag.objects.all().ordered, True) self.assertIs(Tag.objects.all().order_by().ordered, False) def test_explicit_ordering(self): self.assertIs(Annotation.objects.all().order_by('id').ordered, True) def test_empty_queryset(self): self.assertIs(Annotation.objects.none().ordered, True) def test_order_by_extra(self): self.assertIs(Annotation.objects.all().extra(order_by=['id']).ordered, True) def test_annotated_ordering(self): qs = Annotation.objects.annotate(num_notes=Count('notes')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('num_notes').ordered, True) def test_annotated_default_ordering(self): qs = Tag.objects.annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) def test_annotated_values_default_ordering(self): qs = Tag.objects.values('name').annotate(num_notes=Count('pk')) self.assertIs(qs.ordered, False) self.assertIs(qs.order_by('name').ordered, True) @skipUnlessDBFeature('allow_sliced_subqueries_with_in') class SubqueryTests(TestCase): @classmethod def setUpTestData(cls): NamedCategory.objects.create(id=1, name='first') NamedCategory.objects.create(id=2, name='second') NamedCategory.objects.create(id=3, name='third') NamedCategory.objects.create(id=4, name='fourth') def test_ordered_subselect(self): "Subselects honor any manual ordering" query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3, 4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]) self.assertEqual(set(query.values_list('id', flat=True)), {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:]) self.assertEqual(set(query.values_list('id', flat=True)), {1, 2}) def test_slice_subquery_and_query(self): """ Slice a query that has a sliced subquery """ query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2] self.assertEqual({x.id for x in query}, {3, 
4}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3] self.assertEqual({x.id for x in query}, {3}) query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:] self.assertEqual({x.id for x in query}, {2}) def test_related_sliced_subquery(self): """ Related objects constraints can safely contain sliced subqueries. refs #22434 """ generic = NamedCategory.objects.create(id=5, name="Generic") t1 = Tag.objects.create(name='t1', category=generic) t2 = Tag.objects.create(name='t2', category=generic) ManagedModel.objects.create(data='mm1', tag=t1, public=True) mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True) query = ManagedModel.normal_manager.filter( tag__in=Tag.objects.order_by('-id')[:1] ) self.assertEqual({x.id for x in query}, {mm2.id}) def test_sliced_delete(self): "Delete queries can safely contain sliced subqueries" DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3}) DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete() self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3}) def test_distinct_ordered_sliced_subquery(self): # Implicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('name')[0:2], ).order_by('name').values_list('name', flat=True), ['first', 'fourth'] ) # Explicit values('id'). self.assertSequenceEqual( NamedCategory.objects.filter( id__in=NamedCategory.objects.distinct().order_by('-name').values('id')[0:2], ).order_by('name').values_list('name', flat=True), ['second', 'third'] ) # Annotated value. 
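        # The subquery selects the annotated 'double_id' column, so the outer
        # filter matches ids 2 and 4 (i.e. 1 * 2 and 2 * 2).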
        self.assertSequenceEqual(
            DumbCategory.objects.filter(
                id__in=DumbCategory.objects.annotate(
                    double_id=F('id') * 2
                ).order_by('id').distinct().values('double_id')[0:2],
            ).order_by('id').values_list('id', flat=True),
            [2, 4]
        )


class QuerySetBitwiseOperationTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.school = School.objects.create()
        cls.room_1 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 1')
        cls.room_2 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 2')
        cls.room_3 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 3')
        cls.room_4 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 4')
        tag = Tag.objects.create()
        cls.annotation_1 = Annotation.objects.create(tag=tag)
        annotation_2 = Annotation.objects.create(tag=tag)
        note = cls.annotation_1.notes.create(tag=tag)
        cls.base_user_1 = BaseUser.objects.create(annotation=cls.annotation_1)
        cls.base_user_2 = BaseUser.objects.create(annotation=annotation_2)
        cls.task = Task.objects.create(
            owner=cls.base_user_2,
            creator=cls.base_user_2,
            note=note,
        )

    @skipUnlessDBFeature('allow_sliced_subqueries_with_in')
    def test_or_with_rhs_slice(self):
        qs1 = Classroom.objects.filter(has_blackboard=True)
        qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
        self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])

    @skipUnlessDBFeature('allow_sliced_subqueries_with_in')
    def test_or_with_lhs_slice(self):
        qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
        qs2 = Classroom.objects.filter(has_blackboard=False)
        self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])

    @skipUnlessDBFeature('allow_sliced_subqueries_with_in')
    def test_or_with_both_slice(self):
        qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
        qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
        self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])

    @skipUnlessDBFeature('allow_sliced_subqueries_with_in')
    def test_or_with_both_slice_and_ordering(self):
        qs1 = Classroom.objects.filter(has_blackboard=False).order_by('-pk')[:1]
        qs2 = Classroom.objects.filter(has_blackboard=True).order_by('-name')[:1]
        self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])

    def test_subquery_aliases(self):
        combined = School.objects.filter(pk__isnull=False) & School.objects.filter(
            Exists(Classroom.objects.filter(
                has_blackboard=True,
                school=OuterRef('pk'),
            )),
        )
        self.assertSequenceEqual(combined, [self.school])
        nested_combined = School.objects.filter(pk__in=combined.values('pk'))
        self.assertSequenceEqual(nested_combined, [self.school])

    def test_conflicting_aliases_during_combine(self):
        qs1 = self.annotation_1.baseuser_set.all()
        qs2 = BaseUser.objects.filter(
            Q(owner__note__in=self.annotation_1.notes.all()) |
            Q(creator__note__in=self.annotation_1.notes.all())
        )
        self.assertSequenceEqual(qs1, [self.base_user_1])
        self.assertSequenceEqual(qs2, [self.base_user_2])
        self.assertCountEqual(qs2 | qs1, qs1 | qs2)
        self.assertCountEqual(qs2 | qs1, [self.base_user_1, self.base_user_2])


class CloneTests(TestCase):
    def test_evaluated_queryset_as_argument(self):
        "#13227 -- If a queryset is already evaluated, it can still be used as a query arg"
        n = Note(note='Test1', misc='misc')
        n.save()
        e = ExtraInfo(info='good', note=n)
        e.save()
        n_list = Note.objects.all()
        # Evaluate the Note queryset, populating the result cache.
        list(n_list)
        # Make one of the cached results unpicklable.
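        # threading.Lock instances can't be pickled, so pickling the evaluated
        # queryset must now raise TypeError.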
n_list._result_cache[0].lock = Lock() with self.assertRaises(TypeError): pickle.dumps(n_list) # Use the note queryset in a query, and evaluate # that query in a way that involves cloning. self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good') def test_no_model_options_cloning(self): """ Cloning a queryset does not get out of hand. While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.") try: Note.objects.filter(pk__lte=F('pk') + 1).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy def test_no_fields_cloning(self): """ Cloning a queryset does not get out of hand. While complete testing is impossible, this is a sanity check against invalid use of deepcopy. refs #16759. """ opts_class = type(Note._meta.get_field("misc")) note_deepcopy = getattr(opts_class, "__deepcopy__", None) opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned") try: Note.objects.filter(note=F('misc')).all() finally: if note_deepcopy is None: delattr(opts_class, "__deepcopy__") else: opts_class.__deepcopy__ = note_deepcopy class EmptyQuerySetTests(SimpleTestCase): def test_emptyqueryset_values(self): # #14366 -- Calling .values() on an empty QuerySet and then cloning # that should not cause an error self.assertCountEqual(Number.objects.none().values('num').order_by('num'), []) def test_values_subquery(self): self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values('pk')), []) self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values_list('pk')), []) def test_ticket_19151(self): # #19151 -- Calling .values() or .values_list() on an empty QuerySet # should return an empty QuerySet and not cause an error. q = Author.objects.none() self.assertCountEqual(q.values(), []) self.assertCountEqual(q.values_list(), []) class ValuesQuerysetTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=72) def test_flat_values_list(self): qs = Number.objects.values_list("num") qs = qs.values_list("num", flat=True) self.assertSequenceEqual(qs, [72]) def test_extra_values(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(1, 2)) qs = qs.order_by('value_minus_x') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_twice(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'}) qs = qs.order_by('value_minus_one').order_by('value_plus_one') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_multiple(self): # Postgres doesn't allow constants in order by, so check for that. 
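        # Illustrative note: presumably the ordering is resolved through the
        # extra-select aliases rather than by emitting the bare literal '1'
        # (which PostgreSQL rejects in ORDER BY); the assertion below only
        # checks that the query evaluates and returns the expected row.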
qs = Number.objects.extra(select={ 'value_plus_one': 'num+1', 'value_minus_one': 'num-1', 'constant_value': '1' }) qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value') qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_values_order_in_extra(self): # testing for ticket 14930 issues qs = Number.objects.extra( select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'}, order_by=['value_minus_one'], ) qs = qs.values('num') def test_extra_select_params_values_order_in_extra(self): # testing for 23259 issue qs = Number.objects.extra( select={'value_plus_x': 'num+%s'}, select_params=[1], order_by=['value_plus_x'], ) qs = qs.filter(num=72) qs = qs.values('num') self.assertSequenceEqual(qs, [{'num': 72}]) def test_extra_multiple_select_params_values_order_by(self): # testing for 23259 issue qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(72, 72)) qs = qs.order_by('value_minus_x') qs = qs.filter(num=1) qs = qs.values('num') self.assertSequenceEqual(qs, []) def test_extra_values_list(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1'}) qs = qs.order_by('value_plus_one') qs = qs.values_list('num') self.assertSequenceEqual(qs, [(72,)]) def test_flat_extra_values_list(self): # testing for ticket 14930 issues qs = Number.objects.extra(select={'value_plus_one': 'num+1'}) qs = qs.order_by('value_plus_one') qs = qs.values_list('num', flat=True) self.assertSequenceEqual(qs, [72]) def test_field_error_values_list(self): # see #23443 msg = "Cannot resolve keyword %r into field. Join on 'name' not permitted." % 'foo' with self.assertRaisesMessage(FieldError, msg): Tag.objects.values_list('name__foo') def test_named_values_list_flat(self): msg = "'flat' and 'named' can't be used together." 
with self.assertRaisesMessage(TypeError, msg): Number.objects.values_list('num', flat=True, named=True) def test_named_values_list_bad_field_name(self): msg = "Type names and field names must be valid identifiers: '1'" with self.assertRaisesMessage(ValueError, msg): Number.objects.extra(select={'1': 'num+1'}).values_list('1', named=True).first() def test_named_values_list_with_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list('num', 'num2', named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual(values._fields, ('num', 'num2')) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) def test_named_values_list_without_fields(self): qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id')) values = qs.values_list(named=True).first() self.assertEqual(type(values).__name__, 'Row') self.assertEqual( values._fields, ('num2', 'id', 'num', 'other_num', 'another_num', 'id__count'), ) self.assertEqual(values.num, 72) self.assertEqual(values.num2, 73) self.assertEqual(values.id__count, 1) def test_named_values_list_expression_with_default_alias(self): expr = Count('id') values = Number.objects.annotate(id__count1=expr).values_list(expr, 'id__count1', named=True).first() self.assertEqual(values._fields, ('id__count2', 'id__count1')) def test_named_values_list_expression(self): expr = F('num') + 1 qs = Number.objects.annotate(combinedexpression1=expr).values_list(expr, 'combinedexpression1', named=True) values = qs.first() self.assertEqual(values._fields, ('combinedexpression2', 'combinedexpression1')) def test_named_values_pickle(self): value = Number.objects.values_list('num', 'other_num', named=True).get() self.assertEqual(value, (72, None)) self.assertEqual(pickle.loads(pickle.dumps(value)), value) class QuerySetSupportsPythonIdioms(TestCase): @classmethod def setUpTestData(cls): some_date = datetime.datetime(2014, 5, 16, 12, 1) cls.articles = [ Article.objects.create(name=f'Article {i}', created=some_date) for i in range(1, 8) ] def get_ordered_articles(self): return Article.objects.all().order_by('name') def test_can_get_items_using_index_and_slice_notation(self): self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1') self.assertSequenceEqual( self.get_ordered_articles()[1:3], [self.articles[1], self.articles[2]], ) def test_slicing_with_steps_can_be_used(self): self.assertSequenceEqual( self.get_ordered_articles()[::2], [ self.articles[0], self.articles[2], self.articles[4], self.articles[6], ] ) def test_slicing_without_step_is_lazy(self): with self.assertNumQueries(0): self.get_ordered_articles()[0:5] def test_slicing_with_tests_is_not_lazy(self): with self.assertNumQueries(1): self.get_ordered_articles()[0:5:3] def test_slicing_can_slice_again_after_slicing(self): self.assertSequenceEqual( self.get_ordered_articles()[0:5][0:2], [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(self.get_ordered_articles()[0:5][4:], [self.articles[4]]) self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], []) # Some more tests! self.assertSequenceEqual( self.get_ordered_articles()[2:][0:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual( self.get_ordered_articles()[2:][:2], [self.articles[2], self.articles[3]], ) self.assertSequenceEqual(self.get_ordered_articles()[2:][2:3], [self.articles[4]]) # Using an offset without a limit is also possible. 
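        # Backend note (illustrative, not asserted here): databases that
        # require a LIMIT clause alongside OFFSET get a backend-specific
        # "no limit" value substituted; others simply emit OFFSET 5.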
self.assertSequenceEqual( self.get_ordered_articles()[5:], [self.articles[5], self.articles[6]], ) def test_slicing_cannot_filter_queryset_once_sliced(self): msg = 'Cannot filter a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].filter(id=1) def test_slicing_cannot_reorder_queryset_once_sliced(self): msg = 'Cannot reorder a query once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:5].order_by('id') def test_slicing_cannot_combine_queries_once_sliced(self): msg = 'Cannot combine queries once a slice has been taken.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[0:1] & Article.objects.all()[4:5] def test_slicing_negative_indexing_not_supported_for_single_element(self): """hint: inverting your ordering might do what you need""" msg = 'Negative indexing is not supported.' with self.assertRaisesMessage(ValueError, msg): Article.objects.all()[-1] def test_slicing_negative_indexing_not_supported_for_range(self): """hint: inverting your ordering might do what you need""" msg = 'Negative indexing is not supported.' with self.assertRaisesMessage(ValueError, msg): Article.objects.all()[0:-5] with self.assertRaisesMessage(ValueError, msg): Article.objects.all()[-1:] def test_invalid_index(self): msg = 'QuerySet indices must be integers or slices, not str.' with self.assertRaisesMessage(TypeError, msg): Article.objects.all()['foo'] def test_can_get_number_of_items_in_queryset_using_standard_len(self): self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1) def test_can_combine_queries_using_and_and_or_operators(self): s1 = Article.objects.filter(name__exact='Article 1') s2 = Article.objects.filter(name__exact='Article 2') self.assertSequenceEqual( (s1 | s2).order_by('name'), [self.articles[0], self.articles[1]], ) self.assertSequenceEqual(s1 & s2, []) class WeirdQuerysetSlicingTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.create(num=1) Number.objects.create(num=2) Article.objects.create(name='one', created=datetime.datetime.now()) Article.objects.create(name='two', created=datetime.datetime.now()) Article.objects.create(name='three', created=datetime.datetime.now()) Article.objects.create(name='four', created=datetime.datetime.now()) food = Food.objects.create(name='spam') Eaten.objects.create(meal='spam with eggs', food=food) def test_tickets_7698_10202(self): # People like to slice with '0' as the high-water mark. self.assertQuerysetEqual(Article.objects.all()[0:0], []) self.assertQuerysetEqual(Article.objects.all()[0:0][:10], []) self.assertEqual(Article.objects.all()[:0].count(), 0) msg = 'Cannot change a query once a slice has been taken.' 
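        # latest() needs to apply its own ordering, which is not allowed
        # once a slice has been taken, hence the TypeError below.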
with self.assertRaisesMessage(TypeError, msg): Article.objects.all()[:0].latest('created') def test_empty_resultset_sql(self): # ticket #12192 self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1])) def test_empty_sliced_subquery(self): self.assertEqual(Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0) def test_empty_sliced_subquery_exclude(self): self.assertEqual(Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1) def test_zero_length_values_slicing(self): n = 42 with self.assertNumQueries(0): self.assertQuerysetEqual(Article.objects.values()[n:n], []) self.assertQuerysetEqual(Article.objects.values_list()[n:n], []) class EscapingTests(TestCase): def test_ticket_7302(self): # Reserved names are appropriately escaped r_a = ReservedName.objects.create(name='a', order=42) r_b = ReservedName.objects.create(name='b', order=37) self.assertSequenceEqual( ReservedName.objects.all().order_by('order'), [r_b, r_a], ) self.assertSequenceEqual( ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')), [r_b, r_a], ) class ToFieldTests(TestCase): def test_in_query(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food__in=[apple, pear])), {lunch, dinner}, ) def test_in_subquery(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple'))), {lunch} ) self.assertEqual( set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple').values('eaten__meal'))), set() ) self.assertEqual( set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal='lunch'))), {apple} ) def test_nested_in_subquery(self): extra = ExtraInfo.objects.create() author = Author.objects.create(num=42, extra=extra) report = Report.objects.create(creator=author) comment = ReportComment.objects.create(report=report) comments = ReportComment.objects.filter( report__in=Report.objects.filter( creator__in=extra.author_set.all(), ), ) self.assertSequenceEqual(comments, [comment]) def test_reverse_in(self): apple = Food.objects.create(name="apple") pear = Food.objects.create(name="pear") lunch_apple = Eaten.objects.create(food=apple, meal="lunch") lunch_pear = Eaten.objects.create(food=pear, meal="dinner") self.assertEqual( set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])), {apple, pear} ) def test_single_object(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") dinner = Eaten.objects.create(food=apple, meal="dinner") self.assertEqual( set(Eaten.objects.filter(food=apple)), {lunch, dinner} ) def test_single_object_reverse(self): apple = Food.objects.create(name="apple") lunch = Eaten.objects.create(food=apple, meal="lunch") self.assertEqual( set(Food.objects.filter(eaten=lunch)), {apple} ) def test_recursive_fk(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(parent=node1)), [node2] ) def test_recursive_fk_reverse(self): node1 = Node.objects.create(num=42) node2 = Node.objects.create(num=1, parent=node1) self.assertEqual( list(Node.objects.filter(node=node2)), [node1] ) class IsNullTests(TestCase): def test_primary_key(self): custom = CustomPk.objects.create(name='pk') null = Related.objects.create() notnull = 
Related.objects.create(custom=custom) self.assertSequenceEqual(Related.objects.filter(custom__isnull=False), [notnull]) self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null]) def test_to_field(self): apple = Food.objects.create(name="apple") e1 = Eaten.objects.create(food=apple, meal="lunch") e2 = Eaten.objects.create(meal="lunch") self.assertSequenceEqual( Eaten.objects.filter(food__isnull=False), [e1], ) self.assertSequenceEqual( Eaten.objects.filter(food__isnull=True), [e2], ) class ConditionalTests(TestCase): """Tests whose execution depend on different environment conditions like Python version or DB backend features""" @classmethod def setUpTestData(cls): generic = NamedCategory.objects.create(name="Generic") t1 = Tag.objects.create(name='t1', category=generic) Tag.objects.create(name='t2', parent=t1, category=generic) t3 = Tag.objects.create(name='t3', parent=t1) Tag.objects.create(name='t4', parent=t3) Tag.objects.create(name='t5', parent=t3) def test_infinite_loop(self): # If you're not careful, it's possible to introduce infinite loops via # default ordering on foreign keys in a cycle. We detect that. with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopX.objects.all()) # Force queryset evaluation with list() with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'): list(LoopZ.objects.all()) # Force queryset evaluation with list() # Note that this doesn't cause an infinite loop, since the default # ordering on the Tag model is empty (and thus defaults to using "id" # for the related field). self.assertEqual(len(Tag.objects.order_by('parent')), 5) # ... but you can still order in a non-recursive fashion among linked # fields (the previous test failed because the default ordering was # recursive). self.assertQuerysetEqual( LoopX.objects.all().order_by('y__x__y__x__id'), [] ) # When grouping without specifying ordering, we add an explicit "ORDER BY NULL" # portion in MySQL to prevent unnecessary sorting. @skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping') def test_null_ordering_added(self): query = Tag.objects.values_list('parent_id', flat=True).order_by().query query.group_by = ['parent_id'] sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0] fragment = "ORDER BY " pos = sql.find(fragment) self.assertEqual(sql.find(fragment, pos + 1), -1) self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment)) def test_in_list_limit(self): # The "in" lookup works with lists of 1000 items or more. # The numbers amount is picked to force three different IN batches # for Oracle, yet to be less than 2100 parameter limit for MSSQL. numbers = list(range(2050)) max_query_params = connection.features.max_query_params if max_query_params is None or max_query_params >= len(numbers): Number.objects.bulk_create(Number(num=num) for num in numbers) for number in [1000, 1001, 2000, len(numbers)]: with self.subTest(number=number): self.assertEqual(Number.objects.filter(num__in=numbers[:number]).count(), number) class UnionTests(unittest.TestCase): """ Tests for the union of two querysets. Bug #12252. 
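    Each case checks that filter(Q1) | filter(Q2) returns the same rows as
    filter(Q1 | Q2), regardless of operand order.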
""" @classmethod def setUpTestData(cls): objectas = [] objectbs = [] objectcs = [] a_info = ['one', 'two', 'three'] for name in a_info: o = ObjectA(name=name) o.save() objectas.append(o) b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])] for name, number, objecta in b_info: o = ObjectB(name=name, num=number, objecta=objecta) o.save() objectbs.append(o) c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])] for name, objecta, objectb in c_info: o = ObjectC(name=name, objecta=objecta, objectb=objectb) o.save() objectcs.append(o) def check_union(self, model, Q1, Q2): filter = model.objects.filter self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2))) self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2))) def test_A_AB(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_A_AB2(self): Q1 = Q(name='two') Q2 = Q(objectb__name='deux', objectb__num=2) self.check_union(ObjectA, Q1, Q2) def test_AB_ACB(self): Q1 = Q(objectb__name='deux') Q2 = Q(objectc__objectb__name='deux') self.check_union(ObjectA, Q1, Q2) def test_BAB_BAC(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__name='ein') self.check_union(ObjectB, Q1, Q2) def test_BAB_BACB(self): Q1 = Q(objecta__objectb__name='deux') Q2 = Q(objecta__objectc__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) def test_BA_BCA__BAB_BAC_BCA(self): Q1 = Q(objecta__name='one', objectc__objecta__name='two') Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois') self.check_union(ObjectB, Q1, Q2) class DefaultValuesInsertTest(TestCase): def test_no_extra_params(self): """ Can create an instance of a model with only the PK field (#17056)." 
""" DumbCategory.objects.create() class ExcludeTests(TestCase): @classmethod def setUpTestData(cls): f1 = Food.objects.create(name='apples') cls.f2 = Food.objects.create(name='oranges') Eaten.objects.create(food=f1, meal='dinner') cls.j1 = Job.objects.create(name='Manager') cls.r1 = Responsibility.objects.create(description='Playing golf') cls.j2 = Job.objects.create(name='Programmer') cls.r2 = Responsibility.objects.create(description='Programming') JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1) JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2) def test_to_field(self): self.assertSequenceEqual( Food.objects.exclude(eaten__meal='dinner'), [self.f2], ) self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Playing golf'), [self.j2], ) self.assertSequenceEqual( Responsibility.objects.exclude(jobs__name='Manager'), [self.r2], ) def test_ticket14511(self): alex = Person.objects.get_or_create(name='Alex')[0] jane = Person.objects.get_or_create(name='Jane')[0] oracle = Company.objects.get_or_create(name='Oracle')[0] google = Company.objects.get_or_create(name='Google')[0] microsoft = Company.objects.get_or_create(name='Microsoft')[0] intel = Company.objects.get_or_create(name='Intel')[0] def employ(employer, employee, title): Employment.objects.get_or_create(employee=employee, employer=employer, title=title) employ(oracle, alex, 'Engineer') employ(oracle, alex, 'Developer') employ(google, alex, 'Engineer') employ(google, alex, 'Manager') employ(microsoft, alex, 'Manager') employ(intel, alex, 'Manager') employ(microsoft, jane, 'Developer') employ(intel, jane, 'Manager') alex_tech_employers = alex.employers.filter( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_tech_employers, [google, oracle]) alex_nontech_employers = alex.employers.exclude( employment__title__in=('Engineer', 'Developer')).distinct().order_by('name') self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft]) def test_exclude_reverse_fk_field_ref(self): tag = Tag.objects.create() Note.objects.create(tag=tag, note='note') annotation = Annotation.objects.create(name='annotation', tag=tag) self.assertEqual(Annotation.objects.exclude(tag__note__note=F('name')).get(), annotation) def test_exclude_with_circular_fk_relation(self): self.assertEqual(ObjectB.objects.exclude(objecta__objectb__name=F('name')).count(), 0) def test_subquery_exclude_outerref(self): qs = JobResponsibilities.objects.filter( Exists(Responsibility.objects.exclude(jobs=OuterRef('job'))), ) self.assertTrue(qs.exists()) self.r1.delete() self.assertFalse(qs.exists()) def test_exclude_nullable_fields(self): number = Number.objects.create(num=1, other_num=1) Number.objects.create(num=2, other_num=2, another_num=2) self.assertSequenceEqual( Number.objects.exclude(other_num=F('another_num')), [number], ) self.assertSequenceEqual( Number.objects.exclude(num=F('another_num')), [number], ) def test_exclude_multivalued_exists(self): with CaptureQueriesContext(connection) as captured_queries: self.assertSequenceEqual( Job.objects.exclude(responsibilities__description='Programming'), [self.j1], ) self.assertIn('exists', captured_queries[0]['sql'].lower()) def test_exclude_subquery(self): subquery = JobResponsibilities.objects.filter( responsibility__description='bar', ) | JobResponsibilities.objects.exclude( job__responsibilities__description='foo', ) self.assertCountEqual( Job.objects.annotate( responsibility=subquery.filter( 
job=OuterRef('name'), ).values('id')[:1] ), [self.j1, self.j2], ) def test_exclude_unsaved_o2o_object(self): jack = Staff.objects.create(name='jack') jack_staff = StaffUser.objects.create(staff=jack) unsaved_object = Staff(name='jane') self.assertIsNone(unsaved_object.pk) self.assertSequenceEqual(StaffUser.objects.exclude(staff=unsaved_object), [jack_staff]) class ExcludeTest17600(TestCase): """ Some regressiontests for ticket #17600. Some of these likely duplicate other existing tests. """ @classmethod def setUpTestData(cls): # Create a few Orders. cls.o1 = Order.objects.create(pk=1) cls.o2 = Order.objects.create(pk=2) cls.o3 = Order.objects.create(pk=3) # Create some OrderItems for the first order with homogeneous # status_id values cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1) cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1) cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1) # Create some OrderItems for the second order with heterogeneous # status_id values cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1) cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2) cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3) # Create some OrderItems for the second order with heterogeneous # status_id values cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2) cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3) cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4) def test_exclude_plain(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(items__status=1), [self.o3], ) def test_exclude_plain_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(items__status=1).distinct(), [self.o3], ) def test_exclude_with_q_object_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(Q(items__status=1)).distinct(), [self.o3], ) def test_exclude_with_q_object_no_distinct(self): """ This should exclude Orders which have some items with status 1 """ self.assertSequenceEqual( Order.objects.exclude(Q(items__status=1)), [self.o3], ) def test_exclude_with_q_is_equal_to_plain_exclude(self): """ Using exclude(condition) and exclude(Q(condition)) should yield the same QuerySet """ self.assertEqual( list(Order.objects.exclude(items__status=1).distinct()), list(Order.objects.exclude(Q(items__status=1)).distinct())) def test_exclude_with_q_is_equal_to_plain_exclude_variation(self): """ Using exclude(condition) and exclude(Q(condition)) should yield the same QuerySet """ self.assertEqual( list(Order.objects.exclude(items__status=1)), list(Order.objects.exclude(Q(items__status=1)).distinct())) @unittest.expectedFailure def test_only_orders_with_all_items_having_status_1(self): """ This should only return orders having ALL items set to status 1, or those items not having any orders at all. The correct way to write this query in SQL seems to be using two nested subqueries. 
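        (The ORM's current handling of the doubly negated multi-valued
        condition does not produce that form, which is why this test is
        marked as an expected failure.)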
""" self.assertQuerysetEqual( Order.objects.exclude(~Q(items__status=1)).distinct(), [self.o1], ) class Exclude15786(TestCase): """Regression test for #15786""" def test_ticket15786(self): c1 = SimpleCategory.objects.create(name='c1') c2 = SimpleCategory.objects.create(name='c2') OneToOneCategory.objects.create(category=c1) OneToOneCategory.objects.create(category=c2) rel = CategoryRelationship.objects.create(first=c1, second=c2) self.assertEqual( CategoryRelationship.objects.exclude( first__onetoonecategory=F('second__onetoonecategory') ).get(), rel ) class NullInExcludeTest(TestCase): @classmethod def setUpTestData(cls): NullableName.objects.create(name='i1') NullableName.objects.create() def test_null_in_exclude_qs(self): none_val = '' if connection.features.interprets_empty_strings_as_nulls else None self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[]), ['i1', none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i1']), [none_val], attrgetter('name')) self.assertQuerysetEqual( NullableName.objects.exclude(name__in=['i3']), ['i1', none_val], attrgetter('name')) inner_qs = NullableName.objects.filter(name='i1').values_list('name') self.assertQuerysetEqual( NullableName.objects.exclude(name__in=inner_qs), [none_val], attrgetter('name')) # The inner queryset wasn't executed - it should be turned # into subquery above self.assertIs(inner_qs._result_cache, None) @unittest.expectedFailure def test_col_not_in_list_containing_null(self): """ The following case is not handled properly because SQL's COL NOT IN (list containing null) handling is too weird to abstract away. """ self.assertQuerysetEqual( NullableName.objects.exclude(name__in=[None]), ['i1'], attrgetter('name')) def test_double_exclude(self): self.assertEqual( list(NullableName.objects.filter(~~Q(name='i1'))), list(NullableName.objects.filter(Q(name='i1')))) self.assertNotIn( 'IS NOT NULL', str(NullableName.objects.filter(~~Q(name='i1')).query)) class EmptyStringsAsNullTest(TestCase): """ Filtering on non-null character fields works as expected. The reason for these tests is that Oracle treats '' as NULL, and this can cause problems in query construction. Refs #17957. """ @classmethod def setUpTestData(cls): cls.nc = NamedCategory.objects.create(name='') def test_direct_exclude(self): self.assertQuerysetEqual( NamedCategory.objects.exclude(name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_joined_exclude(self): self.assertQuerysetEqual( DumbCategory.objects.exclude(namedcategory__name__in=['nonexistent']), [self.nc.pk], attrgetter('pk') ) def test_21001(self): foo = NamedCategory.objects.create(name='foo') self.assertQuerysetEqual( NamedCategory.objects.exclude(name=''), [foo.pk], attrgetter('pk') ) class ProxyQueryCleanupTest(TestCase): def test_evaluated_proxy_count(self): """ Generating the query string doesn't alter the query's state in irreversible ways. Refs #18248. 
""" ProxyCategory.objects.create() qs = ProxyCategory.objects.all() self.assertEqual(qs.count(), 1) str(qs.query) self.assertEqual(qs.count(), 1) class WhereNodeTest(SimpleTestCase): class DummyNode: def as_sql(self, compiler, connection): return 'dummy', [] class MockCompiler: def compile(self, node): return node.as_sql(self, connection) def __call__(self, name): return connection.ops.quote_name(name) def test_empty_full_handling_conjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()]) self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()]) with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) def test_empty_full_handling_disjunction(self): compiler = WhereNodeTest.MockCompiler() w = WhereNode(children=[NothingNode()], connector='OR') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', [])) w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('dummy', [])) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', [])) def test_empty_nodes(self): compiler = WhereNodeTest.MockCompiler() empty_w = WhereNode() w = WhereNode(children=[empty_w, empty_w]) self.assertEqual(w.as_sql(compiler, connection), ('', [])) w.negate() with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.connector = 'OR' with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) w.negate() self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='OR') self.assertEqual(w.as_sql(compiler, connection), ('', [])) w = WhereNode(children=[empty_w, NothingNode()], connector='AND') with self.assertRaises(EmptyResultSet): w.as_sql(compiler, connection) class QuerySetExceptionTests(SimpleTestCase): def test_iter_exceptions(self): qs = ExtraInfo.objects.only('author') msg = "'ManyToOneRel' object has no attribute 'attname'" with self.assertRaisesMessage(AttributeError, msg): list(qs) def test_invalid_order_by(self): msg = ( "Cannot resolve keyword '*' into field. Choices are: created, id, " "name" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.order_by('*') def test_invalid_order_by_raw_column_alias(self): msg = ( "Cannot resolve keyword 'queries_author.name' into field. Choices " "are: cover, created, creator, creator_id, id, modified, name, " "note, note_id, tags" ) with self.assertRaisesMessage(FieldError, msg): Item.objects.values('creator__name').order_by('queries_author.name') def test_invalid_queryset_model(self): msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".' 
with self.assertRaisesMessage(ValueError, msg): list(Author.objects.filter(extra=Article.objects.all())) class NullJoinPromotionOrTest(TestCase): @classmethod def setUpTestData(cls): cls.d1 = ModelD.objects.create(name='foo') d2 = ModelD.objects.create(name='bar') cls.a1 = ModelA.objects.create(name='a1', d=cls.d1) c = ModelC.objects.create(name='c') b = ModelB.objects.create(name='b', c=c) cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2) def test_ticket_17886(self): # The first Q-object is generating the match, the rest of the filters # should not remove the match even if they do not match anything. The # problem here was that b__name generates a LOUTER JOIN, then # b__c__name generates join to c, which the ORM tried to promote but # failed as that join isn't nullable. q_obj = ( Q(d__name='foo') | Q(b__name='foo') | Q(b__c__name='foo') ) qset = ModelA.objects.filter(q_obj) self.assertEqual(list(qset), [self.a1]) # We generate one INNER JOIN to D. The join is direct and not nullable # so we can use INNER JOIN for it. However, we can NOT use INNER JOIN # for the b->c join, as a->b is nullable. self.assertEqual(str(qset.query).count('INNER JOIN'), 1) def test_isnull_filter_promotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~~Q(b__name__isnull=True)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) qs = ModelA.objects.filter(~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('LEFT OUTER'), 1) self.assertEqual(list(qs), [self.a1]) qs = ModelA.objects.filter(~~Q(b__name__isnull=False)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(list(qs), [self.a2]) def test_null_join_demotion(self): qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False)) self.assertIn(' INNER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False)) self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_ticket_21366(self): n = Note.objects.create(note='n', misc='m') e = ExtraInfo.objects.create(info='info', note=n) a = Author.objects.create(name='Author1', num=1, extra=e) Ranking.objects.create(rank=1, author=a) r1 = Report.objects.create(name='Foo', creator=a) r2 = Report.objects.create(name='Bar') Report.objects.create(name='Bar', creator=a) qs = Report.objects.filter( Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name='Foo') ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count(' JOIN '), 2) self.assertSequenceEqual(qs.order_by('name'), [r2, r1]) def test_ticket_21748(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') i3 = Identifier.objects.create(name='i3') Program.objects.create(identifier=i1) Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) 
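        # Data shape: i1 has both a Program and a Channel, i2 has only a
        # Program, and i3 has neither, which is what the filters below rely
        # on to distinguish the three identifiers.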
self.assertSequenceEqual(Identifier.objects.filter(program=None, channel=None), [i3]) self.assertSequenceEqual(Identifier.objects.exclude(program=None, channel=None).order_by('name'), [i1, i2]) def test_ticket_21748_double_negated_and(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) Program.objects.create(identifier=i2) # Check the ~~Q() (or equivalently .exclude(~Q)) works like Q() for # join promotion. qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk') qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_double_negated_or(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Test OR + doubleneg. The expected result is that channel is LOUTER # joined, program INNER joined qs1_filter = Identifier.objects.filter( Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id) ).order_by('pk') qs1_doubleneg = Identifier.objects.exclude( ~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x) self.assertEqual(str(qs1_filter.query).count('JOIN'), str(qs1_doubleneg.query).count('JOIN')) self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN')) self.assertEqual(str(qs1_filter.query).count('INNER JOIN'), str(qs1_doubleneg.query).count('INNER JOIN')) def test_ticket_21748_complex_filter(self): i1 = Identifier.objects.create(name='i1') i2 = Identifier.objects.create(name='i2') Identifier.objects.create(name='i3') p1 = Program.objects.create(identifier=i1) c1 = Channel.objects.create(identifier=i1) p2 = Program.objects.create(identifier=i2) # Finally, a more complex case, one time in a way where each # NOT is pushed to lowest level in the boolean tree, and # another query where this isn't done. qs1 = Identifier.objects.filter( ~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id)) ).order_by('pk') qs2 = Identifier.objects.filter( Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id)) ).order_by('pk') self.assertQuerysetEqual(qs1, qs2, lambda x: x) self.assertEqual(str(qs1.query).count('JOIN'), str(qs2.query).count('JOIN')) self.assertEqual(0, str(qs1.query).count('INNER JOIN')) self.assertEqual(str(qs1.query).count('INNER JOIN'), str(qs2.query).count('INNER JOIN')) class ReverseJoinTrimmingTest(TestCase): def test_reverse_trimming(self): # We don't accidentally trim reverse joins - we can't know if there is # anything on the other side of the join, so trimming reverse joins # can't be done, ever. t = Tag.objects.create() qs = Tag.objects.filter(annotation__tag=t.pk) self.assertIn('INNER JOIN', str(qs.query)) self.assertEqual(list(qs), []) class JoinReuseTest(TestCase): """ The queries reuse joins sensibly (for example, direct joins are always reused). 
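    Reverse foreign key joins, by contrast, are not reused across separate
    filter() calls, since each call may need to match a different related
    row (see test_revfk_noreuse below).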
""" def test_fk_reuse(self): qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_select_related(self): qs = Annotation.objects.filter(tag__name='foo').select_related('tag') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_annotation(self): qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_disjunction(self): qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar')) self.assertEqual(str(qs.query).count('JOIN'), 1) def test_fk_reuse_order_by(self): qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revo2o_reuse(self): qs = Detail.objects.filter(member__name='foo').filter(member__name='foo') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_revfk_noreuse(self): qs = Author.objects.filter(report__name='r4').filter(report__name='r1') self.assertEqual(str(qs.query).count('JOIN'), 2) def test_inverted_q_across_relations(self): """ When a trimmable join is specified in the query (here school__), the ORM detects it and removes unnecessary joins. The set of reusable joins are updated after trimming the query so that other lookups don't consider that the outer query's filters are in effect for the subquery (#26551). """ springfield_elementary = School.objects.create() hogward = School.objects.create() Student.objects.create(school=springfield_elementary) hp = Student.objects.create(school=hogward) Classroom.objects.create(school=hogward, name='Potion') Classroom.objects.create(school=springfield_elementary, name='Main') qs = Student.objects.filter( ~(Q(school__classroom__name='Main') & Q(school__classroom__has_blackboard=None)) ) self.assertSequenceEqual(qs, [hp]) class DisjunctionPromotionTests(TestCase): def test_disjunction_promotion_select_related(self): fk1 = FK1.objects.create(f1='f1', f2='f2') basea = BaseA.objects.create(a=fk1) qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2)) self.assertEqual(str(qs.query).count(' JOIN '), 0) qs = qs.select_related('a', 'b') self.assertEqual(str(qs.query).count(' INNER JOIN '), 0) self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2) with self.assertNumQueries(1): self.assertSequenceEqual(qs, [basea]) self.assertEqual(qs[0].a, fk1) self.assertIs(qs[0].b, None) def test_disjunction_promotion1(self): # Pre-existing join, add two ORed filters to the same join, # all joins can be INNER JOINS. qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) # Reverse the order of AND and OR filters. qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 2) def test_disjunction_promotion2(self): qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # Now we have two different joins in an ORed condition, these # must be OUTER joins. The pre-existing join should remain INNER. qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) # Reverse case. 
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) def test_disjunction_promotion3(self): qs = BaseA.objects.filter(a__f2='bar') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # The ANDed a__f2 filter allows us to use keep using INNER JOIN # even inside the ORed case. If the join to a__ returns nothing, # the ANDed filter for a__f2 can't be true. qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion3_demote(self): # This one needs demotion logic: the first filter causes a to be # outer joined, the second filter makes it inner join again. qs = BaseA.objects.filter( Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion4_demote(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) # Demote needed for the "a" join. It is marked as outer join by # above filter (even if it is trimmed away). qs = qs.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion4(self): qs = BaseA.objects.filter(a__f1='foo') self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion5_demote(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) # Note that the above filters on a force the join to an # inner join even if it is trimmed. self.assertEqual(str(qs.query).count('JOIN'), 0) qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo')) # So, now the a__f1 join doesn't need promotion. self.assertEqual(str(qs.query).count('INNER JOIN'), 1) # But b__f1 does. 
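        # Because a row can match through the a__f1 branch alone, the b join
        # has to stay LEFT OUTER so rows without a matching b aren't
        # filtered out, as the assertion below checks.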
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo')) # Now the join to a is created as LOUTER self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) def test_disjunction_promotion6(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) qs = qs.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('INNER JOIN'), 2) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0) def test_disjunction_promotion7(self): qs = BaseA.objects.filter(Q(a=1) | Q(a=2)) self.assertEqual(str(qs.query).count('JOIN'), 0) qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar'))) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) qs = BaseA.objects.filter( (Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) qs = BaseA.objects.filter( Q(a__f1='foo') | Q(a__f1='bar') & (Q(b__f1='bar') | Q(c__f1='foo')) ) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) def test_disjunction_promotion_fexpression(self): qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1) self.assertEqual(str(qs.query).count('INNER JOIN'), 1) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo')) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3) qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2))) self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2) self.assertEqual(str(qs.query).count('INNER JOIN'), 0) class ManyToManyExcludeTest(TestCase): def test_exclude_many_to_many(self): i_extra = Identifier.objects.create(name='extra') i_program = Identifier.objects.create(name='program') program = Program.objects.create(identifier=i_program) i_channel = Identifier.objects.create(name='channel') channel = Channel.objects.create(identifier=i_channel) channel.programs.add(program) # channel contains 'program1', so all Identifiers except that one # should be returned self.assertSequenceEqual( Identifier.objects.exclude(program__channel=channel).order_by('name'), [i_channel, i_extra], ) self.assertSequenceEqual( Identifier.objects.exclude(program__channel=None).order_by('name'), [i_program], ) def test_ticket_12823(self): pg3 = Page.objects.create(text='pg3') pg2 = Page.objects.create(text='pg2') pg1 = Page.objects.create(text='pg1') pa1 = Paragraph.objects.create(text='pa1') pa1.page.set([pg1, pg2]) pa2 = Paragraph.objects.create(text='pa2') pa2.page.set([pg2, pg3]) pa3 = Paragraph.objects.create(text='pa3') ch1 = Chapter.objects.create(title='ch1', paragraph=pa1) ch2 = Chapter.objects.create(title='ch2', paragraph=pa2) 
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3) b1 = Book.objects.create(title='b1', chapter=ch1) b2 = Book.objects.create(title='b2', chapter=ch2) b3 = Book.objects.create(title='b3', chapter=ch3) q = Book.objects.exclude(chapter__paragraph__page__text='pg1') self.assertNotIn('IS NOT NULL', str(q.query)) self.assertEqual(len(q), 2) self.assertNotIn(b1, q) self.assertIn(b2, q) self.assertIn(b3, q) class RelabelCloneTest(TestCase): def test_ticket_19964(self): my1 = MyObject.objects.create(data='foo') my1.parent = my1 my1.save() my2 = MyObject.objects.create(data='bar', parent=my1) parents = MyObject.objects.filter(parent=F('id')) children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id')) self.assertEqual(list(parents), [my1]) # Evaluating the children query (which has parents as part of it) does # not change results for the parents query. self.assertEqual(list(children), [my2]) self.assertEqual(list(parents), [my1]) class Ticket20101Tests(TestCase): def test_ticket_20101(self): """ Tests QuerySet ORed combining in exclude subquery case. """ t = Tag.objects.create(name='foo') a1 = Annotation.objects.create(tag=t, name='a1') a2 = Annotation.objects.create(tag=t, name='a2') a3 = Annotation.objects.create(tag=t, name='a3') n = Note.objects.create(note='foo', misc='bar') qs1 = Note.objects.exclude(annotation__in=[a1, a2]) qs2 = Note.objects.filter(annotation__in=[a3]) self.assertIn(n, qs1) self.assertNotIn(n, qs2) self.assertIn(n, (qs1 | qs2)) class EmptyStringPromotionTests(SimpleTestCase): def test_empty_string_promotion(self): qs = RelatedObject.objects.filter(single__name='') if connection.features.interprets_empty_strings_as_nulls: self.assertIn('LEFT OUTER JOIN', str(qs.query)) else: self.assertNotIn('LEFT OUTER JOIN', str(qs.query)) class ValuesSubqueryTests(TestCase): def test_values_in_subquery(self): # If a values() queryset is used, then the given values # will be used instead of forcing use of the relation's field. o1 = Order.objects.create(id=-2) o2 = Order.objects.create(id=-1) oi1 = OrderItem.objects.create(order=o1, status=0) oi1.status = oi1.pk oi1.save() OrderItem.objects.create(order=o2, status=0) # The query below should match o1 as it has related order_item # with id == status. 
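        # Because the subquery uses values_list('status'), its SELECT column
        # is status rather than the OrderItem primary key, so only o1 (whose
        # item has status == pk) can match the items__in condition.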
self.assertSequenceEqual(Order.objects.filter(items__in=OrderItem.objects.values_list('status')), [o1]) class DoubleInSubqueryTests(TestCase): def test_double_subquery_in(self): lfa1 = LeafA.objects.create(data='foo') lfa2 = LeafA.objects.create(data='bar') lfb1 = LeafB.objects.create(data='lfb1') lfb2 = LeafB.objects.create(data='lfb2') Join.objects.create(a=lfa1, b=lfb1) Join.objects.create(a=lfa2, b=lfb2) leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True) joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True) qs = LeafB.objects.filter(pk__in=joins) self.assertSequenceEqual(qs, [lfb1]) class Ticket18785Tests(SimpleTestCase): def test_ticket_18785(self): # Test join trimming from ticket18785 qs = Item.objects.exclude( note__isnull=False ).filter( name='something', creator__extra__isnull=True ).order_by() self.assertEqual(1, str(qs.query).count('INNER JOIN')) self.assertEqual(0, str(qs.query).count('OUTER JOIN')) class Ticket20788Tests(TestCase): def test_ticket_20788(self): Paragraph.objects.create() paragraph = Paragraph.objects.create() page = paragraph.page.create() chapter = Chapter.objects.create(paragraph=paragraph) Book.objects.create(chapter=chapter) paragraph2 = Paragraph.objects.create() Page.objects.create() chapter2 = Chapter.objects.create(paragraph=paragraph2) book2 = Book.objects.create(chapter=chapter2) sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page) self.assertSequenceEqual(sentences_not_in_pub, [book2]) class Ticket12807Tests(TestCase): def test_ticket_12807(self): p1 = Paragraph.objects.create() p2 = Paragraph.objects.create() # The ORed condition below should have no effect on the query - the # ~Q(pk__in=[]) will always be True. qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk)) self.assertSequenceEqual(qs, [p1]) class RelatedLookupTypeTests(TestCase): error = 'Cannot query "%s": Must be "%s" instance.' @classmethod def setUpTestData(cls): cls.oa = ObjectA.objects.create(name="oa") cls.poa = ProxyObjectA.objects.get(name="oa") cls.coa = ChildObjectA.objects.create(name="coa") cls.wrong_type = Order.objects.create(id=cls.oa.pk) cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1) cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2) cls.pob = ProxyObjectB.objects.all() cls.c = ObjectC.objects.create(childobjecta=cls.coa) def test_wrong_type_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup. """ # Passing incorrect object type with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.get(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.wrong_type]) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta=self.wrong_type) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob]) # Passing an object of the class on which query is done. 
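        # An instance of the queried model itself (ObjectB) is still the
        # wrong type for the objecta lookup and must raise ValueError too.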
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ObjectA._meta.object_name)): ObjectB.objects.filter(objecta__in=[self.poa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)): ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob]) def test_wrong_backward_lookup(self): """ A ValueError is raised when the incorrect object type is passed to a query lookup for backward relations. """ with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.filter(objectb__in=[self.oa, self.ob]) with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)): ObjectA.objects.exclude(objectb=self.oa) with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)): ObjectA.objects.get(objectb=self.wrong_type) def test_correct_lookup(self): """ When passing proxy model objects, child objects, or parent objects, lookups work fine. """ out_a = [self.oa] out_b = [self.ob, self.pob1] out_c = [self.c] # proxy model objects self.assertSequenceEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b) self.assertSequenceEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2) # child objects self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), []) self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b) self.assertSequenceEqual( ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by('name'), out_b ) # parent objects self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c) # QuerySet related object type checking shouldn't issue queries # (the querysets aren't evaluated here, hence zero queries) (#23266). with self.assertNumQueries(0): ObjectB.objects.filter(objecta__in=ObjectA.objects.all()) def test_values_queryset_lookup(self): """ #23396 - Ensure ValueQuerySets are not checked for compatibility with the lookup field """ # Make sure the num and objecta field values match. ob = ObjectB.objects.get(name='ob') ob.num = ob.objecta.pk ob.save() pob = ObjectB.objects.get(name='pob') pob.num = pob.objecta.pk pob.save() self.assertSequenceEqual(ObjectB.objects.filter( objecta__in=ObjectB.objects.all().values_list('num') ).order_by('pk'), [ob, pob]) class Ticket14056Tests(TestCase): def test_ticket_14056(self): s1 = SharedConnection.objects.create(data='s1') s2 = SharedConnection.objects.create(data='s2') s3 = SharedConnection.objects.create(data='s3') PointerA.objects.create(connection=s2) expected_ordering = ( [s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3] ) self.assertSequenceEqual(SharedConnection.objects.order_by('-pointera__connection', 'pk'), expected_ordering) class Ticket20955Tests(TestCase): def test_ticket_20955(self): jack = Staff.objects.create(name='jackstaff') jackstaff = StaffUser.objects.create(staff=jack) jill = Staff.objects.create(name='jillstaff') jillstaff = StaffUser.objects.create(staff=jill) task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task") task_get = Task.objects.get(pk=task.pk) # Load data so that assertNumQueries doesn't complain about the get # version's queries. 
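        # (The two select_related() chains below each span three relations,
        # which is why the compiled query is expected to contain six joins.)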
task_get.creator.staffuser.staff task_get.owner.staffuser.staff qs = Task.objects.select_related( 'creator__staffuser__staff', 'owner__staffuser__staff') self.assertEqual(str(qs.query).count(' JOIN '), 6) task_select_related = qs.get(pk=task.pk) with self.assertNumQueries(0): self.assertEqual(task_select_related.creator.staffuser.staff, task_get.creator.staffuser.staff) self.assertEqual(task_select_related.owner.staffuser.staff, task_get.owner.staffuser.staff) class Ticket21203Tests(TestCase): def test_ticket_21203(self): p = Ticket21203Parent.objects.create(parent_bool=True) c = Ticket21203Child.objects.create(parent=p) qs = Ticket21203Child.objects.select_related('parent').defer('parent__created') self.assertSequenceEqual(qs, [c]) self.assertIs(qs[0].parent.parent_bool, True) class ValuesJoinPromotionTests(TestCase): def test_values_no_promotion_for_existing(self): qs = Node.objects.filter(parent__parent__isnull=False) self.assertIn(' INNER JOIN ', str(qs.query)) qs = qs.values('parent__parent__id') self.assertIn(' INNER JOIN ', str(qs.query)) # Make sure there is a left outer join without the filter. qs = Node.objects.values('parent__parent__id') self.assertIn(' LEFT OUTER JOIN ', str(qs.query)) def test_non_nullable_fk_not_promoted(self): qs = ObjectB.objects.values('objecta__name') self.assertIn(' INNER JOIN ', str(qs.query)) def test_ticket_21376(self): a = ObjectA.objects.create() ObjectC.objects.create(objecta=a) qs = ObjectC.objects.filter( Q(objecta=a) | Q(objectb__objecta=a), ) qs = qs.filter( Q(objectb=1) | Q(objecta=a), ) self.assertEqual(qs.count(), 1) tblname = connection.ops.quote_name(ObjectB._meta.db_table) self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query)) class ForeignKeyToBaseExcludeTests(TestCase): def test_ticket_21787(self): sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1') sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2') sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3') c1 = CategoryItem.objects.create(category=sc1) CategoryItem.objects.create(category=sc2) self.assertSequenceEqual(SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by('name'), [sc2, sc3]) self.assertSequenceEqual(SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]) class ReverseM2MCustomPkTests(TestCase): def test_ticket_21879(self): cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1') cp1 = CustomPk.objects.create(name='cp1', extra='extra') cp1.custompktag_set.add(cpt1) self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1]) self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1]) class Ticket22429Tests(TestCase): def test_ticket_22429(self): sc1 = School.objects.create() st1 = Student.objects.create(school=sc1) sc2 = School.objects.create() st2 = Student.objects.create(school=sc2) cr = Classroom.objects.create(school=sc1) cr.students.add(st1) queryset = Student.objects.filter(~Q(classroom__school=F('school'))) self.assertSequenceEqual(queryset, [st2]) class Ticket23605Tests(TestCase): def test_ticket_23605(self): # Test filtering on a complicated q-object from ticket's report. # The query structure is such that we have multiple nested subqueries. # The original problem was that the inner queries weren't relabeled # correctly. # See also #24090. 
a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=10000.0) Ticket23605B.objects.create( field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1) complex_q = Q(pk__in=Ticket23605A.objects.filter( Q( # True for a1 as field_b0 = 10000, field_c0=10000 # False for a2 as no ticket23605b found ticket23605b__field_b0__gte=1000000 / F("ticket23605b__modelc_fk__field_c0") ) & # True for a1 (field_b1=True) Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter( ~( # Same filters as above commented filters, but # double-negated (one for Q() above, one for # parentheses). So, again a1 match, a2 not. Q(field_b1=True) & Q(field_b0__gte=1000000 / F("modelc_fk__field_c0")) ) ))).filter(ticket23605b__field_b1=True)) qs1 = Ticket23605A.objects.filter(complex_q) self.assertSequenceEqual(qs1, [a1]) qs2 = Ticket23605A.objects.exclude(complex_q) self.assertSequenceEqual(qs2, [a2]) class TestTicket24279(TestCase): def test_ticket_24278(self): School.objects.create() qs = School.objects.filter(Q(pk__in=()) | Q()) self.assertQuerysetEqual(qs, []) class TestInvalidValuesRelation(SimpleTestCase): def test_invalid_values(self): msg = "Field 'id' expected a number but got 'abc'." with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag='abc') with self.assertRaisesMessage(ValueError, msg): Annotation.objects.filter(tag__in=[123, 'abc']) class TestTicket24605(TestCase): def test_ticket_24605(self): """ Subquery table names should be quoted. """ i1 = Individual.objects.create(alive=True) RelatedIndividual.objects.create(related=i1) i2 = Individual.objects.create(alive=False) RelatedIndividual.objects.create(related=i2) i3 = Individual.objects.create(alive=True) i4 = Individual.objects.create(alive=False) self.assertSequenceEqual(Individual.objects.filter(Q(alive=False), Q(related_individual__isnull=True)), [i4]) self.assertSequenceEqual( Individual.objects.exclude(Q(alive=False), Q(related_individual__isnull=True)).order_by('pk'), [i1, i2, i3] ) class Ticket23622Tests(TestCase): @skipUnlessDBFeature('can_distinct_on_fields') def test_ticket_23622(self): """ Make sure __pk__in and __in work the same for related fields when using a distinct on subquery. 
""" a1 = Ticket23605A.objects.create() a2 = Ticket23605A.objects.create() c1 = Ticket23605C.objects.create(field_c0=0.0) Ticket23605B.objects.create( modela_fk=a1, field_b0=123, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=23, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=234, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a1, field_b0=12, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=567, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=76, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=7, field_b1=True, modelc_fk=c1, ) Ticket23605B.objects.create( modela_fk=a2, field_b0=56, field_b1=True, modelc_fk=c1, ) qx = ( Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) qy = ( Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) & Q(ticket23605b__field_b0__gte=300) ) self.assertEqual( set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)), set(Ticket23605A.objects.filter(qy).values_list('pk', flat=True)) ) self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
""" Various complex queries that have been problematic in the past. """ from django.db import models from django.db.models.functions import Now class DumbCategory(models.Model): pass class ProxyCategory(DumbCategory): class Meta: proxy = True class NamedCategory(DumbCategory): name = models.CharField(max_length=10) def __str__(self): return self.name class Tag(models.Model): name = models.CharField(max_length=10) parent = models.ForeignKey( 'self', models.SET_NULL, blank=True, null=True, related_name='children', ) category = models.ForeignKey(NamedCategory, models.SET_NULL, null=True, default=None) class Meta: ordering = ['name'] def __str__(self): return self.name class Note(models.Model): note = models.CharField(max_length=100) misc = models.CharField(max_length=25) tag = models.ForeignKey(Tag, models.SET_NULL, blank=True, null=True) negate = models.BooleanField(default=True) class Meta: ordering = ['note'] def __str__(self): return self.note class Annotation(models.Model): name = models.CharField(max_length=10) tag = models.ForeignKey(Tag, models.CASCADE) notes = models.ManyToManyField(Note) def __str__(self): return self.name class DateTimePK(models.Model): date = models.DateTimeField(primary_key=True, auto_now_add=True) class ExtraInfo(models.Model): info = models.CharField(max_length=100) note = models.ForeignKey(Note, models.CASCADE, null=True) value = models.IntegerField(null=True) date = models.ForeignKey(DateTimePK, models.SET_NULL, null=True) filterable = models.BooleanField(default=True) class Meta: ordering = ['info'] def __str__(self): return self.info class Author(models.Model): name = models.CharField(max_length=10) num = models.IntegerField(unique=True) extra = models.ForeignKey(ExtraInfo, models.CASCADE) class Meta: ordering = ['name'] def __str__(self): return self.name class Item(models.Model): name = models.CharField(max_length=10) created = models.DateTimeField() modified = models.DateTimeField(blank=True, null=True) tags = models.ManyToManyField(Tag, blank=True) creator = models.ForeignKey(Author, models.CASCADE) note = models.ForeignKey(Note, models.CASCADE) class Meta: ordering = ['-note', 'name'] def __str__(self): return self.name class Report(models.Model): name = models.CharField(max_length=10) creator = models.ForeignKey(Author, models.SET_NULL, to_field='num', null=True) def __str__(self): return self.name class ReportComment(models.Model): report = models.ForeignKey(Report, models.CASCADE) class Ranking(models.Model): rank = models.IntegerField() author = models.ForeignKey(Author, models.CASCADE) class Meta: # A complex ordering specification. Should stress the system a bit. ordering = ('author__extra__note', 'author__name', 'rank') def __str__(self): return '%d: %s' % (self.rank, self.author.name) class Cover(models.Model): title = models.CharField(max_length=50) item = models.ForeignKey(Item, models.CASCADE) class Meta: ordering = ['item'] def __str__(self): return self.title class Number(models.Model): num = models.IntegerField() other_num = models.IntegerField(null=True) another_num = models.IntegerField(null=True) def __str__(self): return str(self.num) # Symmetrical m2m field with a normal field using the reverse accessor name # ("valid"). class Valid(models.Model): valid = models.CharField(max_length=10) parent = models.ManyToManyField('self') class Meta: ordering = ['valid'] # Some funky cross-linked models for testing a couple of infinite recursion # cases. 
class X(models.Model): y = models.ForeignKey('Y', models.CASCADE) class Y(models.Model): x1 = models.ForeignKey(X, models.CASCADE, related_name='y1') # Some models with a cycle in the default ordering. This would be bad if we # didn't catch the infinite loop. class LoopX(models.Model): y = models.ForeignKey('LoopY', models.CASCADE) class Meta: ordering = ['y'] class LoopY(models.Model): x = models.ForeignKey(LoopX, models.CASCADE) class Meta: ordering = ['x'] class LoopZ(models.Model): z = models.ForeignKey('self', models.CASCADE) class Meta: ordering = ['z'] # A model and custom default manager combination. class CustomManager(models.Manager): def get_queryset(self): qs = super().get_queryset() return qs.filter(public=True, tag__name='t1') class ManagedModel(models.Model): data = models.CharField(max_length=10) tag = models.ForeignKey(Tag, models.CASCADE) public = models.BooleanField(default=True) objects = CustomManager() normal_manager = models.Manager() def __str__(self): return self.data # An inter-related setup with multiple paths from Child to Detail. class Detail(models.Model): data = models.CharField(max_length=10) class MemberManager(models.Manager): def get_queryset(self): return super().get_queryset().select_related("details") class Member(models.Model): name = models.CharField(max_length=10) details = models.OneToOneField(Detail, models.CASCADE, primary_key=True) objects = MemberManager() class Child(models.Model): person = models.OneToOneField(Member, models.CASCADE, primary_key=True) parent = models.ForeignKey(Member, models.CASCADE, related_name="children") # Custom primary keys interfered with ordering in the past. class CustomPk(models.Model): name = models.CharField(max_length=10, primary_key=True) extra = models.CharField(max_length=10) class Meta: ordering = ['name', 'extra'] class Related(models.Model): custom = models.ForeignKey(CustomPk, models.CASCADE, null=True) class CustomPkTag(models.Model): id = models.CharField(max_length=20, primary_key=True) custom_pk = models.ManyToManyField(CustomPk) tag = models.CharField(max_length=20) # An inter-related setup with a model subclass that has a nullable # path to another model, and a return path from that model. class Celebrity(models.Model): name = models.CharField("Name", max_length=20) greatest_fan = models.ForeignKey("Fan", models.SET_NULL, null=True, unique=True) def __str__(self): return self.name class TvChef(Celebrity): pass class Fan(models.Model): fan_of = models.ForeignKey(Celebrity, models.CASCADE) # Multiple foreign keys class LeafA(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class LeafB(models.Model): data = models.CharField(max_length=10) class Join(models.Model): a = models.ForeignKey(LeafA, models.CASCADE) b = models.ForeignKey(LeafB, models.CASCADE) class ReservedName(models.Model): name = models.CharField(max_length=20) order = models.IntegerField() def __str__(self): return self.name # A simpler shared-foreign-key setup that can expose some problems. 
class SharedConnection(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class PointerA(models.Model): connection = models.ForeignKey(SharedConnection, models.CASCADE) class PointerB(models.Model): connection = models.ForeignKey(SharedConnection, models.CASCADE) # Multi-layer ordering class SingleObject(models.Model): name = models.CharField(max_length=10) class Meta: ordering = ['name'] def __str__(self): return self.name class RelatedObject(models.Model): single = models.ForeignKey(SingleObject, models.SET_NULL, null=True) f = models.IntegerField(null=True) class Meta: ordering = ['single'] class Plaything(models.Model): name = models.CharField(max_length=10) others = models.ForeignKey(RelatedObject, models.SET_NULL, null=True) class Meta: ordering = ['others'] def __str__(self): return self.name class Article(models.Model): name = models.CharField(max_length=20) created = models.DateTimeField() def __str__(self): return self.name class Food(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name class Eaten(models.Model): food = models.ForeignKey(Food, models.SET_NULL, to_field="name", null=True) meal = models.CharField(max_length=20) def __str__(self): return "%s at %s" % (self.food, self.meal) class Node(models.Model): num = models.IntegerField(unique=True) parent = models.ForeignKey("self", models.SET_NULL, to_field="num", null=True) def __str__(self): return str(self.num) # Bug #12252 class ObjectA(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name def __iter__(self): # Ticket #23721 assert False, 'type checking should happen without calling model __iter__' class ProxyObjectA(ObjectA): class Meta: proxy = True class ChildObjectA(ObjectA): pass class ObjectB(models.Model): name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA, models.CASCADE) num = models.PositiveIntegerField() def __str__(self): return self.name class ProxyObjectB(ObjectB): class Meta: proxy = True class ObjectC(models.Model): name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA, models.SET_NULL, null=True) objectb = models.ForeignKey(ObjectB, models.SET_NULL, null=True) childobjecta = models.ForeignKey(ChildObjectA, models.SET_NULL, null=True, related_name='ca_pk') def __str__(self): return self.name class SimpleCategory(models.Model): name = models.CharField(max_length=25) def __str__(self): return self.name class SpecialCategory(SimpleCategory): special_name = models.CharField(max_length=35) def __str__(self): return self.name + " " + self.special_name class CategoryItem(models.Model): category = models.ForeignKey(SimpleCategory, models.CASCADE) def __str__(self): return "category item: " + str(self.category) class MixedCaseFieldCategoryItem(models.Model): CaTeGoRy = models.ForeignKey(SimpleCategory, models.CASCADE) class MixedCaseDbColumnCategoryItem(models.Model): category = models.ForeignKey(SimpleCategory, models.CASCADE, db_column='CaTeGoRy_Id') class OneToOneCategory(models.Model): new_name = models.CharField(max_length=15) category = models.OneToOneField(SimpleCategory, models.CASCADE) def __str__(self): return "one2one " + self.new_name class CategoryRelationship(models.Model): first = models.ForeignKey(SimpleCategory, models.CASCADE, related_name='first_rel') second = models.ForeignKey(SimpleCategory, models.CASCADE, related_name='second_rel') class CommonMixedCaseForeignKeys(models.Model): category = models.ForeignKey(CategoryItem, 
models.CASCADE) mixed_case_field_category = models.ForeignKey(MixedCaseFieldCategoryItem, models.CASCADE) mixed_case_db_column_category = models.ForeignKey(MixedCaseDbColumnCategoryItem, models.CASCADE) class NullableName(models.Model): name = models.CharField(max_length=20, null=True) class Meta: ordering = ['id'] class ModelD(models.Model): name = models.TextField() class ModelC(models.Model): name = models.TextField() class ModelB(models.Model): name = models.TextField() c = models.ForeignKey(ModelC, models.CASCADE) class ModelA(models.Model): name = models.TextField() b = models.ForeignKey(ModelB, models.SET_NULL, null=True) d = models.ForeignKey(ModelD, models.CASCADE) class Job(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name class JobResponsibilities(models.Model): job = models.ForeignKey(Job, models.CASCADE, to_field='name') responsibility = models.ForeignKey('Responsibility', models.CASCADE, to_field='description') class Responsibility(models.Model): description = models.CharField(max_length=20, unique=True) jobs = models.ManyToManyField(Job, through=JobResponsibilities, related_name='responsibilities') def __str__(self): return self.description # Models for disjunction join promotion low level testing. class FK1(models.Model): f1 = models.TextField() f2 = models.TextField() class FK2(models.Model): f1 = models.TextField() f2 = models.TextField() class FK3(models.Model): f1 = models.TextField() f2 = models.TextField() class BaseA(models.Model): a = models.ForeignKey(FK1, models.SET_NULL, null=True) b = models.ForeignKey(FK2, models.SET_NULL, null=True) c = models.ForeignKey(FK3, models.SET_NULL, null=True) class Identifier(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class Program(models.Model): identifier = models.OneToOneField(Identifier, models.CASCADE) class Channel(models.Model): programs = models.ManyToManyField(Program) identifier = models.OneToOneField(Identifier, models.CASCADE) class Book(models.Model): title = models.TextField() chapter = models.ForeignKey('Chapter', models.CASCADE) class Chapter(models.Model): title = models.TextField() paragraph = models.ForeignKey('Paragraph', models.CASCADE) class Paragraph(models.Model): text = models.TextField() page = models.ManyToManyField('Page') class Page(models.Model): text = models.TextField() class MyObject(models.Model): parent = models.ForeignKey('self', models.SET_NULL, null=True, blank=True, related_name='children') data = models.CharField(max_length=100) created_at = models.DateTimeField(auto_now_add=True) # Models for #17600 regressions class Order(models.Model): id = models.IntegerField(primary_key=True) name = models.CharField(max_length=12, null=True, default='') class Meta: ordering = ('pk',) def __str__(self): return str(self.pk) class OrderItem(models.Model): order = models.ForeignKey(Order, models.CASCADE, related_name='items') status = models.IntegerField() class Meta: ordering = ('pk',) def __str__(self): return str(self.pk) class BaseUser(models.Model): annotation = models.ForeignKey(Annotation, models.CASCADE, null=True, blank=True) class Task(models.Model): title = models.CharField(max_length=10) owner = models.ForeignKey(BaseUser, models.CASCADE, related_name='owner') creator = models.ForeignKey(BaseUser, models.CASCADE, related_name='creator') note = models.ForeignKey(Note, on_delete=models.CASCADE, null=True, blank=True) def __str__(self): return self.title class Staff(models.Model): name = 
models.CharField(max_length=10) def __str__(self): return self.name class StaffUser(BaseUser): staff = models.OneToOneField(Staff, models.CASCADE, related_name='user') def __str__(self): return str(self.staff) class Ticket21203Parent(models.Model): parentid = models.AutoField(primary_key=True) parent_bool = models.BooleanField(default=True) created = models.DateTimeField(auto_now=True) class Ticket21203Child(models.Model): childid = models.AutoField(primary_key=True) parent = models.ForeignKey(Ticket21203Parent, models.CASCADE) class Person(models.Model): name = models.CharField(max_length=128) class Company(models.Model): name = models.CharField(max_length=128) employees = models.ManyToManyField(Person, related_name='employers', through='Employment') def __str__(self): return self.name class Employment(models.Model): employer = models.ForeignKey(Company, models.CASCADE) employee = models.ForeignKey(Person, models.CASCADE) title = models.CharField(max_length=128) class School(models.Model): pass class Student(models.Model): school = models.ForeignKey(School, models.CASCADE) class Classroom(models.Model): name = models.CharField(max_length=20) has_blackboard = models.BooleanField(null=True) school = models.ForeignKey(School, models.CASCADE) students = models.ManyToManyField(Student, related_name='classroom') class Teacher(models.Model): schools = models.ManyToManyField(School) friends = models.ManyToManyField('self') class Ticket23605AParent(models.Model): pass class Ticket23605A(Ticket23605AParent): pass class Ticket23605B(models.Model): modela_fk = models.ForeignKey(Ticket23605A, models.CASCADE) modelc_fk = models.ForeignKey("Ticket23605C", models.CASCADE) field_b0 = models.IntegerField(null=True) field_b1 = models.BooleanField(default=False) class Ticket23605C(models.Model): field_c0 = models.FloatField() # db_table names have capital letters to ensure they are quoted in queries. class Individual(models.Model): alive = models.BooleanField() class Meta: db_table = 'Individual' class RelatedIndividual(models.Model): related = models.ForeignKey(Individual, models.CASCADE, related_name='related_individual') class Meta: db_table = 'RelatedIndividual' class CustomDbColumn(models.Model): custom_column = models.IntegerField(db_column='custom_name', null=True) ip_address = models.GenericIPAddressField(null=True) class CreatedField(models.DateTimeField): db_returning = True def __init__(self, *args, **kwargs): kwargs.setdefault('default', Now) super().__init__(*args, **kwargs) class ReturningModel(models.Model): created = CreatedField(editable=False) class NonIntegerPKReturningModel(models.Model): created = CreatedField(editable=False, primary_key=True) class JSONFieldNullable(models.Model): json_field = models.JSONField(blank=True, null=True) class Meta: required_db_features = {'supports_json_field'}
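
# Illustrative sketch (not referenced by the test suite): because ManagedModel
# installs CustomManager as its default manager, ManagedModel.objects is
# pre-filtered to public rows tagged 't1', while ManagedModel.normal_manager
# sees every row in the table.
def managed_model_managers_sketch():
    t1 = Tag.objects.create(name='t1')
    visible = ManagedModel.objects.create(data='visible', tag=t1, public=True)
    hidden = ManagedModel.normal_manager.create(data='hidden', tag=t1, public=False)
    # The default manager hides the non-public row; the plain manager does not.
    assert list(ManagedModel.objects.all()) == [visible]
    assert set(ManagedModel.normal_manager.all()) == {visible, hidden}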
import operator from django.db import DatabaseError, NotSupportedError, connection from django.db.models import Exists, F, IntegerField, OuterRef, Subquery, Value from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import Author, Celebrity, ExtraInfo, Number, ReservedName @skipUnlessDBFeature('supports_select_union') class QuerySetSetOperationTests(TestCase): @classmethod def setUpTestData(cls): Number.objects.bulk_create(Number(num=i, other_num=10 - i) for i in range(10)) def assertNumbersEqual(self, queryset, expected_numbers, ordered=True): self.assertQuerysetEqual(queryset, expected_numbers, operator.attrgetter('num'), ordered) def test_simple_union(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=8) qs3 = Number.objects.filter(num=5) self.assertNumbersEqual(qs1.union(qs2, qs3), [0, 1, 5, 8, 9], ordered=False) @skipUnlessDBFeature('supports_select_intersection') def test_simple_intersection(self): qs1 = Number.objects.filter(num__lte=5) qs2 = Number.objects.filter(num__gte=5) qs3 = Number.objects.filter(num__gte=4, num__lte=6) self.assertNumbersEqual(qs1.intersection(qs2, qs3), [5], ordered=False) @skipUnlessDBFeature('supports_select_intersection') def test_intersection_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() reserved_name = qs1.intersection(qs1).values('name', 'order', 'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.intersection(qs1).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) @skipUnlessDBFeature('supports_select_difference') def test_simple_difference(self): qs1 = Number.objects.filter(num__lte=5) qs2 = Number.objects.filter(num__lte=4) self.assertNumbersEqual(qs1.difference(qs2), [5], ordered=False) def test_union_distinct(self): qs1 = Number.objects.all() qs2 = Number.objects.all() self.assertEqual(len(list(qs1.union(qs2, all=True))), 20) self.assertEqual(len(list(qs1.union(qs2))), 10) def test_union_none(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=8) qs3 = qs1.union(qs2) self.assertSequenceEqual(qs3.none(), []) self.assertNumbersEqual(qs3, [0, 1, 8, 9], ordered=False) @skipUnlessDBFeature('supports_select_intersection') def test_intersection_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 = Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.intersection(qs2)), 0) self.assertEqual(len(qs1.intersection(qs3)), 0) self.assertEqual(len(qs2.intersection(qs1)), 0) self.assertEqual(len(qs3.intersection(qs1)), 0) self.assertEqual(len(qs2.intersection(qs2)), 0) self.assertEqual(len(qs3.intersection(qs3)), 0) @skipUnlessDBFeature('supports_select_difference') def test_difference_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 = Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.difference(qs2)), 10) self.assertEqual(len(qs1.difference(qs3)), 10) self.assertEqual(len(qs2.difference(qs1)), 0) self.assertEqual(len(qs3.difference(qs1)), 0) self.assertEqual(len(qs2.difference(qs2)), 0) self.assertEqual(len(qs3.difference(qs3)), 0) @skipUnlessDBFeature('supports_select_difference') def test_difference_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() qs2 = ReservedName.objects.none() reserved_name = qs1.difference(qs2).values('name', 'order', 
'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.difference(qs2).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) def test_union_with_empty_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.none() qs3 = Number.objects.filter(pk__in=[]) self.assertEqual(len(qs1.union(qs2)), 10) self.assertEqual(len(qs2.union(qs1)), 10) self.assertEqual(len(qs1.union(qs3)), 10) self.assertEqual(len(qs3.union(qs1)), 10) self.assertEqual(len(qs2.union(qs1, qs1, qs1)), 10) self.assertEqual(len(qs2.union(qs1, qs1, all=True)), 20) self.assertEqual(len(qs2.union(qs2)), 0) self.assertEqual(len(qs3.union(qs3)), 0) def test_empty_qs_union_with_ordered_qs(self): qs1 = Number.objects.all().order_by('num') qs2 = Number.objects.none().union(qs1).order_by('num') self.assertEqual(list(qs1), list(qs2)) def test_limits(self): qs1 = Number.objects.all() qs2 = Number.objects.all() self.assertEqual(len(list(qs1.union(qs2)[:2])), 2) def test_ordering(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=2, num__lte=3) self.assertNumbersEqual(qs1.union(qs2).order_by('-num'), [3, 2, 1, 0]) def test_ordering_by_alias(self): qs1 = Number.objects.filter(num__lte=1).values(alias=F('num')) qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('num')) self.assertQuerysetEqual( qs1.union(qs2).order_by('-alias'), [3, 2, 1, 0], operator.itemgetter('alias'), ) def test_ordering_by_f_expression(self): qs1 = Number.objects.filter(num__lte=1) qs2 = Number.objects.filter(num__gte=2, num__lte=3) self.assertNumbersEqual(qs1.union(qs2).order_by(F('num').desc()), [3, 2, 1, 0]) def test_ordering_by_f_expression_and_alias(self): qs1 = Number.objects.filter(num__lte=1).values(alias=F('other_num')) qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('other_num')) self.assertQuerysetEqual( qs1.union(qs2).order_by(F('alias').desc()), [10, 9, 8, 7], operator.itemgetter('alias'), ) Number.objects.create(num=-1) self.assertQuerysetEqual( qs1.union(qs2).order_by(F('alias').desc(nulls_last=True)), [10, 9, 8, 7, None], operator.itemgetter('alias'), ) def test_union_with_values(self): ReservedName.objects.create(name='a', order=2) qs1 = ReservedName.objects.all() reserved_name = qs1.union(qs1).values('name', 'order', 'id').get() self.assertEqual(reserved_name['name'], 'a') self.assertEqual(reserved_name['order'], 2) reserved_name = qs1.union(qs1).values_list('name', 'order', 'id').get() self.assertEqual(reserved_name[:2], ('a', 2)) # List of columns can be changed. 
reserved_name = qs1.union(qs1).values_list('order').get() self.assertEqual(reserved_name, (2,)) def test_union_with_two_annotated_values_list(self): qs1 = Number.objects.filter(num=1).annotate( count=Value(0, IntegerField()), ).values_list('num', 'count') qs2 = Number.objects.filter(num=2).values('pk').annotate( count=F('num'), ).annotate( num=Value(1, IntegerField()), ).values_list('num', 'count') self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)]) def test_union_with_extra_and_values_list(self): qs1 = Number.objects.filter(num=1).extra( select={'count': 0}, ).values_list('num', 'count') qs2 = Number.objects.filter(num=2).extra(select={'count': 1}) self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)]) def test_union_with_values_list_on_annotated_and_unannotated(self): ReservedName.objects.create(name='rn1', order=1) qs1 = Number.objects.annotate( has_reserved_name=Exists(ReservedName.objects.filter(order=OuterRef('num'))) ).filter(has_reserved_name=True) qs2 = Number.objects.filter(num=9) self.assertCountEqual(qs1.union(qs2).values_list('num', flat=True), [1, 9]) def test_union_with_values_list_and_order(self): ReservedName.objects.bulk_create([ ReservedName(name='rn1', order=7), ReservedName(name='rn2', order=5), ReservedName(name='rn0', order=6), ReservedName(name='rn9', order=-1), ]) qs1 = ReservedName.objects.filter(order__gte=6) qs2 = ReservedName.objects.filter(order__lte=5) union_qs = qs1.union(qs2) for qs, expected_result in ( # Order by a single column. (union_qs.order_by('-pk').values_list('order', flat=True), [-1, 6, 5, 7]), (union_qs.order_by('pk').values_list('order', flat=True), [7, 5, 6, -1]), (union_qs.values_list('order', flat=True).order_by('-pk'), [-1, 6, 5, 7]), (union_qs.values_list('order', flat=True).order_by('pk'), [7, 5, 6, -1]), # Order by multiple columns. 
(union_qs.order_by('-name', 'pk').values_list('order', flat=True), [-1, 5, 7, 6]), (union_qs.values_list('order', flat=True).order_by('-name', 'pk'), [-1, 5, 7, 6]), ): with self.subTest(qs=qs): self.assertEqual(list(qs), expected_result) def test_union_with_values_list_and_order_on_annotation(self): qs1 = Number.objects.annotate( annotation=Value(-1), multiplier=F('annotation'), ).filter(num__gte=6) qs2 = Number.objects.annotate( annotation=Value(2), multiplier=F('annotation'), ).filter(num__lte=5) self.assertSequenceEqual( qs1.union(qs2).order_by('annotation', 'num').values_list('num', flat=True), [6, 7, 8, 9, 0, 1, 2, 3, 4, 5], ) self.assertQuerysetEqual( qs1.union(qs2).order_by( F('annotation') * F('multiplier'), 'num', ).values('num'), [6, 7, 8, 9, 0, 1, 2, 3, 4, 5], operator.itemgetter('num'), ) def test_union_multiple_models_with_values_list_and_order(self): reserved_name = ReservedName.objects.create(name='rn1', order=0) qs1 = Celebrity.objects.all() qs2 = ReservedName.objects.all() self.assertSequenceEqual( qs1.union(qs2).order_by('name').values_list('pk', flat=True), [reserved_name.pk], ) def test_union_multiple_models_with_values_list_and_order_by_extra_select(self): reserved_name = ReservedName.objects.create(name='rn1', order=0) qs1 = Celebrity.objects.extra(select={'extra_name': 'name'}) qs2 = ReservedName.objects.extra(select={'extra_name': 'name'}) self.assertSequenceEqual( qs1.union(qs2).order_by('extra_name').values_list('pk', flat=True), [reserved_name.pk], ) def test_union_in_subquery(self): ReservedName.objects.bulk_create([ ReservedName(name='rn1', order=8), ReservedName(name='rn2', order=1), ReservedName(name='rn3', order=5), ]) qs1 = Number.objects.filter(num__gt=7, num=OuterRef('order')) qs2 = Number.objects.filter(num__lt=2, num=OuterRef('order')) self.assertCountEqual( ReservedName.objects.annotate( number=Subquery(qs1.union(qs2).values('num')), ).filter(number__isnull=False).values_list('order', flat=True), [8, 1], ) def test_union_in_subquery_related_outerref(self): e1 = ExtraInfo.objects.create(value=7, info='e3') e2 = ExtraInfo.objects.create(value=5, info='e2') e3 = ExtraInfo.objects.create(value=1, info='e1') Author.objects.bulk_create([ Author(name='a1', num=1, extra=e1), Author(name='a2', num=3, extra=e2), Author(name='a3', num=2, extra=e3), ]) qs1 = ExtraInfo.objects.order_by().filter(value=OuterRef('num')) qs2 = ExtraInfo.objects.order_by().filter(value__lt=OuterRef('extra__value')) qs = Author.objects.annotate( info=Subquery(qs1.union(qs2).values('info')[:1]), ).filter(info__isnull=False).values_list('name', flat=True) self.assertCountEqual(qs, ['a1', 'a2']) # Combined queries don't mutate. 
self.assertCountEqual(qs, ['a1', 'a2']) def test_count_union(self): qs1 = Number.objects.filter(num__lte=1).values('num') qs2 = Number.objects.filter(num__gte=2, num__lte=3).values('num') self.assertEqual(qs1.union(qs2).count(), 4) def test_count_union_empty_result(self): qs = Number.objects.filter(pk__in=[]) self.assertEqual(qs.union(qs).count(), 0) @skipUnlessDBFeature('supports_select_difference') def test_count_difference(self): qs1 = Number.objects.filter(num__lt=10) qs2 = Number.objects.filter(num__lt=9) self.assertEqual(qs1.difference(qs2).count(), 1) @skipUnlessDBFeature('supports_select_intersection') def test_count_intersection(self): qs1 = Number.objects.filter(num__gte=5) qs2 = Number.objects.filter(num__lte=5) self.assertEqual(qs1.intersection(qs2).count(), 1) def test_exists_union(self): qs1 = Number.objects.filter(num__gte=5) qs2 = Number.objects.filter(num__lte=5) with CaptureQueriesContext(connection) as context: self.assertIs(qs1.union(qs2).exists(), True) captured_queries = context.captured_queries self.assertEqual(len(captured_queries), 1) captured_sql = captured_queries[0]['sql'] self.assertNotIn( connection.ops.quote_name(Number._meta.pk.column), captured_sql, ) self.assertEqual( captured_sql.count(connection.ops.limit_offset_sql(None, 1)), 3 if connection.features.supports_slicing_ordering_in_compound else 1 ) def test_exists_union_empty_result(self): qs = Number.objects.filter(pk__in=[]) self.assertIs(qs.union(qs).exists(), False) @skipUnlessDBFeature('supports_select_intersection') def test_exists_intersection(self): qs1 = Number.objects.filter(num__gt=5) qs2 = Number.objects.filter(num__lt=5) self.assertIs(qs1.intersection(qs1).exists(), True) self.assertIs(qs1.intersection(qs2).exists(), False) @skipUnlessDBFeature('supports_select_difference') def test_exists_difference(self): qs1 = Number.objects.filter(num__gte=5) qs2 = Number.objects.filter(num__gte=3) self.assertIs(qs1.difference(qs2).exists(), False) self.assertIs(qs2.difference(qs1).exists(), True) def test_get_union(self): qs = Number.objects.filter(num=2) self.assertEqual(qs.union(qs).get().num, 2) @skipUnlessDBFeature('supports_select_difference') def test_get_difference(self): qs1 = Number.objects.all() qs2 = Number.objects.exclude(num=2) self.assertEqual(qs1.difference(qs2).get().num, 2) @skipUnlessDBFeature('supports_select_intersection') def test_get_intersection(self): qs1 = Number.objects.all() qs2 = Number.objects.filter(num=2) self.assertEqual(qs1.intersection(qs2).get().num, 2) @skipUnlessDBFeature('supports_slicing_ordering_in_compound') def test_ordering_subqueries(self): qs1 = Number.objects.order_by('num')[:2] qs2 = Number.objects.order_by('-num')[:2] self.assertNumbersEqual(qs1.union(qs2).order_by('-num')[:4], [9, 8, 1, 0]) @skipIfDBFeature('supports_slicing_ordering_in_compound') def test_unsupported_ordering_slicing_raises_db_error(self): qs1 = Number.objects.all() qs2 = Number.objects.all() qs3 = Number.objects.all() msg = 'LIMIT/OFFSET not allowed in subqueries of compound statements' with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2[:10])) msg = 'ORDER BY not allowed in subqueries of compound statements' with self.assertRaisesMessage(DatabaseError, msg): list(qs1.order_by('id').union(qs2)) with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by('id').union(qs3)) @skipIfDBFeature('supports_select_intersection') def test_unsupported_intersection_raises_db_error(self): qs1 = Number.objects.all() qs2 = Number.objects.all() msg = 'intersection is 
not supported on this database backend' with self.assertRaisesMessage(NotSupportedError, msg): list(qs1.intersection(qs2)) def test_combining_multiple_models(self): ReservedName.objects.create(name='99 little bugs', order=99) qs1 = Number.objects.filter(num=1).values_list('num', flat=True) qs2 = ReservedName.objects.values_list('order') self.assertEqual(list(qs1.union(qs2).order_by('num')), [1, 99]) def test_order_raises_on_non_selected_column(self): qs1 = Number.objects.filter().annotate( annotation=Value(1, IntegerField()), ).values('annotation', num2=F('num')) qs2 = Number.objects.filter().values('id', 'num') # Should not raise list(qs1.union(qs2).order_by('annotation')) list(qs1.union(qs2).order_by('num2')) msg = 'ORDER BY term does not match any column in the result set' # 'id' is not part of the select with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by('id')) # 'num' got realiased to num2 with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by('num')) with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by(F('num'))) with self.assertRaisesMessage(DatabaseError, msg): list(qs1.union(qs2).order_by(F('num').desc())) # switched order, now 'exists' again: list(qs2.union(qs1).order_by('num')) @skipUnlessDBFeature('supports_select_difference', 'supports_select_intersection') def test_qs_with_subcompound_qs(self): qs1 = Number.objects.all() qs2 = Number.objects.intersection(Number.objects.filter(num__gt=1)) self.assertEqual(qs1.difference(qs2).count(), 2) def test_order_by_same_type(self): qs = Number.objects.all() union = qs.union(qs) numbers = list(range(10)) self.assertNumbersEqual(union.order_by('num'), numbers) self.assertNumbersEqual(union.order_by('other_num'), reversed(numbers)) def test_unsupported_operations_on_combined_qs(self): qs = Number.objects.all() msg = 'Calling QuerySet.%s() after %s() is not supported.' combinators = ['union'] if connection.features.supports_select_difference: combinators.append('difference') if connection.features.supports_select_intersection: combinators.append('intersection') for combinator in combinators: for operation in ( 'alias', 'annotate', 'defer', 'delete', 'distinct', 'exclude', 'extra', 'filter', 'only', 'prefetch_related', 'select_related', 'update', ): with self.subTest(combinator=combinator, operation=operation): with self.assertRaisesMessage( NotSupportedError, msg % (operation, combinator), ): getattr(getattr(qs, combinator)(qs), operation)() with self.assertRaisesMessage( NotSupportedError, msg % ('contains', combinator), ): obj = Number.objects.first() getattr(qs, combinator)(qs).contains(obj) def test_get_with_filters_unsupported_on_combined_qs(self): qs = Number.objects.all() msg = 'Calling QuerySet.get(...) with filters after %s() is not supported.' combinators = ['union'] if connection.features.supports_select_difference: combinators.append('difference') if connection.features.supports_select_intersection: combinators.append('intersection') for combinator in combinators: with self.subTest(combinator=combinator): with self.assertRaisesMessage(NotSupportedError, msg % combinator): getattr(qs, combinator)(qs).get(num=2) def test_operator_on_combined_qs_error(self): qs = Number.objects.all() msg = 'Cannot use %s operator with combined queryset.' 
combinators = ['union'] if connection.features.supports_select_difference: combinators.append('difference') if connection.features.supports_select_intersection: combinators.append('intersection') operators = [ ('|', operator.or_), ('&', operator.and_), ] for combinator in combinators: combined_qs = getattr(qs, combinator)(qs) for operator_, operator_func in operators: with self.subTest(combinator=combinator): with self.assertRaisesMessage(TypeError, msg % operator_): operator_func(qs, combined_qs) with self.assertRaisesMessage(TypeError, msg % operator_): operator_func(combined_qs, qs)
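
# A minimal sketch of the ordering rule the tests above exercise: after a
# combinator such as union(), order_by() may only name columns or aliases that
# are part of the combined result set, and those names come from the first
# queryset in the combination (here, 'num' and 'alias').
def union_ordering_sketch():
    qs1 = Number.objects.values('num', alias=F('other_num'))
    qs2 = Number.objects.values('num', alias=F('other_num'))
    combined = qs1.union(qs2)
    ordered = combined.order_by('alias')  # allowed: 'alias' is in the result set
    # combined.order_by('other_num') would raise a DatabaseError on most
    # backends because 'other_num' is not a selected column name.
    return ordered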
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models.sql import Query
from django.test import SimpleTestCase

from .models import Item


class SQLCompilerTest(SimpleTestCase):
    def test_repr(self):
        query = Query(Item)
        compiler = query.get_compiler(DEFAULT_DB_ALIAS, connection)
        self.assertEqual(
            repr(compiler),
            f"<SQLCompiler model=Item connection="
            f"<DatabaseWrapper vendor={connection.vendor!r} alias='default'> "
            f"using='default'>"
        )
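
# Sketch (assuming the default database alias is configured, as it is in the
# test settings): the compiler returned by Query.get_compiler() is the object
# that renders the Query tree to SQL, which is also what str(queryset.query)
# relies on.
def compiler_sql_sketch():
    query = Query(Item)
    compiler = query.get_compiler(DEFAULT_DB_ALIAS)
    sql, params = compiler.as_sql()
    return sql, params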
from datetime import datetime from django.core.exceptions import FieldError from django.db.models import BooleanField, CharField, F, Q from django.db.models.expressions import Col, Func from django.db.models.fields.related_lookups import RelatedIsNull from django.db.models.functions import Lower from django.db.models.lookups import Exact, GreaterThan, IsNull, LessThan from django.db.models.sql.query import JoinPromoter, Query from django.db.models.sql.where import OR from django.test import SimpleTestCase from django.test.utils import register_lookup from .models import Author, Item, ObjectC, Ranking class TestQuery(SimpleTestCase): def test_simple_query(self): query = Query(Author) where = query.build_where(Q(num__gt=2)) lookup = where.children[0] self.assertIsInstance(lookup, GreaterThan) self.assertEqual(lookup.rhs, 2) self.assertEqual(lookup.lhs.target, Author._meta.get_field('num')) def test_non_alias_cols_query(self): query = Query(Author, alias_cols=False) where = query.build_where(Q(num__gt=2, name__isnull=False) | Q(num__lt=F('id'))) name_isnull_lookup, num_gt_lookup = where.children[0].children self.assertIsInstance(num_gt_lookup, GreaterThan) self.assertIsInstance(num_gt_lookup.lhs, Col) self.assertIsNone(num_gt_lookup.lhs.alias) self.assertIsInstance(name_isnull_lookup, IsNull) self.assertIsInstance(name_isnull_lookup.lhs, Col) self.assertIsNone(name_isnull_lookup.lhs.alias) num_lt_lookup = where.children[1] self.assertIsInstance(num_lt_lookup, LessThan) self.assertIsInstance(num_lt_lookup.rhs, Col) self.assertIsNone(num_lt_lookup.rhs.alias) self.assertIsInstance(num_lt_lookup.lhs, Col) self.assertIsNone(num_lt_lookup.lhs.alias) def test_complex_query(self): query = Query(Author) where = query.build_where(Q(num__gt=2) | Q(num__lt=0)) self.assertEqual(where.connector, OR) lookup = where.children[0] self.assertIsInstance(lookup, GreaterThan) self.assertEqual(lookup.rhs, 2) self.assertEqual(lookup.lhs.target, Author._meta.get_field('num')) lookup = where.children[1] self.assertIsInstance(lookup, LessThan) self.assertEqual(lookup.rhs, 0) self.assertEqual(lookup.lhs.target, Author._meta.get_field('num')) def test_multiple_fields(self): query = Query(Item, alias_cols=False) where = query.build_where(Q(modified__gt=F('created'))) lookup = where.children[0] self.assertIsInstance(lookup, GreaterThan) self.assertIsInstance(lookup.rhs, Col) self.assertIsNone(lookup.rhs.alias) self.assertIsInstance(lookup.lhs, Col) self.assertIsNone(lookup.lhs.alias) self.assertEqual(lookup.rhs.target, Item._meta.get_field('created')) self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified')) def test_transform(self): query = Query(Author, alias_cols=False) with register_lookup(CharField, Lower): where = query.build_where(~Q(name__lower='foo')) lookup = where.children[0] self.assertIsInstance(lookup, Exact) self.assertIsInstance(lookup.lhs, Lower) self.assertIsInstance(lookup.lhs.lhs, Col) self.assertIsNone(lookup.lhs.lhs.alias) self.assertEqual(lookup.lhs.lhs.target, Author._meta.get_field('name')) def test_negated_nullable(self): query = Query(Item) where = query.build_where(~Q(modified__lt=datetime(2017, 1, 1))) self.assertTrue(where.negated) lookup = where.children[0] self.assertIsInstance(lookup, LessThan) self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified')) lookup = where.children[1] self.assertIsInstance(lookup, IsNull) self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified')) def test_foreign_key(self): query = Query(Item) msg = 'Joined field references are 
not permitted in this query' with self.assertRaisesMessage(FieldError, msg): query.build_where(Q(creator__num__gt=2)) def test_foreign_key_f(self): query = Query(Ranking) with self.assertRaises(FieldError): query.build_where(Q(rank__gt=F('author__num'))) def test_foreign_key_exclusive(self): query = Query(ObjectC, alias_cols=False) where = query.build_where(Q(objecta=None) | Q(objectb=None)) a_isnull = where.children[0] self.assertIsInstance(a_isnull, RelatedIsNull) self.assertIsInstance(a_isnull.lhs, Col) self.assertIsNone(a_isnull.lhs.alias) self.assertEqual(a_isnull.lhs.target, ObjectC._meta.get_field('objecta')) b_isnull = where.children[1] self.assertIsInstance(b_isnull, RelatedIsNull) self.assertIsInstance(b_isnull.lhs, Col) self.assertIsNone(b_isnull.lhs.alias) self.assertEqual(b_isnull.lhs.target, ObjectC._meta.get_field('objectb')) def test_clone_select_related(self): query = Query(Item) query.add_select_related(['creator']) clone = query.clone() clone.add_select_related(['note', 'creator__extra']) self.assertEqual(query.select_related, {'creator': {}}) def test_iterable_lookup_value(self): query = Query(Item) where = query.build_where(Q(name=['a', 'b'])) name_exact = where.children[0] self.assertIsInstance(name_exact, Exact) self.assertEqual(name_exact.rhs, "['a', 'b']") def test_filter_conditional(self): query = Query(Item) where = query.build_where(Func(output_field=BooleanField())) exact = where.children[0] self.assertIsInstance(exact, Exact) self.assertIsInstance(exact.lhs, Func) self.assertIs(exact.rhs, True) def test_filter_conditional_join(self): query = Query(Item) filter_expr = Func('note__note', output_field=BooleanField()) msg = 'Joined field references are not permitted in this query' with self.assertRaisesMessage(FieldError, msg): query.build_where(filter_expr) def test_filter_non_conditional(self): query = Query(Item) msg = 'Cannot filter against a non-conditional expression.' with self.assertRaisesMessage(TypeError, msg): query.build_where(Func(output_field=CharField())) class JoinPromoterTest(SimpleTestCase): def test_repr(self): self.assertEqual( repr(JoinPromoter('AND', 3, True)), "JoinPromoter(connector='AND', num_children=3, negated=True)", )
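
# Sketch of where these WhereNode trees end up (assumes the default database
# alias is configured): a node built by Query.build_where() can be rendered to
# a parameterized SQL fragment through a compiler, roughly what the ORM does
# when assembling a full statement.
def q_to_sql_fragment_sketch():
    from django.db import DEFAULT_DB_ALIAS, connection
    query = Query(Author)
    where = query.build_where(Q(num__gt=2))
    compiler = query.get_compiler(DEFAULT_DB_ALIAS)
    sql, params = where.as_sql(compiler, connection)
    return sql, params  # e.g. ('"queries_author"."num" > %s', [2])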
import datetime from django.core.exceptions import FieldDoesNotExist from django.db.models import F from django.db.models.functions import Lower from django.test import TestCase, skipUnlessDBFeature from .models import ( Article, CustomDbColumn, CustomPk, Detail, Individual, JSONFieldNullable, Member, Note, Number, Order, Paragraph, RelatedObject, SingleObject, SpecialCategory, Tag, Valid, ) class BulkUpdateNoteTests(TestCase): @classmethod def setUpTestData(cls): cls.notes = [ Note.objects.create(note=str(i), misc=str(i)) for i in range(10) ] def create_tags(self): self.tags = [ Tag.objects.create(name=str(i)) for i in range(10) ] def test_simple(self): for note in self.notes: note.note = 'test-%s' % note.id with self.assertNumQueries(1): Note.objects.bulk_update(self.notes, ['note']) self.assertCountEqual( Note.objects.values_list('note', flat=True), [cat.note for cat in self.notes] ) def test_multiple_fields(self): for note in self.notes: note.note = 'test-%s' % note.id note.misc = 'misc-%s' % note.id with self.assertNumQueries(1): Note.objects.bulk_update(self.notes, ['note', 'misc']) self.assertCountEqual( Note.objects.values_list('note', flat=True), [cat.note for cat in self.notes] ) self.assertCountEqual( Note.objects.values_list('misc', flat=True), [cat.misc for cat in self.notes] ) def test_batch_size(self): with self.assertNumQueries(len(self.notes)): Note.objects.bulk_update(self.notes, fields=['note'], batch_size=1) def test_unsaved_models(self): objs = self.notes + [Note(note='test', misc='test')] msg = 'All bulk_update() objects must have a primary key set.' with self.assertRaisesMessage(ValueError, msg): Note.objects.bulk_update(objs, fields=['note']) def test_foreign_keys_do_not_lookup(self): self.create_tags() for note, tag in zip(self.notes, self.tags): note.tag = tag with self.assertNumQueries(1): Note.objects.bulk_update(self.notes, ['tag']) self.assertSequenceEqual(Note.objects.filter(tag__isnull=False), self.notes) def test_set_field_to_null(self): self.create_tags() Note.objects.update(tag=self.tags[0]) for note in self.notes: note.tag = None Note.objects.bulk_update(self.notes, ['tag']) self.assertCountEqual(Note.objects.filter(tag__isnull=True), self.notes) def test_set_mixed_fields_to_null(self): self.create_tags() midpoint = len(self.notes) // 2 top, bottom = self.notes[:midpoint], self.notes[midpoint:] for note in top: note.tag = None for note in bottom: note.tag = self.tags[0] Note.objects.bulk_update(self.notes, ['tag']) self.assertCountEqual(Note.objects.filter(tag__isnull=True), top) self.assertCountEqual(Note.objects.filter(tag__isnull=False), bottom) def test_functions(self): Note.objects.update(note='TEST') for note in self.notes: note.note = Lower('note') Note.objects.bulk_update(self.notes, ['note']) self.assertEqual(set(Note.objects.values_list('note', flat=True)), {'test'}) # Tests that use self.notes go here, otherwise put them in another class. class BulkUpdateTests(TestCase): def test_no_fields(self): msg = 'Field names must be given to bulk_update().' with self.assertRaisesMessage(ValueError, msg): Note.objects.bulk_update([], fields=[]) def test_invalid_batch_size(self): msg = 'Batch size must be a positive integer.' 
with self.assertRaisesMessage(ValueError, msg): Note.objects.bulk_update([], fields=['note'], batch_size=-1) def test_nonexistent_field(self): with self.assertRaisesMessage(FieldDoesNotExist, "Note has no field named 'nonexistent'"): Note.objects.bulk_update([], ['nonexistent']) pk_fields_error = 'bulk_update() cannot be used with primary key fields.' def test_update_primary_key(self): with self.assertRaisesMessage(ValueError, self.pk_fields_error): Note.objects.bulk_update([], ['id']) def test_update_custom_primary_key(self): with self.assertRaisesMessage(ValueError, self.pk_fields_error): CustomPk.objects.bulk_update([], ['name']) def test_empty_objects(self): with self.assertNumQueries(0): rows_updated = Note.objects.bulk_update([], ['note']) self.assertEqual(rows_updated, 0) def test_large_batch(self): Note.objects.bulk_create([ Note(note=str(i), misc=str(i)) for i in range(0, 2000) ]) notes = list(Note.objects.all()) rows_updated = Note.objects.bulk_update(notes, ['note']) self.assertEqual(rows_updated, 2000) def test_updated_rows_when_passing_duplicates(self): note = Note.objects.create(note='test-note', misc='test') rows_updated = Note.objects.bulk_update([note, note], ['note']) self.assertEqual(rows_updated, 1) # Duplicates in different batches. rows_updated = Note.objects.bulk_update([note, note], ['note'], batch_size=1) self.assertEqual(rows_updated, 2) def test_only_concrete_fields_allowed(self): obj = Valid.objects.create(valid='test') detail = Detail.objects.create(data='test') paragraph = Paragraph.objects.create(text='test') Member.objects.create(name='test', details=detail) msg = 'bulk_update() can only be used with concrete fields.' with self.assertRaisesMessage(ValueError, msg): Detail.objects.bulk_update([detail], fields=['member']) with self.assertRaisesMessage(ValueError, msg): Paragraph.objects.bulk_update([paragraph], fields=['page']) with self.assertRaisesMessage(ValueError, msg): Valid.objects.bulk_update([obj], fields=['parent']) def test_custom_db_columns(self): model = CustomDbColumn.objects.create(custom_column=1) model.custom_column = 2 CustomDbColumn.objects.bulk_update([model], fields=['custom_column']) model.refresh_from_db() self.assertEqual(model.custom_column, 2) def test_custom_pk(self): custom_pks = [ CustomPk.objects.create(name='pk-%s' % i, extra='') for i in range(10) ] for model in custom_pks: model.extra = 'extra-%s' % model.pk CustomPk.objects.bulk_update(custom_pks, ['extra']) self.assertCountEqual( CustomPk.objects.values_list('extra', flat=True), [cat.extra for cat in custom_pks] ) def test_falsey_pk_value(self): order = Order.objects.create(pk=0, name='test') order.name = 'updated' Order.objects.bulk_update([order], ['name']) order.refresh_from_db() self.assertEqual(order.name, 'updated') def test_inherited_fields(self): special_categories = [ SpecialCategory.objects.create(name=str(i), special_name=str(i)) for i in range(10) ] for category in special_categories: category.name = 'test-%s' % category.id category.special_name = 'special-test-%s' % category.special_name SpecialCategory.objects.bulk_update(special_categories, ['name', 'special_name']) self.assertCountEqual( SpecialCategory.objects.values_list('name', flat=True), [cat.name for cat in special_categories] ) self.assertCountEqual( SpecialCategory.objects.values_list('special_name', flat=True), [cat.special_name for cat in special_categories] ) def test_field_references(self): numbers = [Number.objects.create(num=0) for _ in range(10)] for number in numbers: number.num = F('num') + 1 
Number.objects.bulk_update(numbers, ['num']) self.assertCountEqual(Number.objects.filter(num=1), numbers) def test_f_expression(self): notes = [ Note.objects.create(note='test_note', misc='test_misc') for _ in range(10) ] for note in notes: note.misc = F('note') Note.objects.bulk_update(notes, ['misc']) self.assertCountEqual(Note.objects.filter(misc='test_note'), notes) def test_booleanfield(self): individuals = [Individual.objects.create(alive=False) for _ in range(10)] for individual in individuals: individual.alive = True Individual.objects.bulk_update(individuals, ['alive']) self.assertCountEqual(Individual.objects.filter(alive=True), individuals) def test_ipaddressfield(self): for ip in ('2001::1', '1.2.3.4'): with self.subTest(ip=ip): models = [ CustomDbColumn.objects.create(ip_address='0.0.0.0') for _ in range(10) ] for model in models: model.ip_address = ip CustomDbColumn.objects.bulk_update(models, ['ip_address']) self.assertCountEqual(CustomDbColumn.objects.filter(ip_address=ip), models) def test_datetime_field(self): articles = [ Article.objects.create(name=str(i), created=datetime.datetime.today()) for i in range(10) ] point_in_time = datetime.datetime(1991, 10, 31) for article in articles: article.created = point_in_time Article.objects.bulk_update(articles, ['created']) self.assertCountEqual(Article.objects.filter(created=point_in_time), articles) @skipUnlessDBFeature('supports_json_field') def test_json_field(self): JSONFieldNullable.objects.bulk_create([ JSONFieldNullable(json_field={'a': i}) for i in range(10) ]) objs = JSONFieldNullable.objects.all() for obj in objs: obj.json_field = {'c': obj.json_field['a'] + 1} JSONFieldNullable.objects.bulk_update(objs, ['json_field']) self.assertCountEqual(JSONFieldNullable.objects.filter(json_field__has_key='c'), objs) def test_nullable_fk_after_related_save(self): parent = RelatedObject.objects.create() child = SingleObject() parent.single = child parent.single.save() RelatedObject.objects.bulk_update([parent], fields=['single']) self.assertEqual(parent.single_id, parent.single.pk) parent.refresh_from_db() self.assertEqual(parent.single, child) def test_unsaved_parent(self): parent = RelatedObject.objects.create() parent.single = SingleObject() msg = ( "bulk_update() prohibited to prevent data loss due to unsaved " "related object 'single'." ) with self.assertRaisesMessage(ValueError, msg): RelatedObject.objects.bulk_update([parent], fields=['single']) def test_unspecified_unsaved_parent(self): parent = RelatedObject.objects.create() parent.single = SingleObject() parent.f = 42 RelatedObject.objects.bulk_update([parent], fields=['f']) parent.refresh_from_db() self.assertEqual(parent.f, 42) self.assertIsNone(parent.single)
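
# Minimal usage sketch of the API exercised above: bulk_update() writes the
# listed fields for every object in one UPDATE per batch and returns the
# number of rows matched, which the duplicate-object tests rely on.
def bulk_update_usage_sketch():
    notes = list(Note.objects.all())
    for i, note in enumerate(notes):
        note.misc = 'batch-%s' % i
    rows_matched = Note.objects.bulk_update(notes, ['misc'], batch_size=500)
    return rows_matched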
335bbbc2630f705db3dfbdebcbd6dc783fa58f94ee1d9ccf5fecbd2e0022a906
import sys from django.contrib.auth.models import Group from django.template import ( Context, Engine, TemplateDoesNotExist, TemplateSyntaxError, ) from django.template.base import UNKNOWN_SOURCE from django.test import SimpleTestCase, override_settings from django.urls import NoReverseMatch from django.utils import translation from django.utils.html import escape class TemplateTestMixin: def _engine(self, **kwargs): return Engine(debug=self.debug_engine, **kwargs) def test_string_origin(self): template = self._engine().from_string('string template') self.assertEqual(template.origin.name, UNKNOWN_SOURCE) self.assertIsNone(template.origin.loader_name) self.assertEqual(template.source, 'string template') @override_settings(SETTINGS_MODULE=None) def test_url_reverse_no_settings_module(self): """ #9005 -- url tag shouldn't require settings.SETTINGS_MODULE to be set. """ t = self._engine().from_string('{% url will_not_match %}') c = Context() with self.assertRaises(NoReverseMatch): t.render(c) def test_url_reverse_view_name(self): """ #19827 -- url tag should keep the original stack trace when reraising an exception. """ t = self._engine().from_string('{% url will_not_match %}') c = Context() try: t.render(c) except NoReverseMatch: tb = sys.exc_info()[2] depth = 0 while tb.tb_next is not None: tb = tb.tb_next depth += 1 self.assertGreater(depth, 5, "The traceback context was lost when reraising the traceback.") def test_no_wrapped_exception(self): """ # 16770 -- The template system doesn't wrap exceptions, but annotates them. """ engine = self._engine() c = Context({"coconuts": lambda: 42 / 0}) t = engine.from_string("{{ coconuts }}") with self.assertRaises(ZeroDivisionError) as e: t.render(c) if self.debug_engine: debug = e.exception.template_debug self.assertEqual(debug['start'], 0) self.assertEqual(debug['end'], 14) def test_invalid_block_suggestion(self): """ Error messages should include the unexpected block name and be entirely in English. """ engine = self._engine() msg = ( "Invalid block tag on line 1: 'endblock', expected 'elif', 'else' " "or 'endif'. Did you forget to register or load this tag?" ) with self.settings(USE_I18N=True), translation.override('de'): with self.assertRaisesMessage(TemplateSyntaxError, msg): engine.from_string("{% if 1 %}lala{% endblock %}{% endif %}") def test_unknown_block_tag(self): engine = self._engine() msg = ( "Invalid block tag on line 1: 'foobar'. Did you forget to " "register or load this tag?" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): engine.from_string("lala{% foobar %}") def test_compile_filter_expression_error(self): """ #19819 -- Make sure the correct token is highlighted for FilterExpression errors. """ engine = self._engine() msg = "Could not parse the remainder: '@bar' from 'foo@bar'" with self.assertRaisesMessage(TemplateSyntaxError, msg) as e: engine.from_string("{% if 1 %}{{ foo@bar }}{% endif %}") if self.debug_engine: debug = e.exception.template_debug self.assertEqual((debug['start'], debug['end']), (10, 23)) self.assertEqual(debug['during'], '{{ foo@bar }}') def test_compile_tag_error(self): """ Errors raised while compiling nodes should include the token information.
""" engine = self._engine( libraries={'bad_tag': 'template_tests.templatetags.bad_tag'}, ) with self.assertRaises(RuntimeError) as e: engine.from_string("{% load bad_tag %}{% badtag %}") if self.debug_engine: self.assertEqual(e.exception.template_debug['during'], '{% badtag %}') def test_compile_tag_error_27584(self): engine = self._engine( app_dirs=True, libraries={'tag_27584': 'template_tests.templatetags.tag_27584'}, ) t = engine.get_template('27584_parent.html') with self.assertRaises(TemplateSyntaxError) as e: t.render(Context()) if self.debug_engine: self.assertEqual(e.exception.template_debug['during'], '{% badtag %}') def test_compile_tag_error_27956(self): """Errors in a child of {% extends %} are displayed correctly.""" engine = self._engine( app_dirs=True, libraries={'tag_27584': 'template_tests.templatetags.tag_27584'}, ) t = engine.get_template('27956_child.html') with self.assertRaises(TemplateSyntaxError) as e: t.render(Context()) if self.debug_engine: self.assertEqual(e.exception.template_debug['during'], '{% badtag %}') def test_render_tag_error_in_extended_block(self): """Errors in extended block are displayed correctly.""" e = self._engine(app_dirs=True) template = e.get_template('test_extends_block_error.html') context = Context() with self.assertRaises(TemplateDoesNotExist) as cm: template.render(context) if self.debug_engine: self.assertEqual( cm.exception.template_debug['during'], escape('{% include "missing.html" %}'), ) def test_super_errors(self): """ #18169 -- NoReverseMatch should not be silence in block.super. """ engine = self._engine(app_dirs=True) t = engine.get_template('included_content.html') with self.assertRaises(NoReverseMatch): t.render(Context()) def test_debug_tag_non_ascii(self): """ #23060 -- Test non-ASCII model representation in debug output. """ group = Group(name="清風") c1 = Context({"objs": [group]}) t1 = self._engine().from_string('{% debug %}') self.assertIn("清風", t1.render(c1)) def test_extends_generic_template(self): """ #24338 -- Allow extending django.template.backends.django.Template objects. """ engine = self._engine() parent = engine.from_string('{% block content %}parent{% endblock %}') child = engine.from_string( '{% extends parent %}{% block content %}child{% endblock %}') self.assertEqual(child.render(Context({'parent': parent})), 'child') def test_node_origin(self): """ #25848 -- Set origin on Node so debugging tools can determine which template the node came from even if extending or including templates. """ template = self._engine().from_string('content') for node in template.nodelist: self.assertEqual(node.origin, template.origin) class TemplateTests(TemplateTestMixin, SimpleTestCase): debug_engine = False class DebugTemplateTests(TemplateTestMixin, SimpleTestCase): debug_engine = True
e7fc892e7401f1e19daa248faa269ca47c9b8900dcccf8149168766000fb5b74
from django.template import Context, Template, Variable, VariableDoesNotExist from django.template.base import DebugLexer, Lexer, TokenType from django.test import SimpleTestCase from django.utils.translation import gettext_lazy class LexerTestMixin: template_string = ( 'text\n' '{% if test %}{{ varvalue }}{% endif %}' '{#comment {{not a var}} %{not a block}% #}' 'end text' ) expected_token_tuples = [ # (token_type, contents, lineno, position) (TokenType.TEXT, 'text\n', 1, (0, 5)), (TokenType.BLOCK, 'if test', 2, (5, 18)), (TokenType.VAR, 'varvalue', 2, (18, 32)), (TokenType.BLOCK, 'endif', 2, (32, 43)), (TokenType.COMMENT, 'comment {{not a var}} %{not a block}%', 2, (43, 85)), (TokenType.TEXT, 'end text', 2, (85, 93)), ] def test_tokenize(self): tokens = self.lexer_class(self.template_string).tokenize() token_tuples = [(t.token_type, t.contents, t.lineno, t.position) for t in tokens] self.assertEqual(token_tuples, self.make_expected()) def make_expected(self): raise NotImplementedError('This method must be implemented by a subclass.') class LexerTests(LexerTestMixin, SimpleTestCase): lexer_class = Lexer def make_expected(self): # The non-debug lexer does not record position. return [t[:-1] + (None,) for t in self.expected_token_tuples] class DebugLexerTests(LexerTestMixin, SimpleTestCase): lexer_class = DebugLexer def make_expected(self): return self.expected_token_tuples class TemplateTests(SimpleTestCase): def test_lazy_template_string(self): template_string = gettext_lazy('lazy string') self.assertEqual(Template(template_string).render(Context()), template_string) def test_repr(self): template = Template( '<html><body>\n' '{% if test %}<h1>{{ varvalue }}</h1>{% endif %}' '</body></html>' ) self.assertEqual( repr(template), '<Template template_string="<html><body>{% if t...">', ) class VariableDoesNotExistTests(SimpleTestCase): def test_str(self): exc = VariableDoesNotExist(msg='Failed lookup in %r', params=({'foo': 'bar'},)) self.assertEqual(str(exc), "Failed lookup in {'foo': 'bar'}") class VariableTests(SimpleTestCase): def test_integer_literals(self): self.assertEqual(Variable('999999999999999999999999999').literal, 999999999999999999999999999) def test_nonliterals(self): """Variable names that aren't resolved as literals.""" var_names = [] for var in ('inf', 'infinity', 'iNFiniTy', 'nan'): var_names.extend((var, '-' + var, '+' + var)) for var in var_names: with self.subTest(var=var): self.assertIsNone(Variable(var).literal)
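# Editorial note: an illustrative sketch, not part of the original test
# module above. LexerTestMixin asserts that Lexer and DebugLexer yield the
# same (token_type, contents, lineno) data and that only DebugLexer records a
# (start, end) position spanning the whole token, delimiters included. A
# small standalone check of the same behaviour:
def _debug_lexer_sketch():
    tokens = DebugLexer('{{ name }}!').tokenize()
    assert [(t.token_type, t.contents, t.position) for t in tokens] == [
        (TokenType.VAR, 'name', (0, 10)),   # '{{ name }}' spans columns 0-10
        (TokenType.TEXT, '!', (10, 11)),
    ]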
6502d67c0ce2e2ecf46606e71166c83775cb4df5c60d5c81e99fe4010543c13b
import os from django.template import Context, Engine, TemplateSyntaxError from django.template.base import Node from django.template.library import InvalidTemplateLibrary from django.test import SimpleTestCase from django.test.utils import extend_sys_path from .templatetags import custom, inclusion from .utils import ROOT LIBRARIES = { 'custom': 'template_tests.templatetags.custom', 'inclusion': 'template_tests.templatetags.inclusion', } class CustomFilterTests(SimpleTestCase): def test_filter(self): engine = Engine(libraries=LIBRARIES) t = engine.from_string("{% load custom %}{{ string|trim:5 }}") self.assertEqual( t.render(Context({"string": "abcdefghijklmnopqrstuvwxyz"})), "abcde" ) def test_decorated_filter(self): engine = Engine(libraries=LIBRARIES) t = engine.from_string('{% load custom %}{{ name|make_data_div }}') self.assertEqual(t.render(Context({'name': 'foo'})), '<div data-name="foo"></div>') class TagTestCase(SimpleTestCase): @classmethod def setUpClass(cls): cls.engine = Engine(app_dirs=True, libraries=LIBRARIES) super().setUpClass() def verify_tag(self, tag, name): self.assertEqual(tag.__name__, name) self.assertEqual(tag.__doc__, 'Expected %s __doc__' % name) self.assertEqual(tag.__dict__['anything'], 'Expected %s __dict__' % name) class SimpleTagTests(TagTestCase): def test_simple_tags(self): c = Context({'value': 42}) templates = [ ('{% load custom %}{% no_params %}', 'no_params - Expected result'), ('{% load custom %}{% one_param 37 %}', 'one_param - Expected result: 37'), ('{% load custom %}{% explicit_no_context 37 %}', 'explicit_no_context - Expected result: 37'), ('{% load custom %}{% no_params_with_context %}', 'no_params_with_context - Expected result (context value: 42)'), ('{% load custom %}{% params_and_context 37 %}', 'params_and_context - Expected result (context value: 42): 37'), ('{% load custom %}{% simple_two_params 37 42 %}', 'simple_two_params - Expected result: 37, 42'), ('{% load custom %}{% simple_keyword_only_param kwarg=37 %}', 'simple_keyword_only_param - Expected result: 37'), ('{% load custom %}{% simple_keyword_only_default %}', 'simple_keyword_only_default - Expected result: 42'), ( '{% load custom %}{% simple_keyword_only_default kwarg=37 %}', 'simple_keyword_only_default - Expected result: 37', ), ('{% load custom %}{% simple_one_default 37 %}', 'simple_one_default - Expected result: 37, hi'), ('{% load custom %}{% simple_one_default 37 two="hello" %}', 'simple_one_default - Expected result: 37, hello'), ('{% load custom %}{% simple_one_default one=99 two="hello" %}', 'simple_one_default - Expected result: 99, hello'), ('{% load custom %}{% simple_one_default 37 42 %}', 'simple_one_default - Expected result: 37, 42'), ('{% load custom %}{% simple_unlimited_args 37 %}', 'simple_unlimited_args - Expected result: 37, hi'), ('{% load custom %}{% simple_unlimited_args 37 42 56 89 %}', 'simple_unlimited_args - Expected result: 37, 42, 56, 89'), ('{% load custom %}{% simple_only_unlimited_args %}', 'simple_only_unlimited_args - Expected result: '), ('{% load custom %}{% simple_only_unlimited_args 37 42 56 89 %}', 'simple_only_unlimited_args - Expected result: 37, 42, 56, 89'), ('{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}', 'simple_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4'), ] for entry in templates: t = self.engine.from_string(entry[0]) self.assertEqual(t.render(c), entry[1]) for entry in templates: t = self.engine.from_string("%s as var %%}Result: {{ var }}" % 
entry[0][0:-2]) self.assertEqual(t.render(c), "Result: %s" % entry[1]) def test_simple_tag_errors(self): errors = [ ("'simple_one_default' received unexpected keyword argument 'three'", '{% load custom %}{% simple_one_default 99 two="hello" three="foo" %}'), ("'simple_two_params' received too many positional arguments", '{% load custom %}{% simple_two_params 37 42 56 %}'), ("'simple_one_default' received too many positional arguments", '{% load custom %}{% simple_one_default 37 42 56 %}'), ("'simple_keyword_only_param' did not receive value(s) for the argument(s): 'kwarg'", '{% load custom %}{% simple_keyword_only_param %}'), ( "'simple_keyword_only_param' received multiple values for " "keyword argument 'kwarg'", '{% load custom %}{% simple_keyword_only_param kwarg=42 ' 'kwarg=37 %}', ), ( "'simple_keyword_only_default' received multiple values for " "keyword argument 'kwarg'", '{% load custom %}{% simple_keyword_only_default kwarg=42 ' 'kwarg=37 %}', ), ("'simple_unlimited_args_kwargs' received some positional argument(s) after some keyword argument(s)", '{% load custom %}{% simple_unlimited_args_kwargs 37 40|add:2 eggs="scrambled" 56 four=1|add:3 %}'), ("'simple_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'", '{% load custom %}{% simple_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}'), ] for entry in errors: with self.assertRaisesMessage(TemplateSyntaxError, entry[0]): self.engine.from_string(entry[1]) for entry in errors: with self.assertRaisesMessage(TemplateSyntaxError, entry[0]): self.engine.from_string("%s as var %%}" % entry[1][0:-2]) def test_simple_tag_escaping_autoescape_off(self): c = Context({'name': "Jack & Jill"}, autoescape=False) t = self.engine.from_string("{% load custom %}{% escape_naive %}") self.assertEqual(t.render(c), "Hello Jack & Jill!") def test_simple_tag_naive_escaping(self): c = Context({'name': "Jack & Jill"}) t = self.engine.from_string("{% load custom %}{% escape_naive %}") self.assertEqual(t.render(c), "Hello Jack &amp; Jill!") def test_simple_tag_explicit_escaping(self): # Check we don't double escape c = Context({'name': "Jack & Jill"}) t = self.engine.from_string("{% load custom %}{% escape_explicit %}") self.assertEqual(t.render(c), "Hello Jack &amp; Jill!") def test_simple_tag_format_html_escaping(self): # Check we don't double escape c = Context({'name': "Jack & Jill"}) t = self.engine.from_string("{% load custom %}{% escape_format_html %}") self.assertEqual(t.render(c), "Hello Jack &amp; Jill!") def test_simple_tag_registration(self): # The decorators preserve the decorated function's docstring, name, # and attributes. 
self.verify_tag(custom.no_params, 'no_params') self.verify_tag(custom.one_param, 'one_param') self.verify_tag(custom.explicit_no_context, 'explicit_no_context') self.verify_tag(custom.no_params_with_context, 'no_params_with_context') self.verify_tag(custom.params_and_context, 'params_and_context') self.verify_tag(custom.simple_unlimited_args_kwargs, 'simple_unlimited_args_kwargs') self.verify_tag(custom.simple_tag_without_context_parameter, 'simple_tag_without_context_parameter') def test_simple_tag_missing_context(self): # The 'context' parameter must be present when takes_context is True msg = ( "'simple_tag_without_context_parameter' is decorated with " "takes_context=True so it must have a first argument of 'context'" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): self.engine.from_string('{% load custom %}{% simple_tag_without_context_parameter 123 %}') def test_simple_tag_missing_context_no_params(self): msg = ( "'simple_tag_takes_context_without_params' is decorated with " "takes_context=True so it must have a first argument of 'context'" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): self.engine.from_string( '{% load custom %}{% simple_tag_takes_context_without_params %}' ) class InclusionTagTests(TagTestCase): def test_inclusion_tags(self): c = Context({'value': 42}) templates = [ ('{% load inclusion %}{% inclusion_no_params %}', 'inclusion_no_params - Expected result\n'), ('{% load inclusion %}{% inclusion_one_param 37 %}', 'inclusion_one_param - Expected result: 37\n'), ('{% load inclusion %}{% inclusion_explicit_no_context 37 %}', 'inclusion_explicit_no_context - Expected result: 37\n'), ('{% load inclusion %}{% inclusion_no_params_with_context %}', 'inclusion_no_params_with_context - Expected result (context value: 42)\n'), ('{% load inclusion %}{% inclusion_params_and_context 37 %}', 'inclusion_params_and_context - Expected result (context value: 42): 37\n'), ('{% load inclusion %}{% inclusion_two_params 37 42 %}', 'inclusion_two_params - Expected result: 37, 42\n'), ( '{% load inclusion %}{% inclusion_one_default 37 %}', 'inclusion_one_default - Expected result: 37, hi\n' ), ('{% load inclusion %}{% inclusion_one_default 37 two="hello" %}', 'inclusion_one_default - Expected result: 37, hello\n'), ('{% load inclusion %}{% inclusion_one_default one=99 two="hello" %}', 'inclusion_one_default - Expected result: 99, hello\n'), ('{% load inclusion %}{% inclusion_one_default 37 42 %}', 'inclusion_one_default - Expected result: 37, 42\n'), ( '{% load inclusion %}{% inclusion_keyword_only_default kwarg=37 %}', 'inclusion_keyword_only_default - Expected result: 37\n', ), ('{% load inclusion %}{% inclusion_unlimited_args 37 %}', 'inclusion_unlimited_args - Expected result: 37, hi\n'), ('{% load inclusion %}{% inclusion_unlimited_args 37 42 56 89 %}', 'inclusion_unlimited_args - Expected result: 37, 42, 56, 89\n'), ('{% load inclusion %}{% inclusion_only_unlimited_args %}', 'inclusion_only_unlimited_args - Expected result: \n'), ('{% load inclusion %}{% inclusion_only_unlimited_args 37 42 56 89 %}', 'inclusion_only_unlimited_args - Expected result: 37, 42, 56, 89\n'), ('{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 40|add:2 56 eggs="scrambled" four=1|add:3 %}', 'inclusion_unlimited_args_kwargs - Expected result: 37, 42, 56 / eggs=scrambled, four=4\n'), ] for entry in templates: t = self.engine.from_string(entry[0]) self.assertEqual(t.render(c), entry[1]) def test_inclusion_tag_errors(self): errors = [ ("'inclusion_one_default' received unexpected 
keyword argument 'three'", '{% load inclusion %}{% inclusion_one_default 99 two="hello" three="foo" %}'), ("'inclusion_two_params' received too many positional arguments", '{% load inclusion %}{% inclusion_two_params 37 42 56 %}'), ("'inclusion_one_default' received too many positional arguments", '{% load inclusion %}{% inclusion_one_default 37 42 56 %}'), ("'inclusion_one_default' did not receive value(s) for the argument(s): 'one'", '{% load inclusion %}{% inclusion_one_default %}'), ( "'inclusion_keyword_only_default' received multiple values " "for keyword argument 'kwarg'", '{% load inclusion %}{% inclusion_keyword_only_default ' 'kwarg=37 kwarg=42 %}', ), ("'inclusion_unlimited_args' did not receive value(s) for the argument(s): 'one'", '{% load inclusion %}{% inclusion_unlimited_args %}'), ( "'inclusion_unlimited_args_kwargs' received some positional argument(s) " "after some keyword argument(s)", '{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 40|add:2 eggs="boiled" 56 four=1|add:3 %}', ), ("'inclusion_unlimited_args_kwargs' received multiple values for keyword argument 'eggs'", '{% load inclusion %}{% inclusion_unlimited_args_kwargs 37 eggs="scrambled" eggs="scrambled" %}'), ] for entry in errors: with self.assertRaisesMessage(TemplateSyntaxError, entry[0]): self.engine.from_string(entry[1]) def test_include_tag_missing_context(self): # The 'context' parameter must be present when takes_context is True msg = ( "'inclusion_tag_without_context_parameter' is decorated with " "takes_context=True so it must have a first argument of 'context'" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): self.engine.from_string('{% load inclusion %}{% inclusion_tag_without_context_parameter 123 %}') def test_include_tag_missing_context_no_params(self): msg = ( "'inclusion_tag_takes_context_without_params' is decorated with " "takes_context=True so it must have a first argument of 'context'" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): self.engine.from_string( '{% load inclusion %}{% inclusion_tag_takes_context_without_params %}' ) def test_inclusion_tags_from_template(self): c = Context({'value': 42}) templates = [ ('{% load inclusion %}{% inclusion_no_params_from_template %}', 'inclusion_no_params_from_template - Expected result\n'), ('{% load inclusion %}{% inclusion_one_param_from_template 37 %}', 'inclusion_one_param_from_template - Expected result: 37\n'), ('{% load inclusion %}{% inclusion_explicit_no_context_from_template 37 %}', 'inclusion_explicit_no_context_from_template - Expected result: 37\n'), ('{% load inclusion %}{% inclusion_no_params_with_context_from_template %}', 'inclusion_no_params_with_context_from_template - Expected result (context value: 42)\n'), ('{% load inclusion %}{% inclusion_params_and_context_from_template 37 %}', 'inclusion_params_and_context_from_template - Expected result (context value: 42): 37\n'), ('{% load inclusion %}{% inclusion_two_params_from_template 37 42 %}', 'inclusion_two_params_from_template - Expected result: 37, 42\n'), ('{% load inclusion %}{% inclusion_one_default_from_template 37 %}', 'inclusion_one_default_from_template - Expected result: 37, hi\n'), ('{% load inclusion %}{% inclusion_one_default_from_template 37 42 %}', 'inclusion_one_default_from_template - Expected result: 37, 42\n'), ('{% load inclusion %}{% inclusion_unlimited_args_from_template 37 %}', 'inclusion_unlimited_args_from_template - Expected result: 37, hi\n'), ('{% load inclusion %}{% inclusion_unlimited_args_from_template 37 42 56 89 %}', 
'inclusion_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n'), ('{% load inclusion %}{% inclusion_only_unlimited_args_from_template %}', 'inclusion_only_unlimited_args_from_template - Expected result: \n'), ('{% load inclusion %}{% inclusion_only_unlimited_args_from_template 37 42 56 89 %}', 'inclusion_only_unlimited_args_from_template - Expected result: 37, 42, 56, 89\n'), ] for entry in templates: t = self.engine.from_string(entry[0]) self.assertEqual(t.render(c), entry[1]) def test_inclusion_tag_registration(self): # The decorators preserve the decorated function's docstring, name, # and attributes. self.verify_tag(inclusion.inclusion_no_params, 'inclusion_no_params') self.verify_tag(inclusion.inclusion_one_param, 'inclusion_one_param') self.verify_tag(inclusion.inclusion_explicit_no_context, 'inclusion_explicit_no_context') self.verify_tag(inclusion.inclusion_no_params_with_context, 'inclusion_no_params_with_context') self.verify_tag(inclusion.inclusion_params_and_context, 'inclusion_params_and_context') self.verify_tag(inclusion.inclusion_two_params, 'inclusion_two_params') self.verify_tag(inclusion.inclusion_one_default, 'inclusion_one_default') self.verify_tag(inclusion.inclusion_unlimited_args, 'inclusion_unlimited_args') self.verify_tag(inclusion.inclusion_only_unlimited_args, 'inclusion_only_unlimited_args') self.verify_tag(inclusion.inclusion_tag_without_context_parameter, 'inclusion_tag_without_context_parameter') self.verify_tag(inclusion.inclusion_tag_use_l10n, 'inclusion_tag_use_l10n') self.verify_tag(inclusion.inclusion_unlimited_args_kwargs, 'inclusion_unlimited_args_kwargs') def test_15070_use_l10n(self): """ Inclusion tag passes down `use_l10n` of context to the Context of the included/rendered template as well. """ c = Context({}) t = self.engine.from_string('{% load inclusion %}{% inclusion_tag_use_l10n %}') self.assertEqual(t.render(c).strip(), 'None') c.use_l10n = True self.assertEqual(t.render(c).strip(), 'True') def test_no_render_side_effect(self): """ #23441 -- InclusionNode shouldn't modify its nodelist at render time. """ engine = Engine(app_dirs=True, libraries=LIBRARIES) template = engine.from_string('{% load inclusion %}{% inclusion_no_params %}') count = template.nodelist.get_nodes_by_type(Node) template.render(Context({})) self.assertEqual(template.nodelist.get_nodes_by_type(Node), count) def test_render_context_is_cleared(self): """ #24555 -- InclusionNode should push and pop the render_context stack when rendering. Otherwise, leftover values such as blocks from extending can interfere with subsequent rendering. """ engine = Engine(app_dirs=True, libraries=LIBRARIES) template = engine.from_string('{% load inclusion %}{% inclusion_extends1 %}{% inclusion_extends2 %}') self.assertEqual(template.render(Context({})).strip(), 'one\ntwo') class TemplateTagLoadingTests(SimpleTestCase): @classmethod def setUpClass(cls): cls.egg_dir = os.path.join(ROOT, 'eggs') super().setUpClass() def test_load_error(self): msg = ( "Invalid template library specified. ImportError raised when " "trying to load 'template_tests.broken_tag': cannot import name " "'Xtemplate'" ) with self.assertRaisesMessage(InvalidTemplateLibrary, msg): Engine(libraries={'broken_tag': 'template_tests.broken_tag'}) def test_load_error_egg(self): egg_name = '%s/tagsegg.egg' % self.egg_dir msg = ( "Invalid template library specified. 
ImportError raised when " "trying to load 'tagsegg.templatetags.broken_egg': cannot " "import name 'Xtemplate'" ) with extend_sys_path(egg_name): with self.assertRaisesMessage(InvalidTemplateLibrary, msg): Engine(libraries={'broken_egg': 'tagsegg.templatetags.broken_egg'}) def test_load_working_egg(self): ttext = "{% load working_egg %}" egg_name = '%s/tagsegg.egg' % self.egg_dir with extend_sys_path(egg_name): engine = Engine(libraries={ 'working_egg': 'tagsegg.templatetags.working_egg', }) engine.from_string(ttext) def test_load_annotated_function(self): Engine(libraries={ 'annotated_tag_function': 'template_tests.annotated_tag_function', })
df51f9a1951f950b0652c926ffa9528e642e766d8cb56900653123d99b56767b
import datetime import re from datetime import date from decimal import Decimal from django import forms from django.core.exceptions import ImproperlyConfigured from django.db import models from django.forms.models import ( BaseModelFormSet, _get_foreign_key, inlineformset_factory, modelformset_factory, ) from django.http import QueryDict from django.test import TestCase, skipUnlessDBFeature from .models import ( AlternateBook, Author, AuthorMeeting, BetterAuthor, Book, BookWithCustomPK, BookWithOptionalAltEditor, ClassyMexicanRestaurant, CustomPrimaryKey, Location, Membership, MexicanRestaurant, Owner, OwnerProfile, Person, Place, Player, Poem, Poet, Post, Price, Product, Repository, Restaurant, Revision, Team, ) class DeletionTests(TestCase): def test_deletion(self): PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) poet = Poet.objects.create(name='test') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': str(poet.pk), 'form-0-name': 'test', 'form-0-DELETE': 'on', } formset = PoetFormSet(data, queryset=Poet.objects.all()) formset.save(commit=False) self.assertEqual(Poet.objects.count(), 1) formset.save() self.assertTrue(formset.is_valid()) self.assertEqual(Poet.objects.count(), 0) def test_add_form_deletion_when_invalid(self): """ Make sure that an add form that is filled out, but marked for deletion doesn't cause validation errors. """ PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) poet = Poet.objects.create(name='test') # One existing untouched and two new invalid forms data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': str(poet.id), 'form-0-name': 'test', 'form-1-id': '', 'form-1-name': 'x' * 1000, # Too long 'form-2-id': str(poet.id), # Violate unique constraint 'form-2-name': 'test2', } formset = PoetFormSet(data, queryset=Poet.objects.all()) # Make sure this form doesn't pass validation. self.assertIs(formset.is_valid(), False) self.assertEqual(Poet.objects.count(), 1) # Then make sure that it *does* pass validation and delete the object, # even though the data in the new forms isn't actually valid. data['form-0-DELETE'] = 'on' data['form-1-DELETE'] = 'on' data['form-2-DELETE'] = 'on' formset = PoetFormSet(data, queryset=Poet.objects.all()) self.assertIs(formset.is_valid(), True) formset.save() self.assertEqual(Poet.objects.count(), 0) def test_change_form_deletion_when_invalid(self): """ Make sure that a change form that is filled out, but marked for deletion doesn't cause validation errors. """ PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True) poet = Poet.objects.create(name='test') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '0', 'form-0-id': str(poet.id), 'form-0-name': 'x' * 1000, } formset = PoetFormSet(data, queryset=Poet.objects.all()) # Make sure this form doesn't pass validation. self.assertIs(formset.is_valid(), False) self.assertEqual(Poet.objects.count(), 1) # Then make sure that it *does* pass validation and delete the object, # even though the data isn't actually valid.
data['form-0-DELETE'] = 'on' formset = PoetFormSet(data, queryset=Poet.objects.all()) self.assertIs(formset.is_valid(), True) formset.save() self.assertEqual(Poet.objects.count(), 0) def test_outdated_deletion(self): poet = Poet.objects.create(name='test') poem = Poem.objects.create(name='Brevity is the soul of wit', poet=poet) PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", can_delete=True) # Simulate deletion of an object that doesn't exist in the database data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-0-id': str(poem.pk), 'form-0-name': 'foo', 'form-1-id': str(poem.pk + 1), # doesn't exist 'form-1-name': 'bar', 'form-1-DELETE': 'on', } formset = PoemFormSet(data, instance=poet, prefix="form") # The formset is valid even though poem.pk + 1 doesn't exist, # because it's marked for deletion anyway self.assertTrue(formset.is_valid()) formset.save() # Make sure the save went through correctly self.assertEqual(Poem.objects.get(pk=poem.pk).name, "foo") self.assertEqual(poet.poem_set.count(), 1) self.assertFalse(Poem.objects.filter(pk=poem.pk + 1).exists()) class ModelFormsetTest(TestCase): def test_modelformset_factory_without_fields(self): """ Regression for #19733 """ message = ( "Calling modelformset_factory without defining 'fields' or 'exclude' " "explicitly is prohibited." ) with self.assertRaisesMessage(ImproperlyConfigured, message): modelformset_factory(Author) def test_simple_save(self): qs = Author.objects.all() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100">' '<input type="hidden" name="form-0-id" id="id_form-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100">' '<input type="hidden" name="form-1-id" id="id_form-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' ' <input id="id_form-2-name" type="text" name="form-2-name" maxlength="100">' '<input type="hidden" name="form-2-id" id="id_form-2-id"></p>' ) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-name': 'Charles Baudelaire', 'form-1-name': 'Arthur Rimbaud', 'form-2-name': '', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) author1, author2 = saved self.assertEqual(author1, Author.objects.get(name='Charles Baudelaire')) self.assertEqual(author2, Author.objects.get(name='Arthur Rimbaud')) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1]) # Gah! We forgot Paul Verlaine. Let's create a formset to edit the # existing authors with an extra form to add him. We *could* pass in a # queryset to restrict the Author objects we edit, but in this case # we'll use it to display them in alphabetical order by name. 
qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=False) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" value="Arthur Rimbaud" maxlength="100">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id"></p>' % author2.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" value="Charles Baudelaire" maxlength="100">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id"></p>' % author1.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' '<input id="id_form-2-name" type="text" name="form-2-name" maxlength="100">' '<input type="hidden" name="form-2-id" id="id_form-2-id"></p>' ) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '2', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Arthur Rimbaud', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-name': 'Paul Verlaine', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # Only changed or new objects are returned from formset.save() saved = formset.save() self.assertEqual(len(saved), 1) author3 = saved[0] self.assertEqual(author3, Author.objects.get(name='Paul Verlaine')) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1, author3]) # This probably shouldn't happen, but it will. If an add form was # marked for deletion, make sure we don't save that form. 
qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=True) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 4) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" ' 'value="Arthur Rimbaud" maxlength="100"></p>' '<p><label for="id_form-0-DELETE">Delete:</label>' '<input type="checkbox" name="form-0-DELETE" id="id_form-0-DELETE">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id"></p>' % author2.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" ' 'value="Charles Baudelaire" maxlength="100"></p>' '<p><label for="id_form-1-DELETE">Delete:</label>' '<input type="checkbox" name="form-1-DELETE" id="id_form-1-DELETE">' '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id"></p>' % author1.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_form-2-name">Name:</label>' '<input id="id_form-2-name" type="text" name="form-2-name" ' 'value="Paul Verlaine" maxlength="100"></p>' '<p><label for="id_form-2-DELETE">Delete:</label>' '<input type="checkbox" name="form-2-DELETE" id="id_form-2-DELETE">' '<input type="hidden" name="form-2-id" value="%d" id="id_form-2-id"></p>' % author3.id ) self.assertHTMLEqual( formset.forms[3].as_p(), '<p><label for="id_form-3-name">Name:</label>' '<input id="id_form-3-name" type="text" name="form-3-name" maxlength="100"></p>' '<p><label for="id_form-3-DELETE">Delete:</label>' '<input type="checkbox" name="form-3-DELETE" id="id_form-3-DELETE">' '<input type="hidden" name="form-3-id" id="id_form-3-id"></p>' ) data = { 'form-TOTAL_FORMS': '4', # the number of forms rendered 'form-INITIAL_FORMS': '3', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Arthur Rimbaud', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-id': str(author3.id), 'form-2-name': 'Paul Verlaine', 'form-3-name': 'Walt Whitman', 'form-3-DELETE': 'on', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # No objects were changed or saved so nothing will come back. self.assertEqual(formset.save(), []) authors = list(Author.objects.order_by('name')) self.assertEqual(authors, [author2, author1, author3]) # Let's edit a record to ensure save only returns that one record. data = { 'form-TOTAL_FORMS': '4', # the number of forms rendered 'form-INITIAL_FORMS': '3', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(author2.id), 'form-0-name': 'Walt Whitman', 'form-1-id': str(author1.id), 'form-1-name': 'Charles Baudelaire', 'form-2-id': str(author3.id), 'form-2-name': 'Paul Verlaine', 'form-3-name': '', 'form-3-DELETE': '', } formset = AuthorFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) # One record has changed. 
saved = formset.save() self.assertEqual(len(saved), 1) self.assertEqual(saved[0], Author.objects.get(name='Walt Whitman')) def test_commit_false(self): # Test the behavior of commit=False and save_m2m author1 = Author.objects.create(name='Charles Baudelaire') author2 = Author.objects.create(name='Paul Verlaine') author3 = Author.objects.create(name='Walt Whitman') meeting = AuthorMeeting.objects.create(created=date.today()) meeting.authors.set(Author.objects.all()) # create an Author instance to add to the meeting. author4 = Author.objects.create(name='John Steinbeck') AuthorMeetingFormSet = modelformset_factory(AuthorMeeting, fields="__all__", extra=1, can_delete=True) data = { 'form-TOTAL_FORMS': '2', # the number of forms rendered 'form-INITIAL_FORMS': '1', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-id': str(meeting.id), 'form-0-name': '2nd Tuesday of the Week Meeting', 'form-0-authors': [author2.id, author1.id, author3.id, author4.id], 'form-1-name': '', 'form-1-authors': '', 'form-1-DELETE': '', } formset = AuthorMeetingFormSet(data=data, queryset=AuthorMeeting.objects.all()) self.assertTrue(formset.is_valid()) instances = formset.save(commit=False) for instance in instances: instance.created = date.today() instance.save() formset.save_m2m() self.assertSequenceEqual( instances[0].authors.all(), [author1, author4, author2, author3], ) def test_max_num(self): # Test the behavior of max_num with model formsets. It should allow # all existing related objects/inlines for a given object to be # displayed, but not allow the creation of new inlines beyond max_num. a1 = Author.objects.create(name='Charles Baudelaire') a2 = Author.objects.create(name='Paul Verlaine') a3 = Author.objects.create(name='Walt Whitman') qs = Author.objects.order_by('name') AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 6) self.assertEqual(len(formset.extra_forms), 3) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 4) self.assertEqual(len(formset.extra_forms), 1) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0, extra=3) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 3) self.assertEqual(len(formset.extra_forms), 0) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None) formset = AuthorFormSet(queryset=qs) self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0) formset = AuthorFormSet(queryset=qs) self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4) formset = AuthorFormSet(queryset=qs) self.assertSequenceEqual(formset.get_queryset(), [a1, a2, a3]) def test_min_num(self): # Test the behavior of min_num with model formsets. It should be # added to extra. 
qs = Author.objects.none() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 0) AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=0) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 1) AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=1) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 2) def test_min_num_with_existing(self): # Test the behavior of min_num with existing objects. Author.objects.create(name='Charles Baudelaire') qs = Author.objects.all() AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0, min_num=1) formset = AuthorFormSet(queryset=qs) self.assertEqual(len(formset.forms), 1) def test_custom_save_method(self): class PoetForm(forms.ModelForm): def save(self, commit=True): # change the name to "Vladimir Mayakovsky" just to be a jerk. author = super().save(commit=False) author.name = "Vladimir Mayakovsky" if commit: author.save() return author PoetFormSet = modelformset_factory(Poet, fields="__all__", form=PoetForm) data = { 'form-TOTAL_FORMS': '3', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-name': 'Walt Whitman', 'form-1-name': 'Charles Baudelaire', 'form-2-name': '', } qs = Poet.objects.all() formset = PoetFormSet(data=data, queryset=qs) self.assertTrue(formset.is_valid()) poets = formset.save() self.assertEqual(len(poets), 2) poet1, poet2 = poets self.assertEqual(poet1.name, 'Vladimir Mayakovsky') self.assertEqual(poet2.name, 'Vladimir Mayakovsky') def test_custom_form(self): """ modelformset_factory() respects fields and exclude parameters of a custom form. """ class PostForm1(forms.ModelForm): class Meta: model = Post fields = ('title', 'posted') class PostForm2(forms.ModelForm): class Meta: model = Post exclude = ('subtitle',) PostFormSet = modelformset_factory(Post, form=PostForm1) formset = PostFormSet() self.assertNotIn("subtitle", formset.forms[0].fields) PostFormSet = modelformset_factory(Post, form=PostForm2) formset = PostFormSet() self.assertNotIn("subtitle", formset.forms[0].fields) def test_custom_queryset_init(self): """ A queryset can be overridden in the formset's __init__() method.
""" Author.objects.create(name='Charles Baudelaire') Author.objects.create(name='Paul Verlaine') class BaseAuthorFormSet(BaseModelFormSet): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.queryset = Author.objects.filter(name__startswith='Charles') AuthorFormSet = modelformset_factory(Author, fields='__all__', formset=BaseAuthorFormSet) formset = AuthorFormSet() self.assertEqual(len(formset.get_queryset()), 1) def test_model_inheritance(self): BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__") formset = BetterAuthorFormSet() self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100"></p>' '<p><label for="id_form-0-write_speed">Write speed:</label>' '<input type="number" name="form-0-write_speed" id="id_form-0-write_speed">' '<input type="hidden" name="form-0-author_ptr" id="id_form-0-author_ptr"></p>' ) data = { 'form-TOTAL_FORMS': '1', # the number of forms rendered 'form-INITIAL_FORMS': '0', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-author_ptr': '', 'form-0-name': 'Ernest Hemingway', 'form-0-write_speed': '10', } formset = BetterAuthorFormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) author1, = saved self.assertEqual(author1, BetterAuthor.objects.get(name='Ernest Hemingway')) hemingway_id = BetterAuthor.objects.get(name="Ernest Hemingway").pk formset = BetterAuthorFormSet() self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-name">Name:</label>' '<input id="id_form-0-name" type="text" name="form-0-name" value="Ernest Hemingway" maxlength="100"></p>' '<p><label for="id_form-0-write_speed">Write speed:</label>' '<input type="number" name="form-0-write_speed" value="10" id="id_form-0-write_speed">' '<input type="hidden" name="form-0-author_ptr" value="%d" id="id_form-0-author_ptr"></p>' % hemingway_id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_form-1-name">Name:</label>' '<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100"></p>' '<p><label for="id_form-1-write_speed">Write speed:</label>' '<input type="number" name="form-1-write_speed" id="id_form-1-write_speed">' '<input type="hidden" name="form-1-author_ptr" id="id_form-1-author_ptr"></p>' ) data = { 'form-TOTAL_FORMS': '2', # the number of forms rendered 'form-INITIAL_FORMS': '1', # the number of forms with initial data 'form-MAX_NUM_FORMS': '', # the max number of forms 'form-0-author_ptr': hemingway_id, 'form-0-name': 'Ernest Hemingway', 'form-0-write_speed': '10', 'form-1-author_ptr': '', 'form-1-name': '', 'form-1-write_speed': '', } formset = BetterAuthorFormSet(data) self.assertTrue(formset.is_valid()) self.assertEqual(formset.save(), []) def test_inline_formsets(self): # We can also create a formset that is tied to a parent model. This is # how the admin system's edit inline functionality works. 
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=3, fields="__all__") author = Author.objects.create(name='Charles Baudelaire') formset = AuthorBooksFormSet(instance=author) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" ' 'name="book_set-0-title" maxlength="100"><input type="hidden" name="book_set-0-author" value="%d" ' 'id="id_book_set-0-author"><input type="hidden" name="book_set-0-id" id="id_book_set-0-id">' '</p>' % author.id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' % author.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' % author.id ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '0', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-title': '', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1, Book.objects.get(title='Les Fleurs du Mal')) self.assertSequenceEqual(author.book_set.all(), [book1]) # Now that we've added a book to Charles Baudelaire, let's try adding # another one. This time though, an edit form will be available for # every existing book. 
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") author = Author.objects.get(name='Charles Baudelaire') formset = AuthorBooksFormSet(instance=author) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Les Fleurs du Mal" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="%d" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="%d" id="id_book_set-0-id"></p>' % ( author.id, book1.id, ) ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' % author.id ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' % author.id ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '1', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book1.id), 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-title': 'Les Paradis Artificiels', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book2, = saved self.assertEqual(book2, Book.objects.get(title='Les Paradis Artificiels')) # As you can see, 'Les Paradis Artificiels' is now a book belonging to # Charles Baudelaire. self.assertSequenceEqual(author.book_set.order_by('title'), [book1, book2]) def test_inline_formsets_save_as_new(self): # The save_as_new parameter lets you re-associate the data to a new # instance. This is used in the admin for save_as functionality. AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") Author.objects.create(name='Charles Baudelaire') # An immutable QueryDict simulates request.POST. data = QueryDict(mutable=True) data.update({ 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '2', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': '1', 'book_set-0-title': 'Les Fleurs du Mal', 'book_set-1-id': '2', 'book_set-1-title': 'Les Paradis Artificiels', 'book_set-2-title': '', }) data._mutable = False formset = AuthorBooksFormSet(data, instance=Author(), save_as_new=True) self.assertTrue(formset.is_valid()) self.assertIs(data._mutable, False) new_author = Author.objects.create(name='Charles Baudelaire') formset = AuthorBooksFormSet(data, instance=new_author, save_as_new=True) saved = formset.save() self.assertEqual(len(saved), 2) book1, book2 = saved self.assertEqual(book1.title, 'Les Fleurs du Mal') self.assertEqual(book2.title, 'Les Paradis Artificiels') # Test using a custom prefix on an inline formset. 
formset = AuthorBooksFormSet(prefix="test") self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_test-0-title">Title:</label>' '<input id="id_test-0-title" type="text" name="test-0-title" maxlength="100">' '<input type="hidden" name="test-0-author" id="id_test-0-author">' '<input type="hidden" name="test-0-id" id="id_test-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_test-1-title">Title:</label>' '<input id="id_test-1-title" type="text" name="test-1-title" maxlength="100">' '<input type="hidden" name="test-1-author" id="id_test-1-author">' '<input type="hidden" name="test-1-id" id="id_test-1-id"></p>' ) def test_inline_formsets_with_custom_pk(self): # Test inline formsets where the inline-edited object has a custom # primary key that is not the fk to the parent object. self.maxDiff = 1024 AuthorBooksFormSet2 = inlineformset_factory( Author, BookWithCustomPK, can_delete=False, extra=1, fields="__all__" ) author = Author.objects.create(pk=1, name='Charles Baudelaire') formset = AuthorBooksFormSet2(instance=author) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_bookwithcustompk_set-0-my_pk">My pk:</label>' '<input id="id_bookwithcustompk_set-0-my_pk" type="number" ' 'name="bookwithcustompk_set-0-my_pk" step="1"></p>' '<p><label for="id_bookwithcustompk_set-0-title">Title:</label>' '<input id="id_bookwithcustompk_set-0-title" type="text" ' 'name="bookwithcustompk_set-0-title" maxlength="100">' '<input type="hidden" name="bookwithcustompk_set-0-author" ' 'value="1" id="id_bookwithcustompk_set-0-author"></p>' ) data = { 'bookwithcustompk_set-TOTAL_FORMS': '1', # the number of forms rendered 'bookwithcustompk_set-INITIAL_FORMS': '0', # the number of forms with initial data 'bookwithcustompk_set-MAX_NUM_FORMS': '', # the max number of forms 'bookwithcustompk_set-0-my_pk': '77777', 'bookwithcustompk_set-0-title': 'Les Fleurs du Mal', } formset = AuthorBooksFormSet2(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1.pk, 77777) book1 = author.bookwithcustompk_set.get() self.assertEqual(book1.title, 'Les Fleurs du Mal') def test_inline_formsets_with_multi_table_inheritance(self): # Test inline formsets where the inline-edited object uses multi-table # inheritance, thus has a non AutoField yet auto-created primary key. 
AuthorBooksFormSet3 = inlineformset_factory(Author, AlternateBook, can_delete=False, extra=1, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') formset = AuthorBooksFormSet3(instance=author) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_alternatebook_set-0-title">Title:</label>' '<input id="id_alternatebook_set-0-title" type="text" ' 'name="alternatebook_set-0-title" maxlength="100"></p>' '<p><label for="id_alternatebook_set-0-notes">Notes:</label>' '<input id="id_alternatebook_set-0-notes" type="text" ' 'name="alternatebook_set-0-notes" maxlength="100">' '<input type="hidden" name="alternatebook_set-0-author" value="1" ' 'id="id_alternatebook_set-0-author">' '<input type="hidden" name="alternatebook_set-0-book_ptr" ' 'id="id_alternatebook_set-0-book_ptr"></p>' ) data = { 'alternatebook_set-TOTAL_FORMS': '1', # the number of forms rendered 'alternatebook_set-INITIAL_FORMS': '0', # the number of forms with initial data 'alternatebook_set-MAX_NUM_FORMS': '', # the max number of forms 'alternatebook_set-0-title': 'Flowers of Evil', 'alternatebook_set-0-notes': 'English translation of Les Fleurs du Mal' } formset = AuthorBooksFormSet3(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) book1, = saved self.assertEqual(book1.title, 'Flowers of Evil') self.assertEqual(book1.notes, 'English translation of Les Fleurs du Mal') @skipUnlessDBFeature('supports_partially_nullable_unique_constraints') def test_inline_formsets_with_nullable_unique_together(self): # Test inline formsets where the inline-edited object has a # unique_together constraint with a nullable member AuthorBooksFormSet4 = inlineformset_factory( Author, BookWithOptionalAltEditor, can_delete=False, extra=2, fields="__all__" ) author = Author.objects.create(pk=1, name='Charles Baudelaire') data = { 'bookwithoptionalalteditor_set-TOTAL_FORMS': '2', # the number of forms rendered 'bookwithoptionalalteditor_set-INITIAL_FORMS': '0', # the number of forms with initial data 'bookwithoptionalalteditor_set-MAX_NUM_FORMS': '', # the max number of forms 'bookwithoptionalalteditor_set-0-author': '1', 'bookwithoptionalalteditor_set-0-title': 'Les Fleurs du Mal', 'bookwithoptionalalteditor_set-1-author': '1', 'bookwithoptionalalteditor_set-1-title': 'Les Fleurs du Mal', } formset = AuthorBooksFormSet4(data, instance=author) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) book1, book2 = saved self.assertEqual(book1.author_id, 1) self.assertEqual(book1.title, 'Les Fleurs du Mal') self.assertEqual(book2.author_id, 1) self.assertEqual(book2.title, 'Les Fleurs du Mal') def test_inline_formsets_with_custom_save_method(self): AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') book1 = Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels') book2 = Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal') book3 = Book.objects.create(pk=3, author=author, title='Flowers of Evil') class PoemForm(forms.ModelForm): def save(self, commit=True): # change the name to "Brooklyn Bridge" just to be a jerk. 
poem = super().save(commit=False) poem.name = "Brooklyn Bridge" if commit: poem.save() return poem PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm, fields="__all__") data = { 'poem_set-TOTAL_FORMS': '3', # the number of forms rendered 'poem_set-INITIAL_FORMS': '0', # the number of forms with initial data 'poem_set-MAX_NUM_FORMS': '', # the max number of forms 'poem_set-0-name': 'The Cloud in Trousers', 'poem_set-1-name': 'I', 'poem_set-2-name': '', } poet = Poet.objects.create(name='Vladimir Mayakovsky') formset = PoemFormSet(data=data, instance=poet) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 2) poem1, poem2 = saved self.assertEqual(poem1.name, 'Brooklyn Bridge') self.assertEqual(poem2.name, 'Brooklyn Bridge') # We can provide a custom queryset to our InlineFormSet: custom_qs = Book.objects.order_by('-title') formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) self.assertEqual(len(formset.forms), 5) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Les Paradis Artificiels" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="1" id="id_book_set-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" ' 'value="Les Fleurs du Mal" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" value="2" id="id_book_set-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" ' 'value="Flowers of Evil" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" value="3" id="id_book_set-2-id"></p>' ) self.assertHTMLEqual( formset.forms[3].as_p(), '<p><label for="id_book_set-3-title">Title:</label>' '<input id="id_book_set-3-title" type="text" name="book_set-3-title" maxlength="100">' '<input type="hidden" name="book_set-3-author" value="1" id="id_book_set-3-author">' '<input type="hidden" name="book_set-3-id" id="id_book_set-3-id"></p>' ) self.assertHTMLEqual( formset.forms[4].as_p(), '<p><label for="id_book_set-4-title">Title:</label>' '<input id="id_book_set-4-title" type="text" name="book_set-4-title" maxlength="100">' '<input type="hidden" name="book_set-4-author" value="1" id="id_book_set-4-author">' '<input type="hidden" name="book_set-4-id" id="id_book_set-4-id"></p>' ) data = { 'book_set-TOTAL_FORMS': '5', # the number of forms rendered 'book_set-INITIAL_FORMS': '3', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book1.id), 'book_set-0-title': 'Les Paradis Artificiels', 'book_set-1-id': str(book2.id), 'book_set-1-title': 'Les Fleurs du Mal', 'book_set-2-id': str(book3.id), 'book_set-2-title': 'Flowers of Evil', 'book_set-3-title': 'Revue des deux mondes', 'book_set-4-title': '', } formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) self.assertTrue(formset.is_valid()) custom_qs = Book.objects.filter(title__startswith='F') formset = AuthorBooksFormSet(instance=author, queryset=custom_qs) 
self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_book_set-0-title">Title:</label>' '<input id="id_book_set-0-title" type="text" name="book_set-0-title" ' 'value="Flowers of Evil" maxlength="100">' '<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author">' '<input type="hidden" name="book_set-0-id" value="3" id="id_book_set-0-id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_book_set-1-title">Title:</label>' '<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100">' '<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author">' '<input type="hidden" name="book_set-1-id" id="id_book_set-1-id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_book_set-2-title">Title:</label>' '<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100">' '<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author">' '<input type="hidden" name="book_set-2-id" id="id_book_set-2-id"></p>' ) data = { 'book_set-TOTAL_FORMS': '3', # the number of forms rendered 'book_set-INITIAL_FORMS': '1', # the number of forms with initial data 'book_set-MAX_NUM_FORMS': '', # the max number of forms 'book_set-0-id': str(book3.id), 'book_set-0-title': 'Flowers of Evil', 'book_set-1-title': 'Revue des deux mondes', 'book_set-2-title': '', } formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs) self.assertTrue(formset.is_valid()) def test_inline_formsets_with_custom_save_method_related_instance(self): """ The ModelForm.save() method should be able to access the related object if it exists in the database (#24395). """ class PoemForm2(forms.ModelForm): def save(self, commit=True): poem = super().save(commit=False) poem.name = "%s by %s" % (poem.name, poem.poet.name) if commit: poem.save() return poem PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm2, fields="__all__") data = { 'poem_set-TOTAL_FORMS': '1', 'poem_set-INITIAL_FORMS': '0', 'poem_set-MAX_NUM_FORMS': '', 'poem_set-0-name': 'Le Lac', } poet = Poet() formset = PoemFormSet(data=data, instance=poet) self.assertTrue(formset.is_valid()) # The Poet instance is saved after the formset instantiation. This # happens in admin's changeform_view() when adding a new object and # some inlines in the same request. poet.name = 'Lamartine' poet.save() poem = formset.save()[0] self.assertEqual(poem.name, 'Le Lac by Lamartine') def test_inline_formsets_with_wrong_fk_name(self): """ Regression for #23451 """ message = "fk_name 'title' is not a ForeignKey to 'model_formsets.Author'." 
with self.assertRaisesMessage(ValueError, message): inlineformset_factory(Author, Book, fields="__all__", fk_name='title') def test_custom_pk(self): # We need to ensure that it is displayed CustomPrimaryKeyFormSet = modelformset_factory(CustomPrimaryKey, fields="__all__") formset = CustomPrimaryKeyFormSet() self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-my_pk">My pk:</label> <input id="id_form-0-my_pk" type="text" ' 'name="form-0-my_pk" maxlength="10"></p>' '<p><label for="id_form-0-some_field">Some field:</label>' '<input id="id_form-0-some_field" type="text" name="form-0-some_field" maxlength="100"></p>' ) # Custom primary keys with ForeignKey, OneToOneField and AutoField ############ place = Place.objects.create(pk=1, name='Giordanos', city='Chicago') FormSet = inlineformset_factory(Place, Owner, extra=2, can_delete=False, fields="__all__") formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 2) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_owner_set-0-name">Name:</label>' '<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" maxlength="100">' '<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place">' '<input type="hidden" name="owner_set-0-auto_id" id="id_owner_set-0-auto_id"></p>' ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_owner_set-1-name">Name:</label>' '<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100">' '<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place">' '<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id"></p>' ) data = { 'owner_set-TOTAL_FORMS': '2', 'owner_set-INITIAL_FORMS': '0', 'owner_set-MAX_NUM_FORMS': '', 'owner_set-0-auto_id': '', 'owner_set-0-name': 'Joe Perry', 'owner_set-1-auto_id': '', 'owner_set-1-name': '', } formset = FormSet(data, instance=place) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) owner1, = saved self.assertEqual(owner1.name, 'Joe Perry') self.assertEqual(owner1.place.name, 'Giordanos') formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 3) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_owner_set-0-name">Name:</label>' '<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" value="Joe Perry" maxlength="100">' '<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place">' '<input type="hidden" name="owner_set-0-auto_id" value="%d" id="id_owner_set-0-auto_id"></p>' % owner1.auto_id ) self.assertHTMLEqual( formset.forms[1].as_p(), '<p><label for="id_owner_set-1-name">Name:</label>' '<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100">' '<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place">' '<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id"></p>' ) self.assertHTMLEqual( formset.forms[2].as_p(), '<p><label for="id_owner_set-2-name">Name:</label>' '<input id="id_owner_set-2-name" type="text" name="owner_set-2-name" maxlength="100">' '<input type="hidden" name="owner_set-2-place" value="1" id="id_owner_set-2-place">' '<input type="hidden" name="owner_set-2-auto_id" id="id_owner_set-2-auto_id"></p>' ) data = { 'owner_set-TOTAL_FORMS': '3', 'owner_set-INITIAL_FORMS': '1', 'owner_set-MAX_NUM_FORMS': '', 'owner_set-0-auto_id': str(owner1.auto_id), 'owner_set-0-name': 'Joe Perry', 'owner_set-1-auto_id': 
'', 'owner_set-1-name': 'Jack Berry', 'owner_set-2-auto_id': '', 'owner_set-2-name': '', } formset = FormSet(data, instance=place) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) owner2, = saved self.assertEqual(owner2.name, 'Jack Berry') self.assertEqual(owner2.place.name, 'Giordanos') # Ensure a custom primary key that is a ForeignKey or OneToOneField get rendered for the user to choose. FormSet = modelformset_factory(OwnerProfile, fields="__all__") formset = FormSet() self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_form-0-owner">Owner:</label>' '<select name="form-0-owner" id="id_form-0-owner">' '<option value="" selected>---------</option>' '<option value="%d">Joe Perry at Giordanos</option>' '<option value="%d">Jack Berry at Giordanos</option>' '</select></p>' '<p><label for="id_form-0-age">Age:</label>' '<input type="number" name="form-0-age" id="id_form-0-age" min="0"></p>' % (owner1.auto_id, owner2.auto_id) ) owner1 = Owner.objects.get(name='Joe Perry') FormSet = inlineformset_factory(Owner, OwnerProfile, max_num=1, can_delete=False, fields="__all__") self.assertEqual(FormSet.max_num, 1) formset = FormSet(instance=owner1) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_ownerprofile-0-age">Age:</label>' '<input type="number" name="ownerprofile-0-age" id="id_ownerprofile-0-age" min="0">' '<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner"></p>' % owner1.auto_id ) data = { 'ownerprofile-TOTAL_FORMS': '1', 'ownerprofile-INITIAL_FORMS': '0', 'ownerprofile-MAX_NUM_FORMS': '1', 'ownerprofile-0-owner': '', 'ownerprofile-0-age': '54', } formset = FormSet(data, instance=owner1) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) profile1, = saved self.assertEqual(profile1.owner, owner1) self.assertEqual(profile1.age, 54) formset = FormSet(instance=owner1) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_ownerprofile-0-age">Age:</label>' '<input type="number" name="ownerprofile-0-age" value="54" id="id_ownerprofile-0-age" min="0">' '<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner"></p>' % owner1.auto_id ) data = { 'ownerprofile-TOTAL_FORMS': '1', 'ownerprofile-INITIAL_FORMS': '1', 'ownerprofile-MAX_NUM_FORMS': '1', 'ownerprofile-0-owner': str(owner1.auto_id), 'ownerprofile-0-age': '55', } formset = FormSet(data, instance=owner1) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) profile1, = saved self.assertEqual(profile1.owner, owner1) self.assertEqual(profile1.age, 55) def test_unique_true_enforces_max_num_one(self): # ForeignKey with unique=True should enforce max_num=1 place = Place.objects.create(pk=1, name='Giordanos', city='Chicago') FormSet = inlineformset_factory(Place, Location, can_delete=False, fields="__all__") self.assertEqual(FormSet.max_num, 1) formset = FormSet(instance=place) self.assertEqual(len(formset.forms), 1) self.assertHTMLEqual( formset.forms[0].as_p(), '<p><label for="id_location_set-0-lat">Lat:</label>' '<input id="id_location_set-0-lat" type="text" name="location_set-0-lat" maxlength="100"></p>' '<p><label for="id_location_set-0-lon">Lon:</label> ' '<input id="id_location_set-0-lon" type="text" name="location_set-0-lon" maxlength="100">' '<input type="hidden" name="location_set-0-place" value="1" id="id_location_set-0-place">' '<input 
type="hidden" name="location_set-0-id" id="id_location_set-0-id"></p>' ) def test_foreign_keys_in_parents(self): self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey) self.assertEqual(type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey) def test_unique_validation(self): FormSet = modelformset_factory(Product, fields="__all__", extra=1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'car-red', } formset = FormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) product1, = saved self.assertEqual(product1.slug, 'car-red') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'car-red', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, [{'slug': ['Product with this Slug already exists.']}]) def test_modelformset_validate_max_flag(self): # If validate_max is set and max_num is less than TOTAL_FORMS in the # data, then throw an exception. MAX_NUM_FORMS in the data is # irrelevant here (it's output as a hint for the client but its # value in the returned data is not checked) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '2', # should be ignored 'form-0-price': '12.00', 'form-0-quantity': '1', 'form-1-price': '24.00', 'form-1-quantity': '2', } FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1, validate_max=True) formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.non_form_errors(), ['Please submit at most 1 form.']) # Now test the same thing without the validate_max flag to ensure # default behavior is unchanged FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1) formset = FormSet(data) self.assertTrue(formset.is_valid()) def test_modelformset_min_num_equals_max_num_less_than(self): data = { 'form-TOTAL_FORMS': '3', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '2', 'form-0-slug': 'car-red', 'form-1-slug': 'car-blue', 'form-2-slug': 'car-black', } FormSet = modelformset_factory( Product, fields='__all__', extra=1, max_num=2, validate_max=True, min_num=2, validate_min=True, ) formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.non_form_errors(), ['Please submit at most 2 forms.']) def test_modelformset_min_num_equals_max_num_more_than(self): data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '2', 'form-0-slug': 'car-red', } FormSet = modelformset_factory( Product, fields='__all__', extra=1, max_num=2, validate_max=True, min_num=2, validate_min=True, ) formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.non_form_errors(), ['Please submit at least 2 forms.']) def test_unique_together_validation(self): FormSet = modelformset_factory(Price, fields="__all__", extra=1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '12.00', 'form-0-quantity': '1', } formset = FormSet(data) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) price1, = saved self.assertEqual(price1.price, Decimal('12.00')) self.assertEqual(price1.quantity, 1) data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '12.00', 'form-0-quantity': '1', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, 
[{'__all__': ['Price with this Price and Quantity already exists.']}]) def test_unique_together_with_inlineformset_factory(self): # Also see bug #8882. repository = Repository.objects.create(name='Test Repo') FormSet = inlineformset_factory(Repository, Revision, extra=1, fields="__all__") data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertTrue(formset.is_valid()) saved = formset.save() self.assertEqual(len(saved), 1) revision1, = saved self.assertEqual(revision1.repository, repository) self.assertEqual(revision1.revision, '146239817507f148d448db38840db7c3cbf47c76') # attempt to save the same revision against the same repo. data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertFalse(formset.is_valid()) self.assertEqual(formset.errors, [{'__all__': ['Revision with this Repository and Revision already exists.']}]) # unique_together with inlineformset_factory with overridden form fields # Also see #9494 FormSet = inlineformset_factory(Repository, Revision, fields=('revision',), extra=1) data = { 'revision_set-TOTAL_FORMS': '1', 'revision_set-INITIAL_FORMS': '0', 'revision_set-MAX_NUM_FORMS': '', 'revision_set-0-repository': repository.pk, 'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76', 'revision_set-0-DELETE': '', } formset = FormSet(data, instance=repository) self.assertFalse(formset.is_valid()) def test_callable_defaults(self): # Use of callable defaults (see bug #7975). person = Person.objects.create(name='Ringo') FormSet = inlineformset_factory(Person, Membership, can_delete=False, extra=1, fields="__all__") formset = FormSet(instance=person) # Django will render a hidden field for model fields that have a callable # default. This is required to ensure the value is tested for change correctly # when determine what extra forms have changed to save. self.assertEqual(len(formset.forms), 1) # this formset only has one form form = formset.forms[0] now = form.fields['date_joined'].initial() result = form.as_p() result = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}(?:\.[0-9]+)?', '__DATETIME__', result) self.assertHTMLEqual( result, '<p><label for="id_membership_set-0-date_joined">Date joined:</label>' '<input type="text" name="membership_set-0-date_joined" ' 'value="__DATETIME__" id="id_membership_set-0-date_joined">' '<input type="hidden" name="initial-membership_set-0-date_joined" value="__DATETIME__" ' 'id="initial-membership_set-0-id_membership_set-0-date_joined"></p>' '<p><label for="id_membership_set-0-karma">Karma:</label>' '<input type="number" name="membership_set-0-karma" id="id_membership_set-0-karma">' '<input type="hidden" name="membership_set-0-person" value="%d" id="id_membership_set-0-person">' '<input type="hidden" name="membership_set-0-id" id="id_membership_set-0-id"></p>' % person.id) # test for validation with callable defaults. 
Validations rely on hidden fields data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(data, instance=person) self.assertTrue(formset.is_valid()) # now test for when the data changes one_day_later = now + datetime.timedelta(days=1) filled_data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined': one_day_later.strftime('%Y-%m-%d %H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(filled_data, instance=person) self.assertFalse(formset.is_valid()) # now test with split datetime fields class MembershipForm(forms.ModelForm): date_joined = forms.SplitDateTimeField(initial=now) class Meta: model = Membership fields = "__all__" def __init__(self, **kwargs): super().__init__(**kwargs) self.fields['date_joined'].widget = forms.SplitDateTimeWidget() FormSet = inlineformset_factory( Person, Membership, form=MembershipForm, can_delete=False, extra=1, fields="__all__", ) data = { 'membership_set-TOTAL_FORMS': '1', 'membership_set-INITIAL_FORMS': '0', 'membership_set-MAX_NUM_FORMS': '', 'membership_set-0-date_joined_0': now.strftime('%Y-%m-%d'), 'membership_set-0-date_joined_1': now.strftime('%H:%M:%S'), 'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'), 'membership_set-0-karma': '', } formset = FormSet(data, instance=person) self.assertTrue(formset.is_valid()) def test_inlineformset_factory_with_null_fk(self): # inlineformset_factory tests with fk having null=True. see #9462. 
# create some data that will exhibit the issue team = Team.objects.create(name="Red Vipers") Player(name="Timmy").save() Player(name="Bobby", team=team).save() PlayerInlineFormSet = inlineformset_factory(Team, Player, fields="__all__") formset = PlayerInlineFormSet() self.assertQuerysetEqual(formset.get_queryset(), []) formset = PlayerInlineFormSet(instance=team) players = formset.get_queryset() self.assertEqual(len(players), 1) player1, = players self.assertEqual(player1.team, team) self.assertEqual(player1.name, 'Bobby') def test_inlineformset_with_arrayfield(self): class SimpleArrayField(forms.CharField): """A proxy for django.contrib.postgres.forms.SimpleArrayField.""" def to_python(self, value): value = super().to_python(value) return value.split(',') if value else [] class BookForm(forms.ModelForm): title = SimpleArrayField() class Meta: model = Book fields = ('title',) BookFormSet = inlineformset_factory(Author, Book, form=BookForm) data = { 'book_set-TOTAL_FORMS': '3', 'book_set-INITIAL_FORMS': '0', 'book_set-MAX_NUM_FORMS': '', 'book_set-0-title': 'test1,test2', 'book_set-1-title': 'test1,test2', 'book_set-2-title': 'test3,test4', } author = Author.objects.create(name='test') formset = BookFormSet(data, instance=author) self.assertEqual(formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}, {}]) def test_model_formset_with_custom_pk(self): # a formset for a Model that has a custom primary key that still needs to be # added to the formset automatically FormSet = modelformset_factory(ClassyMexicanRestaurant, fields=["tacos_are_yummy"]) self.assertEqual(sorted(FormSet().forms[0].fields), ['tacos_are_yummy', 'the_restaurant']) def test_model_formset_with_initial_model_instance(self): # has_changed should compare model instance and primary key # see #18898 FormSet = modelformset_factory(Poem, fields='__all__') john_milton = Poet(name="John Milton") john_milton.save() data = { 'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-name': '', 'form-0-poet': str(john_milton.id), } formset = FormSet(initial=[{'poet': john_milton}], data=data) self.assertFalse(formset.extra_forms[0].has_changed()) def test_model_formset_with_initial_queryset(self): # has_changed should work with queryset and list of pk's # see #18898 FormSet = modelformset_factory(AuthorMeeting, fields='__all__') Author.objects.create(pk=1, name='Charles Baudelaire') data = { 'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-name': '', 'form-0-created': '', 'form-0-authors': list(Author.objects.values_list('id', flat=True)), } formset = FormSet(initial=[{'authors': Author.objects.all()}], data=data) self.assertFalse(formset.extra_forms[0].has_changed()) def test_prevent_duplicates_from_with_the_same_formset(self): FormSet = modelformset_factory(Product, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': 2, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-slug': 'red_car', 'form-1-slug': 'red_car', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for slug.']) FormSet = modelformset_factory(Price, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': 2, 'form-INITIAL_FORMS': 0, 'form-MAX_NUM_FORMS': '', 'form-0-price': '25', 'form-0-quantity': '7', 'form-1-price': '25', 'form-1-quantity': '7', } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data 
for price and quantity, which must be unique.'] ) # Only the price field is specified, this should skip any unique checks since # the unique_together is not fulfilled. This will fail with a KeyError if broken. FormSet = modelformset_factory(Price, fields=("price",), extra=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-price': '24', 'form-1-price': '24', } formset = FormSet(data) self.assertTrue(formset.is_valid()) FormSet = inlineformset_factory(Author, Book, extra=0, fields="__all__") author = Author.objects.create(pk=1, name='Charles Baudelaire') Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels') Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal') Book.objects.create(pk=3, author=author, title='Flowers of Evil') book_ids = author.book_set.order_by('id').values_list('id', flat=True) data = { 'book_set-TOTAL_FORMS': '2', 'book_set-INITIAL_FORMS': '2', 'book_set-MAX_NUM_FORMS': '', 'book_set-0-title': 'The 2008 Election', 'book_set-0-author': str(author.id), 'book_set-0-id': str(book_ids[0]), 'book_set-1-title': 'The 2008 Election', 'book_set-1-author': str(author.id), 'book_set-1-id': str(book_ids[1]), } formset = FormSet(data=data, instance=author) self.assertFalse(formset.is_valid()) self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for title.']) self.assertEqual(formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}]) FormSet = modelformset_factory(Post, fields="__all__", extra=2) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'blah', 'form-0-slug': 'Morning', 'form-0-subtitle': 'foo', 'form-0-posted': '2009-01-01', 'form-1-title': 'blah', 'form-1-slug': 'Morning in Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-01-01' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for title which must be unique for the date in posted.'] ) self.assertEqual( formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}] ) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'foo', 'form-0-slug': 'Morning in Prague', 'form-0-subtitle': 'foo', 'form-0-posted': '2009-01-01', 'form-1-title': 'blah', 'form-1-slug': 'Morning in Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-08-02' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for slug which must be unique for the year in posted.'] ) data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '', 'form-0-title': 'foo', 'form-0-slug': 'Morning in Prague', 'form-0-subtitle': 'rawr', 'form-0-posted': '2008-08-01', 'form-1-title': 'blah', 'form-1-slug': 'Prague', 'form-1-subtitle': 'rawr', 'form-1-posted': '2009-08-02' } formset = FormSet(data) self.assertFalse(formset.is_valid()) self.assertEqual( formset._non_form_errors, ['Please correct the duplicate data for subtitle which must be unique for the month in posted.'] ) def test_prevent_change_outer_model_and_create_invalid_data(self): author = Author.objects.create(name='Charles') other_author = Author.objects.create(name='Walt') AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '2', 'form-INITIAL_FORMS': '2', 'form-MAX_NUM_FORMS': '', 'form-0-id': str(author.id), 
'form-0-name': 'Charles', 'form-1-id': str(other_author.id), # A model not in the formset's queryset. 'form-1-name': 'Changed name', } # This formset is only for Walt Whitman and shouldn't accept data for # other_author. formset = AuthorFormSet(data=data, queryset=Author.objects.filter(id__in=(author.id,))) self.assertTrue(formset.is_valid()) formset.save() # The name of other_author shouldn't be changed and new models aren't # created. self.assertSequenceEqual(Author.objects.all(), [author, other_author]) def test_validation_without_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['This field is required.']}], ) def test_validation_with_child_model_without_id(self): BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-name': 'Charles', 'form-0-write_speed': '10', } formset = BetterAuthorFormSet(data) self.assertEqual( formset.errors, [{'author_ptr': ['This field is required.']}], ) def test_validation_with_invalid_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-id': 'abc', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['Select a valid choice. That choice is not one of the available choices.']}], ) def test_validation_with_nonexistent_id(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') data = { 'form-TOTAL_FORMS': '1', 'form-INITIAL_FORMS': '1', 'form-MAX_NUM_FORMS': '', 'form-0-id': '12345', 'form-0-name': 'Charles', } formset = AuthorFormSet(data) self.assertEqual( formset.errors, [{'id': ['Select a valid choice. 
That choice is not one of the available choices.']}], ) def test_initial_form_count_empty_data(self): AuthorFormSet = modelformset_factory(Author, fields='__all__') formset = AuthorFormSet({}) self.assertEqual(formset.initial_form_count(), 0) class TestModelFormsetOverridesTroughFormMeta(TestCase): def test_modelformset_factory_widgets(self): widgets = { 'name': forms.TextInput(attrs={'class': 'poet'}) } PoetFormSet = modelformset_factory(Poet, fields="__all__", widgets=widgets) form = PoetFormSet.form() self.assertHTMLEqual( str(form['name']), '<input id="id_name" maxlength="100" type="text" class="poet" name="name" required>' ) def test_inlineformset_factory_widgets(self): widgets = { 'title': forms.TextInput(attrs={'class': 'book'}) } BookFormSet = inlineformset_factory(Author, Book, widgets=widgets, fields="__all__") form = BookFormSet.form() self.assertHTMLEqual( str(form['title']), '<input class="book" id="id_title" maxlength="100" name="title" type="text" required>' ) def test_modelformset_factory_labels_overrides(self): BookFormSet = modelformset_factory(Book, fields="__all__", labels={ 'title': 'Name' }) form = BookFormSet.form() self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>') self.assertHTMLEqual( form['title'].legend_tag(), '<legend for="id_title">Name:</legend>', ) def test_inlineformset_factory_labels_overrides(self): BookFormSet = inlineformset_factory(Author, Book, fields="__all__", labels={ 'title': 'Name' }) form = BookFormSet.form() self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>') self.assertHTMLEqual( form['title'].legend_tag(), '<legend for="id_title">Name:</legend>', ) def test_modelformset_factory_help_text_overrides(self): BookFormSet = modelformset_factory(Book, fields="__all__", help_texts={ 'title': 'Choose carefully.' }) form = BookFormSet.form() self.assertEqual(form['title'].help_text, 'Choose carefully.') def test_inlineformset_factory_help_text_overrides(self): BookFormSet = inlineformset_factory(Author, Book, fields="__all__", help_texts={ 'title': 'Choose carefully.' }) form = BookFormSet.form() self.assertEqual(form['title'].help_text, 'Choose carefully.') def test_modelformset_factory_error_messages_overrides(self): author = Author.objects.create(pk=1, name='Charles Baudelaire') BookFormSet = modelformset_factory(Book, fields="__all__", error_messages={ 'title': { 'max_length': 'Title too long!!' } }) form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id}) form.full_clean() self.assertEqual(form.errors, {'title': ['Title too long!!']}) def test_inlineformset_factory_error_messages_overrides(self): author = Author.objects.create(pk=1, name='Charles Baudelaire') BookFormSet = inlineformset_factory(Author, Book, fields="__all__", error_messages={ 'title': { 'max_length': 'Title too long!!' 
} }) form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id}) form.full_clean() self.assertEqual(form.errors, {'title': ['Title too long!!']}) def test_modelformset_factory_field_class_overrides(self): author = Author.objects.create(pk=1, name='Charles Baudelaire') BookFormSet = modelformset_factory(Book, fields="__all__", field_classes={ 'title': forms.SlugField, }) form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id}) self.assertIs(Book._meta.get_field('title').__class__, models.CharField) self.assertIsInstance(form.fields['title'], forms.SlugField) def test_inlineformset_factory_field_class_overrides(self): author = Author.objects.create(pk=1, name='Charles Baudelaire') BookFormSet = inlineformset_factory(Author, Book, fields="__all__", field_classes={ 'title': forms.SlugField, }) form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id}) self.assertIs(Book._meta.get_field('title').__class__, models.CharField) self.assertIsInstance(form.fields['title'], forms.SlugField) def test_modelformset_factory_absolute_max(self): AuthorFormSet = modelformset_factory(Author, fields='__all__', absolute_max=1500) data = { 'form-TOTAL_FORMS': '1501', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '0', } formset = AuthorFormSet(data=data) self.assertIs(formset.is_valid(), False) self.assertEqual(len(formset.forms), 1500) self.assertEqual( formset.non_form_errors(), ['Please submit at most 1000 forms.'], ) def test_modelformset_factory_absolute_max_with_max_num(self): AuthorFormSet = modelformset_factory( Author, fields='__all__', max_num=20, absolute_max=100, ) data = { 'form-TOTAL_FORMS': '101', 'form-INITIAL_FORMS': '0', 'form-MAX_NUM_FORMS': '0', } formset = AuthorFormSet(data=data) self.assertIs(formset.is_valid(), False) self.assertEqual(len(formset.forms), 100) self.assertEqual( formset.non_form_errors(), ['Please submit at most 20 forms.'], ) def test_inlineformset_factory_absolute_max(self): author = Author.objects.create(name='Charles Baudelaire') BookFormSet = inlineformset_factory( Author, Book, fields='__all__', absolute_max=1500, ) data = { 'book_set-TOTAL_FORMS': '1501', 'book_set-INITIAL_FORMS': '0', 'book_set-MAX_NUM_FORMS': '0', } formset = BookFormSet(data, instance=author) self.assertIs(formset.is_valid(), False) self.assertEqual(len(formset.forms), 1500) self.assertEqual( formset.non_form_errors(), ['Please submit at most 1000 forms.'], ) def test_inlineformset_factory_absolute_max_with_max_num(self): author = Author.objects.create(name='Charles Baudelaire') BookFormSet = inlineformset_factory( Author, Book, fields='__all__', max_num=20, absolute_max=100, ) data = { 'book_set-TOTAL_FORMS': '101', 'book_set-INITIAL_FORMS': '0', 'book_set-MAX_NUM_FORMS': '0', } formset = BookFormSet(data, instance=author) self.assertIs(formset.is_valid(), False) self.assertEqual(len(formset.forms), 100) self.assertEqual( formset.non_form_errors(), ['Please submit at most 20 forms.'], ) def test_modelformset_factory_can_delete_extra(self): AuthorFormSet = modelformset_factory( Author, fields='__all__', can_delete=True, can_delete_extra=True, extra=2, ) formset = AuthorFormSet() self.assertEqual(len(formset), 2) self.assertIn('DELETE', formset.forms[0].fields) self.assertIn('DELETE', formset.forms[1].fields) def test_modelformset_factory_disable_delete_extra(self): AuthorFormSet = modelformset_factory( Author, fields='__all__', can_delete=True, can_delete_extra=False, extra=2, ) formset = AuthorFormSet() self.assertEqual(len(formset), 2) 
        self.assertNotIn('DELETE', formset.forms[0].fields)
        self.assertNotIn('DELETE', formset.forms[1].fields)

    def test_inlineformset_factory_can_delete_extra(self):
        BookFormSet = inlineformset_factory(
            Author,
            Book,
            fields='__all__',
            can_delete=True,
            can_delete_extra=True,
            extra=2,
        )
        formset = BookFormSet()
        self.assertEqual(len(formset), 2)
        self.assertIn('DELETE', formset.forms[0].fields)
        self.assertIn('DELETE', formset.forms[1].fields)

    def test_inlineformset_factory_can_not_delete_extra(self):
        BookFormSet = inlineformset_factory(
            Author,
            Book,
            fields='__all__',
            can_delete=True,
            can_delete_extra=False,
            extra=2,
        )
        formset = BookFormSet()
        self.assertEqual(len(formset), 2)
        self.assertNotIn('DELETE', formset.forms[0].fields)
        self.assertNotIn('DELETE', formset.forms[1].fields)

    def test_inlineformset_factory_passes_renderer(self):
        from django.forms.renderers import Jinja2

        renderer = Jinja2()
        BookFormSet = inlineformset_factory(
            Author,
            Book,
            fields='__all__',
            renderer=renderer,
        )
        formset = BookFormSet()
        self.assertEqual(formset.renderer, renderer)

    def test_modelformset_factory_passes_renderer(self):
        from django.forms.renderers import Jinja2

        renderer = Jinja2()
        BookFormSet = modelformset_factory(Author, fields='__all__', renderer=renderer)
        formset = BookFormSet()
        self.assertEqual(formset.renderer, renderer)
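

# --- Illustrative usage sketch (editor's addition, not part of the original test
# suite). It shows how the modelformset_factory options exercised by the tests
# above (fields, extra, can_delete, widgets, labels) combine in ordinary usage.
# The helper name is hypothetical and nothing calls it; it assumes the Author
# model from this test app's .models module, and the local imports may duplicate
# ones already made at the top of the file.
def _modelformset_factory_usage_sketch():
    from django import forms
    from django.forms.models import modelformset_factory

    from .models import Author

    AuthorFormSet = modelformset_factory(
        Author,
        fields='__all__',
        extra=1,            # one blank form for creating a new Author
        can_delete=True,    # adds a DELETE checkbox to every form
        widgets={'name': forms.TextInput(attrs={'class': 'author'})},
        labels={'name': 'Author name'},
    )
    # Bind the formset to the current queryset; each rendered form picks up the
    # overridden widget class and label, mirroring the assertions above.
    formset = AuthorFormSet(queryset=Author.objects.all())
    return [form.as_p() for form in formset]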
6a7174514e1f2475b97a0935ce522bc8826d754e99fbf93f98b2e3a384807021
import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from pathlib import Path
from unittest import mock

from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import (
    SessionStore as CacheDBSession,
)
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
    SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import (
    InvalidSessionKey,
    SessionInterrupted,
)
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import JSONSerializer
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured
from django.core.signing import TimestampSigner
from django.http import HttpResponse
from django.test import (
    RequestFactory,
    SimpleTestCase,
    TestCase,
    ignore_warnings,
    override_settings,
)
from django.utils import timezone

from .models import SessionStore as CustomDatabaseSession


class SessionTestsMixin:
    # This does not inherit from TestCase to avoid any tests being run with this
    # class, which wouldn't work, and to allow different TestCase subclasses to
    # be used.

    backend = None  # subclasses must specify

    def setUp(self):
        self.session = self.backend()

    def tearDown(self):
        # NB: be careful to delete any sessions created; stale sessions fill up
        # the /tmp (with some backends) and eventually overwhelm it after lots
        # of runs (think buildbots)
        self.session.delete()

    def test_new_session(self):
        self.assertIs(self.session.modified, False)
        self.assertIs(self.session.accessed, False)

    def test_get_empty(self):
        self.assertIsNone(self.session.get('cat'))

    def test_store(self):
        self.session['cat'] = "dog"
        self.assertIs(self.session.modified, True)
        self.assertEqual(self.session.pop('cat'), 'dog')

    def test_pop(self):
        self.session['some key'] = 'exists'
        # Reset the session's flags to pretend we haven't accessed it:
        self.session.accessed = False
        self.session.modified = False
        self.assertEqual(self.session.pop('some key'), 'exists')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
        self.assertIsNone(self.session.get('some key'))

    def test_pop_default(self):
        self.assertEqual(self.session.pop('some key', 'does not exist'), 'does not exist')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_pop_default_named_argument(self):
        self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, False)

    def test_pop_no_default_keyerror_raised(self):
        with self.assertRaises(KeyError):
            self.session.pop('some key')

    def test_setdefault(self):
        self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)

    def test_update(self):
        self.session.update({'update key': 1})
        self.assertIs(self.session.accessed, True)
        self.assertIs(self.session.modified, True)
self.assertEqual(self.session.get('update key', None), 1) def test_has_key(self): self.session['some key'] = 1 self.session.modified = False self.session.accessed = False self.assertIn('some key', self.session) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_values(self): self.assertEqual(list(self.session.values()), []) self.assertIs(self.session.accessed, True) self.session['some key'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.values()), [1]) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_keys(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.keys()), ['x']) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_items(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.items()), [('x', 1)]) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, False) def test_clear(self): self.session['x'] = 1 self.session.modified = False self.session.accessed = False self.assertEqual(list(self.session.items()), [('x', 1)]) self.session.clear() self.assertEqual(list(self.session.items()), []) self.assertIs(self.session.accessed, True) self.assertIs(self.session.modified, True) def test_save(self): self.session.save() self.assertIs(self.session.exists(self.session.session_key), True) def test_delete(self): self.session.save() self.session.delete(self.session.session_key) self.assertIs(self.session.exists(self.session.session_key), False) def test_flush(self): self.session['foo'] = 'bar' self.session.save() prev_key = self.session.session_key self.session.flush() self.assertIs(self.session.exists(prev_key), False) self.assertNotEqual(self.session.session_key, prev_key) self.assertIsNone(self.session.session_key) self.assertIs(self.session.modified, True) self.assertIs(self.session.accessed, True) def test_cycle(self): self.session['a'], self.session['b'] = 'c', 'd' self.session.save() prev_key = self.session.session_key prev_data = list(self.session.items()) self.session.cycle_key() self.assertIs(self.session.exists(prev_key), False) self.assertNotEqual(self.session.session_key, prev_key) self.assertEqual(list(self.session.items()), prev_data) def test_cycle_with_no_session_cache(self): self.session['a'], self.session['b'] = 'c', 'd' self.session.save() prev_data = self.session.items() self.session = self.backend(self.session.session_key) self.assertIs(hasattr(self.session, '_session_cache'), False) self.session.cycle_key() self.assertCountEqual(self.session.items(), prev_data) def test_save_doesnt_clear_data(self): self.session['a'] = 'b' self.session.save() self.assertEqual(self.session['a'], 'b') def test_invalid_key(self): # Submitting an invalid session key (either by guessing, or if the db has # removed the key) results in a new key being generated. 
try: session = self.backend('1') session.save() self.assertNotEqual(session.session_key, '1') self.assertIsNone(session.get('cat')) session.delete() finally: # Some backends leave a stale cache entry for the invalid # session key; make sure that entry is manually deleted session.delete('1') def test_session_key_empty_string_invalid(self): """Falsey values (Such as an empty string) are rejected.""" self.session._session_key = '' self.assertIsNone(self.session.session_key) def test_session_key_too_short_invalid(self): """Strings shorter than 8 characters are rejected.""" self.session._session_key = '1234567' self.assertIsNone(self.session.session_key) def test_session_key_valid_string_saved(self): """Strings of length 8 and up are accepted and stored.""" self.session._session_key = '12345678' self.assertEqual(self.session.session_key, '12345678') def test_session_key_is_read_only(self): def set_session_key(session): session.session_key = session._get_new_session_key() with self.assertRaises(AttributeError): set_session_key(self.session) # Custom session expiry def test_default_expiry(self): # A normal session has a max age equal to settings self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) # So does a custom session with an idle expiration time of 0 (but it'll # expire at browser close) self.session.set_expiry(0) self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) def test_custom_expiry_seconds(self): modification = timezone.now() self.session.set_expiry(10) date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_timedelta(self): modification = timezone.now() # Mock timezone.now, because set_expiry calls it on this code path. 
original_now = timezone.now try: timezone.now = lambda: modification self.session.set_expiry(timedelta(seconds=10)) finally: timezone.now = original_now date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_datetime(self): modification = timezone.now() self.session.set_expiry(modification + timedelta(seconds=10)) date = self.session.get_expiry_date(modification=modification) self.assertEqual(date, modification + timedelta(seconds=10)) age = self.session.get_expiry_age(modification=modification) self.assertEqual(age, 10) def test_custom_expiry_reset(self): self.session.set_expiry(None) self.session.set_expiry(10) self.session.set_expiry(None) self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE) def test_get_expire_at_browser_close(self): # Tests get_expire_at_browser_close with different settings and different # set_expiry calls with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False): self.session.set_expiry(10) self.assertIs(self.session.get_expire_at_browser_close(), False) self.session.set_expiry(0) self.assertIs(self.session.get_expire_at_browser_close(), True) self.session.set_expiry(None) self.assertIs(self.session.get_expire_at_browser_close(), False) with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True): self.session.set_expiry(10) self.assertIs(self.session.get_expire_at_browser_close(), False) self.session.set_expiry(0) self.assertIs(self.session.get_expire_at_browser_close(), True) self.session.set_expiry(None) self.assertIs(self.session.get_expire_at_browser_close(), True) def test_decode(self): # Ensure we can decode what we encode data = {'a test key': 'a test value'} encoded = self.session.encode(data) self.assertEqual(self.session.decode(encoded), data) def test_decode_failure_logged_to_security(self): tests = [ base64.b64encode(b'flaskdj:alkdjf').decode('ascii'), 'bad:encoded:value', ] for encoded in tests: with self.subTest(encoded=encoded): with self.assertLogs('django.security.SuspiciousSession', 'WARNING') as cm: self.assertEqual(self.session.decode(encoded), {}) # The failed decode is logged. self.assertIn('Session data corrupted', cm.output[0]) def test_decode_serializer_exception(self): signer = TimestampSigner(salt=self.session.key_salt) encoded = signer.sign(b'invalid data') self.assertEqual(self.session.decode(encoded), {}) def test_actual_expiry(self): old_session_key = None new_session_key = None try: self.session['foo'] = 'bar' self.session.set_expiry(-timedelta(seconds=10)) self.session.save() old_session_key = self.session.session_key # With an expiry date in the past, the session expires instantly. new_session = self.backend(self.session.session_key) new_session_key = new_session.session_key self.assertNotIn('foo', new_session) finally: self.session.delete(old_session_key) self.session.delete(new_session_key) def test_session_load_does_not_create_record(self): """ Loading an unknown session key does not create a session record. Creating session records on load is a DOS vulnerability. 
""" session = self.backend('someunknownkey') session.load() self.assertIsNone(session.session_key) self.assertIs(session.exists(session.session_key), False) # provided unknown key was cycled, not reused self.assertNotEqual(session.session_key, 'someunknownkey') def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): """ Sessions shouldn't be resurrected by a concurrent request. """ # Create new session. s1 = self.backend() s1['test_data'] = 'value1' s1.save(must_create=True) # Logout in another context. s2 = self.backend(s1.session_key) s2.delete() # Modify session in first context. s1['test_data'] = 'value2' with self.assertRaises(UpdateError): # This should throw an exception as the session is deleted, not # resurrect the session. s1.save() self.assertEqual(s1.load(), {}) class DatabaseSessionTests(SessionTestsMixin, TestCase): backend = DatabaseSession session_engine = 'django.contrib.sessions.backends.db' @property def model(self): return self.backend.get_model_class() def test_session_str(self): "Session repr should be the session key." self.session['x'] = 1 self.session.save() session_key = self.session.session_key s = self.model.objects.get(session_key=session_key) self.assertEqual(str(s), session_key) def test_session_get_decoded(self): """ Test we can use Session.get_decoded to retrieve data stored in normal way """ self.session['x'] = 1 self.session.save() s = self.model.objects.get(session_key=self.session.session_key) self.assertEqual(s.get_decoded(), {'x': 1}) def test_sessionmanager_save(self): """ Test SessionManager.save method """ # Create a session self.session['y'] = 1 self.session.save() s = self.model.objects.get(session_key=self.session.session_key) # Change it self.model.objects.save(s.session_key, {'y': 2}, s.expire_date) # Clear cache, so that it will be retrieved from DB del self.session._session_cache self.assertEqual(self.session['y'], 2) def test_clearsessions_command(self): """ Test clearsessions command for clearing expired sessions. """ self.assertEqual(0, self.model.objects.count()) # One object in the future self.session['foo'] = 'bar' self.session.set_expiry(3600) self.session.save() # One object in the past other_session = self.backend() other_session['foo'] = 'bar' other_session.set_expiry(-3600) other_session.save() # Two sessions are in the database before clearsessions... self.assertEqual(2, self.model.objects.count()) with override_settings(SESSION_ENGINE=self.session_engine): management.call_command('clearsessions') # ... and one is deleted. self.assertEqual(1, self.model.objects.count()) @override_settings(USE_TZ=True) class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests): pass class CustomDatabaseSessionTests(DatabaseSessionTests): backend = CustomDatabaseSession session_engine = 'sessions_tests.models' custom_session_cookie_age = 60 * 60 * 24 # One day. def test_extra_session_field(self): # Set the account ID to be picked up by a custom session storage # and saved to a custom session model database column. self.session['_auth_user_id'] = 42 self.session.save() # Make sure that the customized create_model_instance() was called. s = self.model.objects.get(session_key=self.session.session_key) self.assertEqual(s.account_id, 42) # Make the session "anonymous". self.session.pop('_auth_user_id') self.session.save() # Make sure that save() on an existing session did the right job. 
s = self.model.objects.get(session_key=self.session.session_key) self.assertIsNone(s.account_id) def test_custom_expiry_reset(self): self.session.set_expiry(None) self.session.set_expiry(10) self.session.set_expiry(None) self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) def test_default_expiry(self): self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) self.session.set_expiry(0) self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age) class CacheDBSessionTests(SessionTestsMixin, TestCase): backend = CacheDBSession def test_exists_searches_cache_first(self): self.session.save() with self.assertNumQueries(0): self.assertIs(self.session.exists(self.session.session_key), True) # Some backends might issue a warning @ignore_warnings(module="django.core.cache.backends.base") def test_load_overlong_key(self): self.session._session_key = (string.ascii_letters + string.digits) * 20 self.assertEqual(self.session.load(), {}) @override_settings(SESSION_CACHE_ALIAS='sessions') def test_non_default_cache(self): # 21000 - CacheDB backend should respect SESSION_CACHE_ALIAS. with self.assertRaises(InvalidCacheBackendError): self.backend() @override_settings(USE_TZ=True) class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests): pass class FileSessionTests(SessionTestsMixin, SimpleTestCase): backend = FileSession def setUp(self): # Do file session tests in an isolated directory, and kill it after we're done. self.original_session_file_path = settings.SESSION_FILE_PATH self.temp_session_store = settings.SESSION_FILE_PATH = self.mkdtemp() # Reset the file session backend's internal caches if hasattr(self.backend, '_storage_path'): del self.backend._storage_path super().setUp() def tearDown(self): super().tearDown() settings.SESSION_FILE_PATH = self.original_session_file_path shutil.rmtree(self.temp_session_store) def mkdtemp(self): return tempfile.mkdtemp() @override_settings( SESSION_FILE_PATH='/if/this/directory/exists/you/have/a/weird/computer', ) def test_configuration_check(self): del self.backend._storage_path # Make sure the file backend checks for a good storage dir with self.assertRaises(ImproperlyConfigured): self.backend() def test_invalid_key_backslash(self): # Ensure we don't allow directory-traversal. # This is tested directly on _key_to_file, as load() will swallow # a SuspiciousOperation in the same way as an OSError - by creating # a new session, making it unclear whether the slashes were detected. with self.assertRaises(InvalidSessionKey): self.backend()._key_to_file("a\\b\\c") def test_invalid_key_forwardslash(self): # Ensure we don't allow directory-traversal with self.assertRaises(InvalidSessionKey): self.backend()._key_to_file("a/b/c") @override_settings( SESSION_ENGINE="django.contrib.sessions.backends.file", SESSION_COOKIE_AGE=0, ) def test_clearsessions_command(self): """ Test clearsessions command for clearing expired sessions. 
""" storage_path = self.backend._get_storage_path() file_prefix = settings.SESSION_COOKIE_NAME def count_sessions(): return len([ session_file for session_file in os.listdir(storage_path) if session_file.startswith(file_prefix) ]) self.assertEqual(0, count_sessions()) # One object in the future self.session['foo'] = 'bar' self.session.set_expiry(3600) self.session.save() # One object in the past other_session = self.backend() other_session['foo'] = 'bar' other_session.set_expiry(-3600) other_session.save() # One object in the present without an expiry (should be deleted since # its modification time + SESSION_COOKIE_AGE will be in the past when # clearsessions runs). other_session2 = self.backend() other_session2['foo'] = 'bar' other_session2.save() # Three sessions are in the filesystem before clearsessions... self.assertEqual(3, count_sessions()) management.call_command('clearsessions') # ... and two are deleted. self.assertEqual(1, count_sessions()) class FileSessionPathLibTests(FileSessionTests): def mkdtemp(self): tmp_dir = super().mkdtemp() return Path(tmp_dir) class CacheSessionTests(SessionTestsMixin, SimpleTestCase): backend = CacheSession # Some backends might issue a warning @ignore_warnings(module="django.core.cache.backends.base") def test_load_overlong_key(self): self.session._session_key = (string.ascii_letters + string.digits) * 20 self.assertEqual(self.session.load(), {}) def test_default_cache(self): self.session.save() self.assertIsNotNone(caches['default'].get(self.session.cache_key)) @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', }, 'sessions': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'session', }, }, SESSION_CACHE_ALIAS='sessions') def test_non_default_cache(self): # Re-initialize the session backend to make use of overridden settings. 
        self.session = self.backend()
        self.session.save()
        self.assertIsNone(caches['default'].get(self.session.cache_key))
        self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))

    def test_create_and_save(self):
        self.session = self.backend()
        self.session.create()
        self.session.save()
        self.assertIsNotNone(caches['default'].get(self.session.cache_key))


class SessionMiddlewareTests(TestCase):
    request_factory = RequestFactory()

    @staticmethod
    def get_response_touching_session(request):
        request.session['hello'] = 'world'
        return HttpResponse('Session test')

    @override_settings(SESSION_COOKIE_SECURE=True)
    def test_secure_session_cookie(self):
        request = self.request_factory.get('/')
        middleware = SessionMiddleware(self.get_response_touching_session)

        # Handle the response through the middleware
        response = middleware(request)
        self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['secure'], True)

    @override_settings(SESSION_COOKIE_HTTPONLY=True)
    def test_httponly_session_cookie(self):
        request = self.request_factory.get('/')
        middleware = SessionMiddleware(self.get_response_touching_session)

        # Handle the response through the middleware
        response = middleware(request)
        self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], True)
        self.assertIn(
            cookies.Morsel._reserved['httponly'],
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )

    @override_settings(SESSION_COOKIE_SAMESITE='Strict')
    def test_samesite_session_cookie(self):
        request = self.request_factory.get('/')
        middleware = SessionMiddleware(self.get_response_touching_session)
        response = middleware(request)
        self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['samesite'], 'Strict')

    @override_settings(SESSION_COOKIE_HTTPONLY=False)
    def test_no_httponly_session_cookie(self):
        request = self.request_factory.get('/')
        middleware = SessionMiddleware(self.get_response_touching_session)
        response = middleware(request)
        self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], '')
        self.assertNotIn(
            cookies.Morsel._reserved['httponly'],
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )

    def test_session_save_on_500(self):
        def response_500(request):
            response = HttpResponse('Horrible error')
            response.status_code = 500
            request.session['hello'] = 'world'
            return response

        request = self.request_factory.get('/')
        SessionMiddleware(response_500)(request)

        # The value wasn't saved above.
        self.assertNotIn('hello', request.session.load())

    def test_session_update_error_redirect(self):
        def response_delete_session(request):
            request.session = DatabaseSession()
            request.session.save(must_create=True)
            request.session.delete()
            return HttpResponse()

        request = self.request_factory.get('/foo/')
        middleware = SessionMiddleware(response_delete_session)

        msg = (
            "The request's session was deleted before the request completed. "
            "The user may have logged out in a concurrent request, for example."
        )
        with self.assertRaisesMessage(SessionInterrupted, msg):
            # Handle the response through the middleware. It will try to save
            # the deleted session which will cause an UpdateError that's caught
            # and raised as a SessionInterrupted.
middleware(request) def test_session_delete_on_end(self): def response_ending_session(request): request.session.flush() return HttpResponse('Session test') request = self.request_factory.get('/') middleware = SessionMiddleware(response_ending_session) # Before deleting, there has to be an existing cookie request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc' # Handle the response through the middleware response = middleware(request) # The cookie was deleted, not recreated. # A deleted cookie header looks like: # Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/ self.assertEqual( 'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; ' 'Max-Age=0; Path=/; SameSite={}'.format( settings.SESSION_COOKIE_NAME, settings.SESSION_COOKIE_SAMESITE, ), str(response.cookies[settings.SESSION_COOKIE_NAME]) ) # SessionMiddleware sets 'Vary: Cookie' to prevent the 'Set-Cookie' # from being cached. self.assertEqual(response.headers['Vary'], 'Cookie') @override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/') def test_session_delete_on_end_with_custom_domain_and_path(self): def response_ending_session(request): request.session.flush() return HttpResponse('Session test') request = self.request_factory.get('/') middleware = SessionMiddleware(response_ending_session) # Before deleting, there has to be an existing cookie request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc' # Handle the response through the middleware response = middleware(request) # The cookie was deleted, not recreated. # A deleted cookie header with a custom domain and path looks like: # Set-Cookie: sessionid=; Domain=.example.local; # expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; # Path=/example/ self.assertEqual( 'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, ' '01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/; SameSite={}'.format( settings.SESSION_COOKIE_NAME, settings.SESSION_COOKIE_SAMESITE, ), str(response.cookies[settings.SESSION_COOKIE_NAME]) ) def test_flush_empty_without_session_cookie_doesnt_set_cookie(self): def response_ending_session(request): request.session.flush() return HttpResponse('Session test') request = self.request_factory.get('/') middleware = SessionMiddleware(response_ending_session) # Handle the response through the middleware response = middleware(request) # A cookie should not be set. self.assertEqual(response.cookies, {}) # The session is accessed so "Vary: Cookie" should be set. self.assertEqual(response.headers['Vary'], 'Cookie') def test_empty_session_saved(self): """ If a session is emptied of data but still has a key, it should still be updated. """ def response_set_session(request): # Set a session key and some data. request.session['foo'] = 'bar' return HttpResponse('Session test') request = self.request_factory.get('/') middleware = SessionMiddleware(response_set_session) # Handle the response through the middleware. response = middleware(request) self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),)) # A cookie should be set, along with Vary: Cookie. self.assertIn( 'Set-Cookie: sessionid=%s' % request.session.session_key, str(response.cookies) ) self.assertEqual(response.headers['Vary'], 'Cookie') # Empty the session data. del request.session['foo'] # Handle the response through the middleware. 
response = HttpResponse('Session test') response = middleware.process_response(request, response) self.assertEqual(dict(request.session.values()), {}) session = Session.objects.get(session_key=request.session.session_key) self.assertEqual(session.get_decoded(), {}) # While the session is empty, it hasn't been flushed so a cookie should # still be set, along with Vary: Cookie. self.assertGreater(len(request.session.session_key), 8) self.assertIn( 'Set-Cookie: sessionid=%s' % request.session.session_key, str(response.cookies) ) self.assertEqual(response.headers['Vary'], 'Cookie') class CookieSessionTests(SessionTestsMixin, SimpleTestCase): backend = CookieSession def test_save(self): """ This test tested exists() in the other session backends, but that doesn't make sense for us. """ pass def test_cycle(self): """ This test tested cycle_key() which would create a new session key for the same session data. But we can't invalidate previously signed cookies (other than letting them expire naturally) so testing for this behavior is meaningless. """ pass @unittest.expectedFailure def test_actual_expiry(self): # The cookie backend doesn't handle non-default expiry dates, see #19201 super().test_actual_expiry() def test_unpickling_exception(self): # signed_cookies backend should handle unpickle exceptions gracefully # by creating a new session self.assertEqual(self.session.serializer, JSONSerializer) self.session.save() with mock.patch('django.core.signing.loads', side_effect=ValueError): self.session.load() @unittest.skip("Cookie backend doesn't have an external store to create records in.") def test_session_load_does_not_create_record(self): pass @unittest.skip("CookieSession is stored in the client and there is no way to query it.") def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self): pass class ClearSessionsCommandTests(SimpleTestCase): def test_clearsessions_unsupported(self): msg = ( "Session engine 'tests.sessions_tests.no_clear_expired' doesn't " "support clearing expired sessions." ) with self.settings(SESSION_ENGINE='tests.sessions_tests.no_clear_expired'): with self.assertRaisesMessage(management.CommandError, msg): management.call_command('clearsessions')
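# A minimal usage sketch (not part of the test suite) of the session-store API
# exercised above, assuming it runs inside a configured Django project with the
# database backend (django.contrib.sessions.backends.db). The function name and
# the 'cart_id' key are illustrative only.
from django.contrib.sessions.backends.db import SessionStore


def demo_session_roundtrip():
    store = SessionStore()              # no key yet; save() creates one
    store['cart_id'] = 42
    store.save()
    key = store.session_key

    restored = SessionStore(session_key=key)
    assert restored['cart_id'] == 42    # data round-trips through the backend

    restored.cycle_key()                # new key, same data (done on login)
    restored.delete()                   # a later save() must not resurrect it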
0343bccc2f5751c5fd431362c67e4c2d4f181ef728d8303645cc16e987e5ebcb
from datetime import datetime from django.contrib.sitemaps import GenericSitemap from django.test import ignore_warnings, override_settings from django.utils.deprecation import RemovedInDjango50Warning from .base import SitemapTestsBase from .models import TestModel @override_settings(ABSOLUTE_URL_OVERRIDES={}) class GenericViewsSitemapTests(SitemapTestsBase): def test_generic_sitemap_attributes(self): datetime_value = datetime.now() queryset = TestModel.objects.all() generic_sitemap = GenericSitemap( info_dict={ 'queryset': queryset, 'date_field': datetime_value, }, priority=0.6, changefreq='monthly', protocol='https', ) attr_values = ( ('date_field', datetime_value), ('priority', 0.6), ('changefreq', 'monthly'), ('protocol', 'https'), ) for attr_name, expected_value in attr_values: with self.subTest(attr_name=attr_name): self.assertEqual(getattr(generic_sitemap, attr_name), expected_value) self.assertCountEqual(generic_sitemap.queryset, queryset) def test_generic_sitemap(self): "A minimal generic sitemap can be rendered" response = self.client.get('/generic/sitemap.xml') expected = '' for pk in TestModel.objects.values_list("id", flat=True): expected += "<url><loc>%s/testmodel/%s/</loc></url>" % (self.base_url, pk) expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> %s </urlset> """ % expected self.assertXMLEqual(response.content.decode(), expected_content) def test_generic_sitemap_lastmod(self): test_model = TestModel.objects.first() TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0)) response = self.client.get('/generic-lastmod/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/testmodel/%s/</loc><lastmod>2013-03-13</lastmod></url> </urlset> """ % (self.base_url, test_model.pk) self.assertXMLEqual(response.content.decode(), expected_content) self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 10:00:00 GMT') def test_get_protocol_defined_in_constructor(self): for protocol in ['http', 'https']: with self.subTest(protocol=protocol): sitemap = GenericSitemap({'queryset': None}, protocol=protocol) self.assertEqual(sitemap.get_protocol(), protocol) def test_get_protocol_passed_as_argument(self): sitemap = GenericSitemap({'queryset': None}) for protocol in ['http', 'https']: with self.subTest(protocol=protocol): self.assertEqual(sitemap.get_protocol(protocol), protocol) @ignore_warnings(category=RemovedInDjango50Warning) def test_get_protocol_default(self): sitemap = GenericSitemap({'queryset': None}) self.assertEqual(sitemap.get_protocol(), 'http') def test_get_protocol_default_warning(self): sitemap = GenericSitemap({'queryset': None}) msg = ( "The default sitemap protocol will be changed from 'http' to " "'https' in Django 5.0. Set Sitemap.protocol to silence this " "warning." 
) with self.assertWarnsMessage(RemovedInDjango50Warning, msg): sitemap.get_protocol() def test_generic_sitemap_index(self): TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0)) response = self.client.get('/generic-lastmod/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>http://example.com/simple/sitemap-generic.xml</loc><lastmod>2013-03-13T10:00:00</lastmod></sitemap> </sitemapindex>""" self.assertXMLEqual(response.content.decode('utf-8'), expected_content)
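# A sketch of how GenericSitemap is typically wired into a URLconf, mirroring
# the attributes asserted above. In normal use 'date_field' names a model field
# (here TestModel.lastmod); the test above only checks that the constructor
# stores what it is given. The 'testmodel' section name is illustrative only.
from django.contrib.sitemaps import GenericSitemap
from django.contrib.sitemaps.views import sitemap
from django.urls import path

from .models import TestModel

info_dict = {
    'queryset': TestModel.objects.all(),
    'date_field': 'lastmod',
}

urlpatterns = [
    path(
        'sitemap.xml',
        sitemap,
        {'sitemaps': {'testmodel': GenericSitemap(
            info_dict, priority=0.6, changefreq='monthly', protocol='https',
        )}},
        name='django.contrib.sitemaps.views.sitemap',
    ),
]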
753f4d9a9505b64bc300991845f1dd5709df5e254399316beabe3710269a1c4c
from unittest import mock from urllib.parse import urlencode from django.contrib.sitemaps import ( SitemapNotFound, _get_sitemap_full_url, ping_google, ) from django.core.exceptions import ImproperlyConfigured from django.test import modify_settings, override_settings from .base import SitemapTestsBase class PingGoogleTests(SitemapTestsBase): @override_settings(ROOT_URLCONF='sitemaps_tests.urls.sitemap_only') @mock.patch('django.contrib.sitemaps.urlopen') def test_something(self, urlopen): ping_google() params = urlencode({'sitemap': 'https://example.com/sitemap-without-entries/sitemap.xml'}) full_url = 'https://www.google.com/webmasters/tools/ping?%s' % params urlopen.assert_called_with(full_url) @override_settings(ROOT_URLCONF='sitemaps_tests.urls.sitemap_only') def test_get_sitemap_full_url_global(self): self.assertEqual(_get_sitemap_full_url(None), 'https://example.com/sitemap-without-entries/sitemap.xml') @override_settings(ROOT_URLCONF='sitemaps_tests.urls.index_only') def test_get_sitemap_full_url_index(self): self.assertEqual(_get_sitemap_full_url(None), 'https://example.com/simple/index.xml') @override_settings(ROOT_URLCONF='sitemaps_tests.urls.empty') def test_get_sitemap_full_url_not_detected(self): msg = "You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected." with self.assertRaisesMessage(SitemapNotFound, msg): _get_sitemap_full_url(None) def test_get_sitemap_full_url_exact_url(self): self.assertEqual(_get_sitemap_full_url('/foo.xml'), 'https://example.com/foo.xml') def test_get_sitemap_full_url_insecure(self): self.assertEqual( _get_sitemap_full_url('/foo.xml', sitemap_uses_https=False), 'http://example.com/foo.xml' ) @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) def test_get_sitemap_full_url_no_sites(self): msg = "ping_google requires django.contrib.sites, which isn't installed." with self.assertRaisesMessage(ImproperlyConfigured, msg): _get_sitemap_full_url(None)
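# A sketch of calling ping_google() outside the tests, assuming the sites
# framework is installed. With no argument the sitemap URL is auto-detected
# from the URLconf (which is what the mocked test above relies on); catching
# SitemapNotFound covers the case where detection fails. The function name is
# illustrative only.
from django.contrib.sitemaps import SitemapNotFound, ping_google


def notify_google():
    try:
        ping_google()
    except SitemapNotFound:
        # Fall back to an explicit path, resolved against the current Site.
        ping_google(sitemap_url='/sitemap.xml')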
7a159e12f557b6de67bce49b04052e3b208c441679424ac249147a6873c2ec04
from datetime import date from django.test import override_settings from .base import SitemapTestsBase @override_settings(ROOT_URLCONF='sitemaps_tests.urls.https') class HTTPSSitemapTests(SitemapTestsBase): protocol = 'https' def test_secure_sitemap_index(self): "A secure sitemap index can be rendered" response = self.client.get('/secure/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/secure/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_secure_sitemap_section(self): "A secure sitemap section can be rendered" response = self.client.get('/secure/sitemap-simple.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(SECURE_PROXY_SSL_HEADER=False) class HTTPSDetectionSitemapTests(SitemapTestsBase): extra = {'wsgi.url_scheme': 'https'} def test_sitemap_index_with_https_request(self): "A sitemap index requested in HTTPS is rendered with HTTPS links" response = self.client.get('/simple/index.xml', **self.extra) expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url.replace('http://', 'https://'), date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_section_with_https_request(self): "A sitemap section requested in HTTPS is rendered with HTTPS links" response = self.client.get('/simple/sitemap-simple.xml', **self.extra) expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url.replace('http://', 'https://'), date.today()) self.assertXMLEqual(response.content.decode(), expected_content)
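# A sketch of a sitemap that always emits https links (the behavior the
# /secure/ URLs above exercise): setting Sitemap.protocol overrides whatever
# scheme the request used. The class name and item list are illustrative only.
from django.contrib.sitemaps import Sitemap, views
from django.urls import path


class SecureSitemap(Sitemap):
    protocol = 'https'
    changefreq = 'never'
    priority = 0.5

    def items(self):
        return ['/location/']

    def location(self, item):
        return item


secure_sitemaps = {'simple': SecureSitemap()}

urlpatterns = [
    path('secure/index.xml', views.index, {'sitemaps': secure_sitemaps}),
    path(
        'secure/sitemap-<section>.xml',
        views.sitemap,
        {'sitemaps': secure_sitemaps},
        name='django.contrib.sitemaps.views.sitemap',
    ),
]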
49fbb9ebc3d4e1d0bc78496b01ba285dbdc323e448abd4d1e26be9cf8bd1e0b3
import os from datetime import date from django.contrib.sitemaps import Sitemap from django.contrib.sites.models import Site from django.core.exceptions import ImproperlyConfigured from django.test import ignore_warnings, modify_settings, override_settings from django.utils import translation from django.utils.deprecation import RemovedInDjango50Warning from django.utils.formats import localize from .base import SitemapTestsBase from .models import TestModel class HTTPSitemapTests(SitemapTestsBase): use_sitemap_err_msg = ( 'To use sitemaps, either enable the sites framework or pass a ' 'Site/RequestSite object in your view.' ) def test_simple_sitemap_index(self): "A simple sitemap index can be rendered" response = self.client.get('/simple/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_not_callable(self): """A sitemap may not be callable.""" response = self.client.get('/simple-not-callable/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_paged_sitemap(self): """A sitemap may have multiple pages.""" response = self.client.get('/simple-paged/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>{0}/simple/sitemap-simple.xml</loc><lastmod>{1}</lastmod></sitemap><sitemap><loc>{0}/simple/sitemap-simple.xml?p=2</loc><lastmod>{1}</lastmod></sitemap> </sitemapindex> """.format(self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')], }]) def test_simple_sitemap_custom_lastmod_index(self): "A simple sitemap index can be rendered with a custom template" response = self.client.get('/simple/custom-lastmod-index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_simple_sitemap_section(self): "A simple sitemap section can be rendered" response = self.client.get('/simple/sitemap-simple.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_no_section(self): response = self.client.get('/simple/sitemap-simple2.xml') self.assertEqual(str(response.context['exception']), "No sitemap available for section: 'simple2'") self.assertEqual(response.status_code, 404) def 
test_empty_page(self): response = self.client.get('/simple/sitemap-simple.xml?p=0') self.assertEqual(str(response.context['exception']), 'Page 0 empty') self.assertEqual(response.status_code, 404) def test_page_not_int(self): response = self.client.get('/simple/sitemap-simple.xml?p=test') self.assertEqual(str(response.context['exception']), "No page 'test'") self.assertEqual(response.status_code, 404) def test_simple_sitemap(self): "A simple sitemap can be rendered" response = self.client.get('/simple/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')], }]) def test_simple_custom_sitemap(self): "A simple sitemap can be rendered with a custom template" response = self.client.get('/simple/custom-sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_last_modified(self): "Last-Modified header is set correctly" response = self.client.get('/lastmod/sitemap.xml') self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 10:00:00 GMT') def test_sitemap_last_modified_date(self): """ The Last-Modified header should be support dates (without time). """ response = self.client.get('/lastmod/date-sitemap.xml') self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 00:00:00 GMT') def test_sitemap_last_modified_tz(self): """ The Last-Modified header should be converted from timezone aware dates to GMT. """ response = self.client.get('/lastmod/tz-sitemap.xml') self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 15:00:00 GMT') def test_sitemap_last_modified_missing(self): "Last-Modified header is missing when sitemap has no lastmod" response = self.client.get('/generic/sitemap.xml') self.assertFalse(response.has_header('Last-Modified')) def test_sitemap_last_modified_mixed(self): "Last-Modified header is omitted when lastmod not on all items" response = self.client.get('/lastmod-mixed/sitemap.xml') self.assertFalse(response.has_header('Last-Modified')) def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self): """ The Last-Modified header is omitted when lastmod isn't found in all sitemaps. Test sitemaps are sorted by lastmod in ascending order. """ response = self.client.get('/lastmod-sitemaps/mixed-ascending.xml') self.assertFalse(response.has_header('Last-Modified')) def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self): """ The Last-Modified header is omitted when lastmod isn't found in all sitemaps. Test sitemaps are sorted by lastmod in descending order. 
""" response = self.client.get('/lastmod-sitemaps/mixed-descending.xml') self.assertFalse(response.has_header('Last-Modified')) def test_sitemaps_lastmod_ascending(self): """ The Last-Modified header is set to the most recent sitemap lastmod. Test sitemaps are sorted by lastmod in ascending order. """ response = self.client.get('/lastmod-sitemaps/ascending.xml') self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT') def test_sitemaps_lastmod_descending(self): """ The Last-Modified header is set to the most recent sitemap lastmod. Test sitemaps are sorted by lastmod in descending order. """ response = self.client.get('/lastmod-sitemaps/descending.xml') self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT') def test_sitemap_get_latest_lastmod_none(self): """ sitemapindex.lastmod is ommitted when Sitemap.lastmod is callable and Sitemap.get_latest_lastmod is not implemented """ response = self.client.get('/lastmod/get-latest-lastmod-none-sitemap.xml') self.assertNotContains(response, '<lastmod>') def test_sitemap_get_latest_lastmod(self): """ sitemapindex.lastmod is included when Sitemap.lastmod is attribute and Sitemap.get_latest_lastmod is implemented """ response = self.client.get('/lastmod/get-latest-lastmod-sitemap.xml') self.assertContains(response, '<lastmod>2013-03-13T10:00:00</lastmod>') def test_sitemap_latest_lastmod_timezone(self): """ lastmod datestamp shows timezones if Sitemap.get_latest_lastmod returns an aware datetime. """ response = self.client.get('/lastmod/latest-lastmod-timezone-sitemap.xml') self.assertContains(response, '<lastmod>2013-03-13T10:00:00-05:00</lastmod>') def test_localized_priority(self): """The priority value should not be localized.""" with translation.override('fr'): self.assertEqual('0,3', localize(0.3)) # Priorities aren't rendered in localized format. response = self.client.get('/simple/sitemap.xml') self.assertContains(response, '<priority>0.5</priority>') self.assertContains(response, '<lastmod>%s</lastmod>' % date.today()) @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) def test_requestsite_sitemap(self): # Hitting the flatpages sitemap without the sites framework installed # doesn't raise an exception. response = self.client.get('/simple/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>http://testserver/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """ % date.today() self.assertXMLEqual(response.content.decode(), expected_content) @ignore_warnings(category=RemovedInDjango50Warning) def test_sitemap_get_urls_no_site_1(self): """ Check we get ImproperlyConfigured if we don't pass a site object to Sitemap.get_urls and no Site objects exist """ Site.objects.all().delete() with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): Sitemap().get_urls() @modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}) @ignore_warnings(category=RemovedInDjango50Warning) def test_sitemap_get_urls_no_site_2(self): """ Check we get ImproperlyConfigured when we don't pass a site object to Sitemap.get_urls if Site objects exists, but the sites framework is not actually installed. 
""" with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg): Sitemap().get_urls() @ignore_warnings(category=RemovedInDjango50Warning) def test_sitemap_item(self): """ Check to make sure that the raw item is included with each Sitemap.get_url() url result. """ test_sitemap = Sitemap() test_sitemap.items = TestModel.objects.order_by('pk').all def is_testmodel(url): return isinstance(url['item'], TestModel) item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls())) self.assertTrue(item_in_url_info) def test_cached_sitemap_index(self): """ A cached sitemap index can be rendered (#2713). """ response = self.client.get('/cached/index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/cached/sitemap-simple.xml</loc><lastmod>%s</lastmod></sitemap> </sitemapindex> """ % (self.base_url, date.today()) self.assertXMLEqual(response.content.decode(), expected_content) def test_x_robots_sitemap(self): response = self.client.get('/simple/index.xml') self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive') response = self.client.get('/simple/sitemap.xml') self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive') def test_empty_sitemap(self): response = self.client.get('/empty/sitemap.xml') self.assertEqual(response.status_code, 200) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_simple_i18n_sitemap_index(self): """ A simple i18n sitemap index can be rendered, without logging variable lookup errors. """ with self.assertNoLogs('django.template', 'DEBUG'): response = self.client.get('/simple/i18n.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>{0}/en/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url><url><loc>{0}/pt/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url> </urlset> """.format(self.base_url, self.i18n_model.pk) self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_alternate_i18n_sitemap_index(self): """ A i18n sitemap with alternate/hreflang links can be rendered. """ response = self.client.get('/alternates/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'), ('es', 'Spanish'))) def test_alternate_i18n_sitemap_limited(self): """ A i18n sitemap index with limited languages can be rendered. 
""" response = self.client.get('/limited/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/es/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) @override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'))) def test_alternate_i18n_sitemap_xdefault(self): """ A i18n sitemap index with x-default can be rendered. """ response = self.client.get('/x-default/i18n.xml') url, pk = self.base_url, self.i18n_model.pk expected_urls = f""" <url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/> </url> <url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority> <xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/> <xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/> </url> """.replace('\n', '') expected_content = f"""<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> {expected_urls} </urlset> """ self.assertXMLEqual(response.content.decode(), expected_content) def test_sitemap_without_entries(self): response = self.client.get('/sitemap-without-entries/sitemap.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> </urlset>""" self.assertXMLEqual(response.content.decode(), expected_content) def test_callable_sitemod_partial(self): """ Not all items have `lastmod`. Therefore the `Last-Modified` header is not set by the detail or index sitemap view. 
""" index_response = self.client.get('/callable-lastmod-partial/index.xml') sitemap_response = self.client.get('/callable-lastmod-partial/sitemap.xml') self.assertNotIn('Last-Modified', index_response) self.assertNotIn('Last-Modified', sitemap_response) expected_content_index = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc></sitemap> </sitemapindex> """ expected_content_sitemap = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>http://example.com/location/</loc><lastmod>2013-03-13</lastmod></url><url><loc>http://example.com/location/</loc></url> </urlset> """ self.assertXMLEqual(index_response.content.decode(), expected_content_index) self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap) def test_callable_sitemod_full(self): """ All items in the sitemap have `lastmod`. The `Last-Modified` header is set for the detail and index sitemap view. """ index_response = self.client.get('/callable-lastmod-full/index.xml') sitemap_response = self.client.get('/callable-lastmod-full/sitemap.xml') self.assertEqual(index_response.headers['Last-Modified'], 'Thu, 13 Mar 2014 10:00:00 GMT') self.assertEqual(sitemap_response.headers['Last-Modified'], 'Thu, 13 Mar 2014 10:00:00 GMT') expected_content_index = """<?xml version="1.0" encoding="UTF-8"?> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>http://example.com/simple/sitemap-callable-lastmod.xml</loc><lastmod>2014-03-13T10:00:00</lastmod></sitemap> </sitemapindex> """ expected_content_sitemap = """<?xml version="1.0" encoding="UTF-8"?> <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"> <url><loc>http://example.com/location/</loc><lastmod>2013-03-13</lastmod></url><url><loc>http://example.com/location/</loc><lastmod>2014-03-13</lastmod></url> </urlset> """ self.assertXMLEqual(index_response.content.decode(), expected_content_index) self.assertXMLEqual(sitemap_response.content.decode(), expected_content_sitemap) # RemovedInDjango50Warning class DeprecatedTests(SitemapTestsBase): @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')], }]) def test_simple_sitemap_custom_index_warning(self): msg = 'Calling `__str__` on SitemapIndexItem is deprecated, use the `location` attribute instead.' with self.assertRaisesMessage(RemovedInDjango50Warning, msg): self.client.get('/simple/custom-index.xml') @ignore_warnings(category=RemovedInDjango50Warning) @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')], }]) def test_simple_sitemap_custom_index(self): "A simple sitemap index can be rendered with a custom template" response = self.client.get('/simple/custom-index.xml') expected_content = """<?xml version="1.0" encoding="UTF-8"?> <!-- This is a customised template --> <sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap> </sitemapindex> """ % (self.base_url) self.assertXMLEqual(response.content.decode(), expected_content)
0b4360ded7b40e4d6d80ca8455c6a628d8d4750ceb8dfda090efee2c035d29dd
from django.db.models import Q from django.http import Http404 from django.shortcuts import get_list_or_404, get_object_or_404 from django.test import TestCase from .models import Article, Author class GetObjectOr404Tests(TestCase): def test_get_object_or_404(self): a1 = Author.objects.create(name="Brave Sir Robin") a2 = Author.objects.create(name="Patsy") # No Articles yet, so we should get an Http404 error. with self.assertRaises(Http404): get_object_or_404(Article, title="Foo") article = Article.objects.create(title="Run away!") article.authors.set([a1, a2]) # get_object_or_404 can be passed a Model to query. self.assertEqual( get_object_or_404(Article, title__contains="Run"), article ) # We can also use the Article manager through an Author object. self.assertEqual( get_object_or_404(a1.article_set, title__contains="Run"), article ) # No articles containing "Camelot". This should raise an Http404 error. with self.assertRaises(Http404): get_object_or_404(a1.article_set, title__contains="Camelot") # Custom managers can be used too. self.assertEqual( get_object_or_404(Article.by_a_sir, title="Run away!"), article ) # QuerySets can be used too. self.assertEqual( get_object_or_404(Article.objects.all(), title__contains="Run"), article ) # Just as when using a get() lookup, you will get an error if more than # one object is returned. with self.assertRaises(Author.MultipleObjectsReturned): get_object_or_404(Author.objects.all()) # Using an empty QuerySet raises an Http404 error. with self.assertRaises(Http404): get_object_or_404(Article.objects.none(), title__contains="Run") # get_list_or_404 can be used to get lists of objects self.assertEqual( get_list_or_404(a1.article_set, title__icontains="Run"), [article] ) # Http404 is returned if the list is empty. with self.assertRaises(Http404): get_list_or_404(a1.article_set, title__icontains="Shrubbery") # Custom managers can be used too. self.assertEqual( get_list_or_404(Article.by_a_sir, title__icontains="Run"), [article] ) # QuerySets can be used too. self.assertEqual( get_list_or_404(Article.objects.all(), title__icontains="Run"), [article] ) # Q objects. self.assertEqual( get_object_or_404( Article, Q(title__startswith='Run') | Q(title__startswith='Walk'), authors__name__contains='Brave', ), article, ) self.assertEqual( get_list_or_404( Article, Q(title__startswith='Run') | Q(title__startswith='Walk'), authors__name='Patsy', ), [article], ) def test_bad_class(self): # Given an argument klass that is not a Model, Manager, or Queryset # raises a helpful ValueError message msg = "First argument to get_object_or_404() must be a Model, Manager, or QuerySet, not 'str'." with self.assertRaisesMessage(ValueError, msg): get_object_or_404("Article", title__icontains="Run") class CustomClass: pass msg = "First argument to get_object_or_404() must be a Model, Manager, or QuerySet, not 'CustomClass'." with self.assertRaisesMessage(ValueError, msg): get_object_or_404(CustomClass, title__icontains="Run") # Works for lists too msg = "First argument to get_list_or_404() must be a Model, Manager, or QuerySet, not 'list'." 
with self.assertRaisesMessage(ValueError, msg): get_list_or_404([Article], title__icontains="Run") def test_get_object_or_404_queryset_attribute_error(self): """AttributeError raised by QuerySet.get() isn't hidden.""" with self.assertRaisesMessage(AttributeError, 'AttributeErrorManager'): get_object_or_404(Article.attribute_error_objects, id=42) def test_get_list_or_404_queryset_attribute_error(self): """AttributeError raised by QuerySet.filter() isn't hidden.""" with self.assertRaisesMessage(AttributeError, 'AttributeErrorManager'): get_list_or_404(Article.attribute_error_objects, title__icontains='Run')
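# A sketch of the shortcuts in ordinary view code, mirroring the lookups the
# tests above cover. The view names are illustrative only.
from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import get_list_or_404, get_object_or_404

from .models import Article


def article_detail(request, title):
    # A Model, Manager, or QuerySet may be passed as the first argument.
    article = get_object_or_404(Article.objects.all(), title=title)
    return HttpResponse(article.title)


def knightly_articles(request, name):
    # Q objects and related lookups work exactly as with get()/filter();
    # an empty result raises Http404 instead of returning [].
    articles = get_list_or_404(
        Article,
        Q(title__startswith='Run') | Q(title__startswith='Walk'),
        authors__name__contains=name,
    )
    return HttpResponse(', '.join(a.title for a in articles))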
fa61c320a3312f60a949033d6f1472299f29ec87784cb1189a59c7ec58c4278d
""" Testing using the Test Client The test client is a class that can act like a simple browser for testing purposes. It allows the user to compose GET and POST requests, and obtain the response that the server gave to those requests. The server Response objects are annotated with the details of the contexts and templates that were rendered during the process of serving the request. ``Client`` objects are stateful - they will retain cookie (and thus session) details for the lifetime of the ``Client`` instance. This is not intended as a replacement for Twill, Selenium, or other browser automation frameworks - it is here to allow testing against the contexts and templates produced by a view, rather than the HTML rendered to the end-user. """ import itertools import tempfile from unittest import mock from django.contrib.auth.models import User from django.core import mail from django.http import HttpResponse, HttpResponseNotAllowed from django.test import ( AsyncRequestFactory, Client, RequestFactory, SimpleTestCase, TestCase, modify_settings, override_settings, ) from django.urls import reverse_lazy from django.utils.decorators import async_only_middleware from django.views.generic import RedirectView from .views import TwoArgException, get_view, post_view, trace_view def middleware_urlconf(get_response): def middleware(request): request.urlconf = 'tests.test_client.urls_middleware_urlconf' return get_response(request) return middleware @async_only_middleware def async_middleware_urlconf(get_response): async def middleware(request): request.urlconf = 'tests.test_client.urls_middleware_urlconf' return await get_response(request) return middleware @override_settings(ROOT_URLCONF='test_client.urls') class ClientTest(TestCase): @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password') cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False) def test_get_view(self): "GET a view" # The data is ignored, but let's check it doesn't crash the system # anyway. data = {'var': '\xf2'} response = self.client.get('/get_view/', data) # Check some response details self.assertContains(response, 'This is a test') self.assertEqual(response.context['var'], '\xf2') self.assertEqual(response.templates[0].name, 'GET Template') def test_query_string_encoding(self): # WSGI requires latin-1 encoded strings. response = self.client.get('/get_view/?var=1\ufffd') self.assertEqual(response.context['var'], '1\ufffd') def test_get_data_none(self): msg = ( "Cannot encode None for key 'value' in a query string. Did you " "mean to pass an empty string or omit the value?" 
) with self.assertRaisesMessage(TypeError, msg): self.client.get('/get_view/', {'value': None}) def test_get_post_view(self): "GET a view that normally expects POSTs" response = self.client.get('/post_view/', {}) # Check some response details self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'Empty GET Template') self.assertTemplateUsed(response, 'Empty GET Template') self.assertTemplateNotUsed(response, 'Empty POST Template') def test_empty_post(self): "POST an empty dictionary to a view" response = self.client.post('/post_view/', {}) # Check some response details self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'Empty POST Template') self.assertTemplateNotUsed(response, 'Empty GET Template') self.assertTemplateUsed(response, 'Empty POST Template') def test_post(self): "POST some data to a view" post_data = { 'value': 37 } response = self.client.post('/post_view/', post_data) # Check some response details self.assertContains(response, 'Data received') self.assertEqual(response.context['data'], '37') self.assertEqual(response.templates[0].name, 'POST Template') def test_post_data_none(self): msg = ( "Cannot encode None for key 'value' as POST data. Did you mean " "to pass an empty string or omit the value?" ) with self.assertRaisesMessage(TypeError, msg): self.client.post('/post_view/', {'value': None}) def test_json_serialization(self): """The test client serializes JSON data.""" methods = ('post', 'put', 'patch', 'delete') tests = ( ({'value': 37}, {'value': 37}), ([37, True], [37, True]), ((37, False), [37, False]), ) for method in methods: with self.subTest(method=method): for data, expected in tests: with self.subTest(data): client_method = getattr(self.client, method) method_name = method.upper() response = client_method('/json_view/', data, content_type='application/json') self.assertContains(response, 'Viewing %s page.' % method_name) self.assertEqual(response.context['data'], expected) def test_json_encoder_argument(self): """The test Client accepts a json_encoder.""" mock_encoder = mock.MagicMock() mock_encoding = mock.MagicMock() mock_encoder.return_value = mock_encoding mock_encoding.encode.return_value = '{"value": 37}' client = self.client_class(json_encoder=mock_encoder) # Vendored tree JSON content types are accepted. client.post('/json_view/', {'value': 37}, content_type='application/vnd.api+json') self.assertTrue(mock_encoder.called) self.assertTrue(mock_encoding.encode.called) def test_put(self): response = self.client.put('/put_view/', {'foo': 'bar'}) self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, 'PUT Template') self.assertEqual(response.context['data'], "{'foo': 'bar'}") self.assertEqual(response.context['Content-Length'], '14') def test_trace(self): """TRACE a view""" response = self.client.trace('/trace_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['method'], 'TRACE') self.assertEqual(response.templates[0].name, 'TRACE Template') def test_response_headers(self): "Check the value of HTTP headers returned in a response" response = self.client.get("/header_view/") self.assertEqual(response.headers['X-DJANGO-TEST'], 'Slartibartfast') def test_response_attached_request(self): """ The returned response has a ``request`` attribute with the originating environ dict and a ``wsgi_request`` with the originating WSGIRequest. 
""" response = self.client.get("/header_view/") self.assertTrue(hasattr(response, 'request')) self.assertTrue(hasattr(response, 'wsgi_request')) for key, value in response.request.items(): self.assertIn(key, response.wsgi_request.environ) self.assertEqual(response.wsgi_request.environ[key], value) def test_response_resolver_match(self): """ The response contains a ResolverMatch instance. """ response = self.client.get('/header_view/') self.assertTrue(hasattr(response, 'resolver_match')) def test_response_resolver_match_redirect_follow(self): """ The response ResolverMatch instance contains the correct information when following redirects. """ response = self.client.get('/redirect_view/', follow=True) self.assertEqual(response.resolver_match.url_name, 'get_view') def test_response_resolver_match_regular_view(self): """ The response ResolverMatch instance contains the correct information when accessing a regular view. """ response = self.client.get('/get_view/') self.assertEqual(response.resolver_match.url_name, 'get_view') def test_response_resolver_match_class_based_view(self): """ The response ResolverMatch instance can be used to access the CBV view class. """ response = self.client.get('/accounts/') self.assertIs(response.resolver_match.func.view_class, RedirectView) @modify_settings(MIDDLEWARE={'prepend': 'test_client.tests.middleware_urlconf'}) def test_response_resolver_match_middleware_urlconf(self): response = self.client.get('/middleware_urlconf_view/') self.assertEqual(response.resolver_match.url_name, 'middleware_urlconf_view') def test_raw_post(self): "POST raw data (with a content type) to a view" test_doc = """<?xml version="1.0" encoding="utf-8"?> <library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library> """ response = self.client.post('/raw_post_view/', test_doc, content_type='text/xml') self.assertEqual(response.status_code, 200) self.assertEqual(response.templates[0].name, "Book template") self.assertEqual(response.content, b"Blink - Malcolm Gladwell") def test_insecure(self): "GET a URL through http" response = self.client.get('/secure_view/', secure=False) self.assertFalse(response.test_was_secure_request) self.assertEqual(response.test_server_port, '80') def test_secure(self): "GET a URL through https" response = self.client.get('/secure_view/', secure=True) self.assertTrue(response.test_was_secure_request) self.assertEqual(response.test_server_port, '443') def test_redirect(self): "GET a URL that redirects elsewhere" response = self.client.get('/redirect_view/') self.assertRedirects(response, '/get_view/') def test_redirect_with_query(self): "GET a URL that redirects with given GET parameters" response = self.client.get('/redirect_view/', {'var': 'value'}) self.assertRedirects(response, '/get_view/?var=value') def test_redirect_with_query_ordering(self): """assertRedirects() ignores the order of query string parameters.""" response = self.client.get('/redirect_view/', {'var': 'value', 'foo': 'bar'}) self.assertRedirects(response, '/get_view/?var=value&foo=bar') self.assertRedirects(response, '/get_view/?foo=bar&var=value') def test_permanent_redirect(self): "GET a URL that redirects permanently elsewhere" response = self.client.get('/permanent_redirect_view/') self.assertRedirects(response, '/get_view/', status_code=301) def test_temporary_redirect(self): "GET a URL that does a non-permanent redirect" response = self.client.get('/temporary_redirect_view/') self.assertRedirects(response, '/get_view/', status_code=302) def 
test_redirect_to_strange_location(self): "GET a URL that redirects to a non-200 page" response = self.client.get('/double_redirect_view/') # The response was a 302, and that the attempt to get the redirection # location returned 301 when retrieved self.assertRedirects(response, '/permanent_redirect_view/', target_status_code=301) def test_follow_redirect(self): "A URL that redirects can be followed to termination." response = self.client.get('/double_redirect_view/', follow=True) self.assertRedirects(response, '/get_view/', status_code=302, target_status_code=200) self.assertEqual(len(response.redirect_chain), 2) def test_follow_relative_redirect(self): "A URL with a relative redirect can be followed." response = self.client.get('/accounts/', follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/accounts/login/') def test_follow_relative_redirect_no_trailing_slash(self): "A URL with a relative redirect with no trailing slash can be followed." response = self.client.get('/accounts/no_trailing_slash', follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/accounts/login/') def test_redirect_to_querystring_only(self): """A URL that consists of a querystring only can be followed""" response = self.client.post('/post_then_get_view/', follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/post_then_get_view/') self.assertEqual(response.content, b'The value of success is true.') def test_follow_307_and_308_redirect(self): """ A 307 or 308 redirect preserves the request method after the redirect. """ methods = ('get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True) self.assertEqual(response.status_code, 200) self.assertEqual(response.request['PATH_INFO'], '/post_view/') self.assertEqual(response.request['REQUEST_METHOD'], method.upper()) def test_follow_307_and_308_preserves_query_string(self): methods = ('post', 'options', 'put', 'patch', 'delete', 'trace') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method( '/redirect_view_%s_query_string/' % code, data={'value': 'test'}, follow=True, ) self.assertRedirects(response, '/post_view/?hello=world', status_code=code) self.assertEqual(response.request['QUERY_STRING'], 'hello=world') def test_follow_307_and_308_get_head_query_string(self): methods = ('get', 'head') codes = (307, 308) for method, code in itertools.product(methods, codes): with self.subTest(method=method, code=code): req_method = getattr(self.client, method) response = req_method( '/redirect_view_%s_query_string/' % code, data={'value': 'test'}, follow=True, ) self.assertRedirects(response, '/post_view/?hello=world', status_code=code) self.assertEqual(response.request['QUERY_STRING'], 'value=test') def test_follow_307_and_308_preserves_post_data(self): for code in (307, 308): with self.subTest(code=code): response = self.client.post('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True) self.assertContains(response, 'test is the value') def test_follow_307_and_308_preserves_put_body(self): for code in (307, 308): with 
self.subTest(code=code): response = self.client.put('/redirect_view_%s/?to=/put_view/' % code, data='a=b', follow=True) self.assertContains(response, 'a=b is the body') def test_follow_307_and_308_preserves_get_params(self): data = {'var': 30, 'to': '/get_view/'} for code in (307, 308): with self.subTest(code=code): response = self.client.get('/redirect_view_%s/' % code, data=data, follow=True) self.assertContains(response, '30 is the value') def test_redirect_http(self): """GET a URL that redirects to an HTTP URI.""" response = self.client.get('/http_redirect_view/', follow=True) self.assertFalse(response.test_was_secure_request) def test_redirect_https(self): """GET a URL that redirects to an HTTPS URI.""" response = self.client.get('/https_redirect_view/', follow=True) self.assertTrue(response.test_was_secure_request) def test_notfound_response(self): "GET a URL that responds as '404:Not Found'" response = self.client.get('/bad_view/') self.assertContains(response, 'MAGIC', status_code=404) def test_valid_form(self): "POST valid data to a form" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Valid POST Template") def test_valid_form_with_hints(self): "GET a form, providing hints in the GET data" hints = { 'text': 'Hello World', 'multi': ('b', 'c', 'e') } response = self.client.get('/form_view/', data=hints) # The multi-value data has been rolled out ok self.assertContains(response, 'Select a valid choice.', 0) self.assertTemplateUsed(response, "Form GET Template") def test_incomplete_data_form(self): "POST incomplete data to a form" post_data = { 'text': 'Hello World', 'value': 37 } response = self.client.post('/form_view/', post_data) self.assertContains(response, 'This field is required.', 3) self.assertTemplateUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'This field is required.') self.assertFormError(response, 'form', 'single', 'This field is required.') self.assertFormError(response, 'form', 'multi', 'This field is required.') def test_form_error(self): "POST erroneous data to a form" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view/', post_data) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'Enter a valid email address.') def test_valid_form_with_template(self): "POST valid data to a form using multiple templates" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data OK') self.assertTemplateUsed(response, "form_view.html") self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, "Valid POST Template") def test_incomplete_data_form_with_template(self): "POST incomplete data to a form using multiple templates" post_data = { 'text': 'Hello World', 'value': 37 } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data has errors') self.assertTemplateUsed(response, 'form_view.html') self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, 
"Invalid POST Template") self.assertFormError(response, 'form', 'email', 'This field is required.') self.assertFormError(response, 'form', 'single', 'This field is required.') self.assertFormError(response, 'form', 'multi', 'This field is required.') def test_form_error_with_template(self): "POST erroneous data to a form using multiple templates" post_data = { 'text': 'Hello World', 'email': 'not an email address', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data has errors') self.assertTemplateUsed(response, "form_view.html") self.assertTemplateUsed(response, 'base.html') self.assertTemplateNotUsed(response, "Invalid POST Template") self.assertFormError(response, 'form', 'email', 'Enter a valid email address.') def test_unknown_page(self): "GET an invalid URL" response = self.client.get('/unknown_view/') # The response was a 404 self.assertEqual(response.status_code, 404) def test_url_parameters(self): "Make sure that URL ;-parameters are not stripped." response = self.client.get('/unknown_view/;some-parameter') # The path in the response includes it (ignore that it's a 404) self.assertEqual(response.request['PATH_INFO'], '/unknown_view/;some-parameter') def test_view_with_login(self): "Request a page that is protected with @login_required" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings( INSTALLED_APPS=['django.contrib.auth'], SESSION_ENGINE='django.contrib.sessions.backends.file', ) def test_view_with_login_when_sessions_app_is_not_installed(self): self.test_view_with_login() def test_view_with_force_login(self): "Request a page that is protected with @login_required" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_method_login(self): "Request a page that is protected with a @login_required method" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_method_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_method_force_login(self): "Request a page that is protected with a @login_required method" # Get the page without logging in. Should result in 302. 
response = self.client.get('/login_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_method_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_login_and_custom_redirect(self): "Request a page that is protected with @login_required(redirect_field_name='redirect_to')" # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view_custom_redirect/') self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Request a page that requires a login response = self.client.get('/login_protected_view_custom_redirect/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_force_login_and_custom_redirect(self): """ Request a page that is protected with @login_required(redirect_field_name='redirect_to') """ # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view_custom_redirect/') self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/') # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view_custom_redirect/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') def test_view_with_bad_login(self): "Request a page that is protected with @login, but use bad credentials" login = self.client.login(username='otheruser', password='nopassword') self.assertFalse(login) def test_view_with_inactive_login(self): """ An inactive user may login if the authenticate backend allows it. """ credentials = {'username': 'inactive', 'password': 'password'} self.assertFalse(self.client.login(**credentials)) with self.settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']): self.assertTrue(self.client.login(**credentials)) @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'django.contrib.auth.backends.AllowAllUsersModelBackend', ] ) def test_view_with_inactive_force_login(self): "Request a page that is protected with @login, but use an inactive login" # Get the page without logging in. Should result in 302. 
response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u2, backend='django.contrib.auth.backends.AllowAllUsersModelBackend') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'inactive') def test_logout(self): "Request a logout after logging in" # Log in self.client.login(username='testclient', password='password') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') # Log out self.client.logout() # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') def test_logout_with_force_login(self): "Request a logout after logging in" # Log in self.client.force_login(self.u1) # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') # Log out self.client.logout() # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'test_client.auth_backends.TestClientBackend', ], ) def test_force_login_with_backend(self): """ Request a page that is protected with @login_required when using force_login() and passing a backend. """ # Get the page without logging in. Should result in 302. response = self.client.get('/login_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/') # Log in self.client.force_login(self.u1, backend='test_client.auth_backends.TestClientBackend') self.assertEqual(self.u1.backend, 'test_client.auth_backends.TestClientBackend') # Request a page that requires a login response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings( AUTHENTICATION_BACKENDS=[ 'django.contrib.auth.backends.ModelBackend', 'test_client.auth_backends.TestClientBackend', ], ) def test_force_login_without_backend(self): """ force_login() without passing a backend and with multiple backends configured should automatically use the first backend. """ self.client.force_login(self.u1) response = self.client.get('/login_protected_view/') self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend') @override_settings(AUTHENTICATION_BACKENDS=[ 'test_client.auth_backends.BackendWithoutGetUserMethod', 'django.contrib.auth.backends.ModelBackend', ]) def test_force_login_with_backend_missing_get_user(self): """ force_login() skips auth backends without a get_user() method. 
""" self.client.force_login(self.u1) self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend') @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies") def test_logout_cookie_sessions(self): self.test_logout() def test_view_with_permissions(self): "Request a page that is protected with @permission_required" # Get the page without logging in. Should result in 302. response = self.client.get('/permission_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 302. response = self.client.get('/permission_protected_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/') # TODO: Log in with right permissions and request the page again def test_view_with_permissions_exception(self): "Request a page that is protected with @permission_required but raises an exception" # Get the page without logging in. Should result in 403. response = self.client.get('/permission_protected_view_exception/') self.assertEqual(response.status_code, 403) # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 403. response = self.client.get('/permission_protected_view_exception/') self.assertEqual(response.status_code, 403) def test_view_with_method_permissions(self): "Request a page that is protected with a @permission_required method" # Get the page without logging in. Should result in 302. response = self.client.get('/permission_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Log in with wrong permissions. Should result in 302. response = self.client.get('/permission_protected_method_view/') self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/') # TODO: Log in with right permissions and request the page again def test_external_redirect(self): response = self.client.get('/django_project_redirect/') self.assertRedirects(response, 'https://www.djangoproject.com/', fetch_redirect_response=False) def test_external_redirect_without_trailing_slash(self): """ Client._handle_redirects() with an empty path. """ response = self.client.get('/no_trailing_slash_external_redirect/', follow=True) self.assertRedirects(response, 'https://testserver') def test_external_redirect_with_fetch_error_msg(self): """ assertRedirects without fetch_redirect_response=False raises a relevant ValueError rather than a non-descript AssertionError. """ response = self.client.get('/django_project_redirect/') msg = ( "The test client is unable to fetch remote URLs (got " "https://www.djangoproject.com/). If the host is served by Django, " "add 'www.djangoproject.com' to ALLOWED_HOSTS. " "Otherwise, use assertRedirects(..., fetch_redirect_response=False)." 
) with self.assertRaisesMessage(ValueError, msg): self.assertRedirects(response, 'https://www.djangoproject.com/') def test_session_modifying_view(self): "Request a page that modifies the session" # Session value isn't set initially with self.assertRaises(KeyError): self.client.session['tobacconist'] self.client.post('/session_view/') # The session was modified self.assertEqual(self.client.session['tobacconist'], 'hovercraft') @override_settings( INSTALLED_APPS=[], SESSION_ENGINE='django.contrib.sessions.backends.file', ) def test_sessions_app_is_not_installed(self): self.test_session_modifying_view() @override_settings( INSTALLED_APPS=[], SESSION_ENGINE='django.contrib.sessions.backends.nonexistent', ) def test_session_engine_is_invalid(self): with self.assertRaisesMessage(ImportError, 'nonexistent'): self.test_session_modifying_view() def test_view_with_exception(self): "Request a page that is known to throw an error" with self.assertRaises(KeyError): self.client.get("/broken_view/") def test_exc_info(self): client = Client(raise_request_exception=False) response = client.get("/broken_view/") self.assertEqual(response.status_code, 500) exc_type, exc_value, exc_traceback = response.exc_info self.assertIs(exc_type, KeyError) self.assertIsInstance(exc_value, KeyError) self.assertEqual(str(exc_value), "'Oops! Looks like you wrote some bad code.'") self.assertIsNotNone(exc_traceback) def test_exc_info_none(self): response = self.client.get("/get_view/") self.assertIsNone(response.exc_info) def test_mail_sending(self): "Mail is redirected to a dummy outbox during test setup" response = self.client.get('/mail_sending_view/') self.assertEqual(response.status_code, 200) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Test message') self.assertEqual(mail.outbox[0].body, 'This is a test email') self.assertEqual(mail.outbox[0].from_email, '[email protected]') self.assertEqual(mail.outbox[0].to[0], '[email protected]') self.assertEqual(mail.outbox[0].to[1], '[email protected]') def test_reverse_lazy_decodes(self): "reverse_lazy() works in the test client" data = {'var': 'data'} response = self.client.get(reverse_lazy('get_view'), data) # Check some response details self.assertContains(response, 'This is a test') def test_relative_redirect(self): response = self.client.get('/accounts/') self.assertRedirects(response, '/accounts/login/') def test_relative_redirect_no_trailing_slash(self): response = self.client.get('/accounts/no_trailing_slash') self.assertRedirects(response, '/accounts/login/') def test_mass_mail_sending(self): "Mass mail is redirected to a dummy outbox during test setup" response = self.client.get('/mass_mail_sending_view/') self.assertEqual(response.status_code, 200) self.assertEqual(len(mail.outbox), 2) self.assertEqual(mail.outbox[0].subject, 'First Test message') self.assertEqual(mail.outbox[0].body, 'This is the first test email') self.assertEqual(mail.outbox[0].from_email, '[email protected]') self.assertEqual(mail.outbox[0].to[0], '[email protected]') self.assertEqual(mail.outbox[0].to[1], '[email protected]') self.assertEqual(mail.outbox[1].subject, 'Second Test message') self.assertEqual(mail.outbox[1].body, 'This is the second test email') self.assertEqual(mail.outbox[1].from_email, '[email protected]') self.assertEqual(mail.outbox[1].to[0], '[email protected]') self.assertEqual(mail.outbox[1].to[1], '[email protected]') def test_exception_following_nested_client_request(self): """ A nested test client request shouldn't clobber exception 
signals from the outer client request. """ with self.assertRaisesMessage(Exception, 'exception message'): self.client.get('/nesting_exception_view/') def test_response_raises_multi_arg_exception(self): """A request may raise an exception with more than one required arg.""" with self.assertRaises(TwoArgException) as cm: self.client.get('/two_arg_exception/') self.assertEqual(cm.exception.args, ('one', 'two')) def test_uploading_temp_file(self): with tempfile.TemporaryFile() as test_file: response = self.client.post('/upload_view/', data={'temp_file': test_file}) self.assertEqual(response.content, b'temp_file') def test_uploading_named_temp_file(self): with tempfile.NamedTemporaryFile() as test_file: response = self.client.post( '/upload_view/', data={'named_temp_file': test_file}, ) self.assertEqual(response.content, b'named_temp_file') @override_settings( MIDDLEWARE=['django.middleware.csrf.CsrfViewMiddleware'], ROOT_URLCONF='test_client.urls', ) class CSRFEnabledClientTests(SimpleTestCase): def test_csrf_enabled_client(self): "A client can be instantiated with CSRF checks enabled" csrf_client = Client(enforce_csrf_checks=True) # The normal client allows the post response = self.client.post('/post_view/', {}) self.assertEqual(response.status_code, 200) # The CSRF-enabled client rejects it response = csrf_client.post('/post_view/', {}) self.assertEqual(response.status_code, 403) class CustomTestClient(Client): i_am_customized = "Yes" class CustomTestClientTest(SimpleTestCase): client_class = CustomTestClient def test_custom_test_client(self): """A test case can specify a custom class for self.client.""" self.assertIs(hasattr(self.client, "i_am_customized"), True) def _generic_view(request): return HttpResponse(status=200) @override_settings(ROOT_URLCONF='test_client.urls') class RequestFactoryTest(SimpleTestCase): """Tests for the request factory.""" # A mapping between names of HTTP/1.1 methods and their test views. http_methods_and_views = ( ('get', get_view), ('post', post_view), ('put', _generic_view), ('patch', _generic_view), ('delete', _generic_view), ('head', _generic_view), ('options', _generic_view), ('trace', trace_view), ) request_factory = RequestFactory() def test_request_factory(self): """The request factory implements all the HTTP/1.1 methods.""" for method_name, view in self.http_methods_and_views: method = getattr(self.request_factory, method_name) request = method('/somewhere/') response = view(request) self.assertEqual(response.status_code, 200) def test_get_request_from_factory(self): """ The request factory returns a templated response for a GET request. 
""" request = self.request_factory.get('/somewhere/') response = get_view(request) self.assertContains(response, 'This is a test') def test_trace_request_from_factory(self): """The request factory returns an echo response for a TRACE request.""" url_path = '/somewhere/' request = self.request_factory.trace(url_path) response = trace_view(request) protocol = request.META["SERVER_PROTOCOL"] echoed_request_line = "TRACE {} {}".format(url_path, protocol) self.assertContains(response, echoed_request_line) @override_settings(ROOT_URLCONF='test_client.urls') class AsyncClientTest(TestCase): async def test_response_resolver_match(self): response = await self.async_client.get('/async_get_view/') self.assertTrue(hasattr(response, 'resolver_match')) self.assertEqual(response.resolver_match.url_name, 'async_get_view') @modify_settings( MIDDLEWARE={'prepend': 'test_client.tests.async_middleware_urlconf'}, ) async def test_response_resolver_match_middleware_urlconf(self): response = await self.async_client.get('/middleware_urlconf_view/') self.assertEqual(response.resolver_match.url_name, 'middleware_urlconf_view') async def test_follow_parameter_not_implemented(self): msg = 'AsyncClient request methods do not accept the follow parameter.' tests = ( 'get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace', ) for method_name in tests: with self.subTest(method=method_name): method = getattr(self.async_client, method_name) with self.assertRaisesMessage(NotImplementedError, msg): await method('/redirect_view/', follow=True) async def test_get_data(self): response = await self.async_client.get('/get_view/', {'var': 'val'}) self.assertContains(response, 'This is a test. val is the value.') @override_settings(ROOT_URLCONF='test_client.urls') class AsyncRequestFactoryTest(SimpleTestCase): request_factory = AsyncRequestFactory() async def test_request_factory(self): tests = ( 'get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace', ) for method_name in tests: with self.subTest(method=method_name): async def async_generic_view(request): if request.method.lower() != method_name: return HttpResponseNotAllowed(method_name) return HttpResponse(status=200) method = getattr(self.request_factory, method_name) request = method('/somewhere/') response = await async_generic_view(request) self.assertEqual(response.status_code, 200) async def test_request_factory_data(self): async def async_generic_view(request): return HttpResponse(status=200, content=request.body) request = self.request_factory.post( '/somewhere/', data={'example': 'data'}, content_type='application/json', ) self.assertEqual(request.headers['content-length'], '19') self.assertEqual(request.headers['content-type'], 'application/json') response = await async_generic_view(request) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'{"example": "data"}') def test_request_factory_sets_headers(self): request = self.request_factory.get( '/somewhere/', AUTHORIZATION='Bearer faketoken', X_ANOTHER_HEADER='some other value', ) self.assertEqual(request.headers['authorization'], 'Bearer faketoken') self.assertIn('HTTP_AUTHORIZATION', request.META) self.assertEqual(request.headers['x-another-header'], 'some other value') self.assertIn('HTTP_X_ANOTHER_HEADER', request.META) def test_request_factory_query_string(self): request = self.request_factory.get('/somewhere/', {'example': 'data'}) self.assertNotIn('Query-String', request.headers) self.assertEqual(request.GET['example'], 'data')
3168604874c3185d2df0eda7e5c190ce7f8cd6558dc06600321e79f7e0c47d6c
from django.contrib.auth import views as auth_views
from django.urls import path
from django.views.generic import RedirectView

from . import views

urlpatterns = [
    path('upload_view/', views.upload_view, name='upload_view'),
    path('get_view/', views.get_view, name='get_view'),
    path('post_view/', views.post_view),
    path('post_then_get_view/', views.post_then_get_view),
    path('put_view/', views.put_view),
    path('trace_view/', views.trace_view),
    path('header_view/', views.view_with_header),
    path('raw_post_view/', views.raw_post_view),
    path('redirect_view/', views.redirect_view),
    path('redirect_view_307/', views.method_saving_307_redirect_view),
    path(
        'redirect_view_307_query_string/',
        views.method_saving_307_redirect_query_string_view,
    ),
    path('redirect_view_308/', views.method_saving_308_redirect_view),
    path(
        'redirect_view_308_query_string/',
        views.method_saving_308_redirect_query_string_view,
    ),
    path('secure_view/', views.view_with_secure),
    path('permanent_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=True)),
    path('temporary_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=False)),
    path('http_redirect_view/', RedirectView.as_view(url='/secure_view/')),
    path('https_redirect_view/', RedirectView.as_view(url='https://testserver/secure_view/')),
    path('double_redirect_view/', views.double_redirect_view),
    path('bad_view/', views.bad_view),
    path('form_view/', views.form_view),
    path('form_view_with_template/', views.form_view_with_template),
    path('formset_view/', views.formset_view),
    path('json_view/', views.json_view),
    path('login_protected_view/', views.login_protected_view),
    path('login_protected_method_view/', views.login_protected_method_view),
    path('login_protected_view_custom_redirect/', views.login_protected_view_changed_redirect),
    path('permission_protected_view/', views.permission_protected_view),
    path('permission_protected_view_exception/', views.permission_protected_view_exception),
    path('permission_protected_method_view/', views.permission_protected_method_view),
    path('session_view/', views.session_view),
    path('broken_view/', views.broken_view),
    path('mail_sending_view/', views.mail_sending_view),
    path('mass_mail_sending_view/', views.mass_mail_sending_view),
    path('nesting_exception_view/', views.nesting_exception_view),
    path('django_project_redirect/', views.django_project_redirect),
    path('no_trailing_slash_external_redirect/', views.no_trailing_slash_external_redirect),
    # Target for no_trailing_slash_external_redirect/ with follow=True
    path('', views.index_view, name='index'),
    path('two_arg_exception/', views.two_arg_exception),
    path('accounts/', RedirectView.as_view(url='login/')),
    path('accounts/no_trailing_slash', RedirectView.as_view(url='login/')),
    path('accounts/login/', auth_views.LoginView.as_view(template_name='login.html')),
    path('accounts/logout/', auth_views.LogoutView.as_view()),
    # Async views.
    path('async_get_view/', views.async_get_view, name='async_get_view'),
]
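
# Illustrative note (not part of the original URLconf): the named routes above
# are what the reverse()/reverse_lazy() calls in the test suite resolve, e.g.
#
#     from django.urls import reverse
#     reverse('get_view')        # -> '/get_view/'
#     reverse('async_get_view')  # -> '/async_get_view/'
#
# The bare '' pattern (name='index') is the landing target used by
# no_trailing_slash_external_redirect/ when the client is called with
# follow=True.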
50f60e9fb3f9ffda81a0d97557731a49730f27d3327cbcf39a6a52b19ec5a1df
import json from urllib.parse import urlencode from xml.dom.minidom import parseString from django.contrib.auth.decorators import login_required, permission_required from django.core import mail from django.core.exceptions import ValidationError from django.forms import fields from django.forms.forms import Form from django.forms.formsets import BaseFormSet, formset_factory from django.http import ( HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseRedirect, ) from django.shortcuts import render from django.template import Context, Template from django.test import Client from django.utils.decorators import method_decorator def get_view(request): "A simple view that expects a GET request, and returns a rendered template" t = Template('This is a test. {{ var }} is the value.', name='GET Template') c = Context({'var': request.GET.get('var', 42)}) return HttpResponse(t.render(c)) async def async_get_view(request): return HttpResponse(b'GET content.') def trace_view(request): """ A simple view that expects a TRACE request and echoes its status line. TRACE requests should not have an entity; the view will return a 400 status response if it is present. """ if request.method.upper() != "TRACE": return HttpResponseNotAllowed("TRACE") elif request.body: return HttpResponseBadRequest("TRACE requests MUST NOT include an entity") else: protocol = request.META["SERVER_PROTOCOL"] t = Template( '{{ method }} {{ uri }} {{ version }}', name="TRACE Template", ) c = Context({ 'method': request.method, 'uri': request.path, 'version': protocol, }) return HttpResponse(t.render(c)) def put_view(request): if request.method == 'PUT': t = Template('Data received: {{ data }} is the body.', name='PUT Template') c = Context({ 'Content-Length': request.META['CONTENT_LENGTH'], 'data': request.body.decode(), }) else: t = Template('Viewing GET page.', name='Empty GET Template') c = Context() return HttpResponse(t.render(c)) def post_view(request): """A view that expects a POST, and returns a different template depending on whether any POST data is available """ if request.method == 'POST': if request.POST: t = Template('Data received: {{ data }} is the value.', name='POST Template') c = Context({'data': request.POST['value']}) else: t = Template('Viewing POST page.', name='Empty POST Template') c = Context() else: t = Template('Viewing GET page.', name='Empty GET Template') c = Context() return HttpResponse(t.render(c)) def post_then_get_view(request): """ A view that expects a POST request, returns a redirect response to itself providing only a ?success=true querystring, the value of this querystring is then rendered upon GET. """ if request.method == 'POST': return HttpResponseRedirect('?success=true') t = Template('The value of success is {{ value }}.', name='GET Template') c = Context({'value': request.GET.get('success', 'false')}) return HttpResponse(t.render(c)) def json_view(request): """ A view that expects a request with the header 'application/json' and JSON data, which is deserialized and included in the context. """ if request.META.get('CONTENT_TYPE') != 'application/json': return HttpResponse() t = Template('Viewing {} page. 
With data {{ data }}.'.format(request.method)) data = json.loads(request.body.decode('utf-8')) c = Context({'data': data}) return HttpResponse(t.render(c)) def view_with_header(request): "A view that has a custom header" response = HttpResponse() response.headers['X-DJANGO-TEST'] = 'Slartibartfast' return response def raw_post_view(request): """A view which expects raw XML to be posted and returns content extracted from the XML""" if request.method == 'POST': root = parseString(request.body) first_book = root.firstChild.firstChild title, author = [n.firstChild.nodeValue for n in first_book.childNodes] t = Template("{{ title }} - {{ author }}", name="Book template") c = Context({"title": title, "author": author}) else: t = Template("GET request.", name="Book GET template") c = Context() return HttpResponse(t.render(c)) def redirect_view(request): "A view that redirects all requests to the GET view" if request.GET: query = '?' + urlencode(request.GET, True) else: query = '' return HttpResponseRedirect('/get_view/' + query) def method_saving_307_redirect_query_string_view(request): return HttpResponseRedirect('/post_view/?hello=world', status=307) def method_saving_308_redirect_query_string_view(request): return HttpResponseRedirect('/post_view/?hello=world', status=308) def _post_view_redirect(request, status_code): """Redirect to /post_view/ using the status code.""" redirect_to = request.GET.get('to', '/post_view/') return HttpResponseRedirect(redirect_to, status=status_code) def method_saving_307_redirect_view(request): return _post_view_redirect(request, 307) def method_saving_308_redirect_view(request): return _post_view_redirect(request, 308) def view_with_secure(request): "A view that indicates if the request was secure" response = HttpResponse() response.test_was_secure_request = request.is_secure() response.test_server_port = request.META.get('SERVER_PORT', 80) return response def double_redirect_view(request): "A view that redirects all requests to a redirection view" return HttpResponseRedirect('/permanent_redirect_view/') def bad_view(request): "A view that returns a 404 with some error content" return HttpResponseNotFound('Not found!. This page contains some MAGIC content') TestChoices = ( ('a', 'First Choice'), ('b', 'Second Choice'), ('c', 'Third Choice'), ('d', 'Fourth Choice'), ('e', 'Fifth Choice') ) class TestForm(Form): text = fields.CharField() email = fields.EmailField() value = fields.IntegerField() single = fields.ChoiceField(choices=TestChoices) multi = fields.MultipleChoiceField(choices=TestChoices) def clean(self): cleaned_data = self.cleaned_data if cleaned_data.get("text") == "Raise non-field error": raise ValidationError("Non-field error.") return cleaned_data def form_view(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template') c = Context({'form': form}) else: form = TestForm(request.GET) t = Template('Viewing base form. 
{{ form }}.', name='Form GET Template') c = Context({'form': form}) return HttpResponse(t.render(c)) def form_view_with_template(request): "A view that tests a simple form" if request.method == 'POST': form = TestForm(request.POST) if form.is_valid(): message = 'POST data OK' else: message = 'POST data has errors' else: form = TestForm() message = 'GET form page' return render(request, 'form_view.html', { 'form': form, 'message': message, }) class BaseTestFormSet(BaseFormSet): def clean(self): """No two email addresses are the same.""" if any(self.errors): # Don't bother validating the formset unless each form is valid return emails = [] for form in self.forms: email = form.cleaned_data['email'] if email in emails: raise ValidationError( "Forms in a set must have distinct email addresses." ) emails.append(email) TestFormSet = formset_factory(TestForm, BaseTestFormSet) def formset_view(request): "A view that tests a simple formset" if request.method == 'POST': formset = TestFormSet(request.POST) if formset.is_valid(): t = Template('Valid POST data.', name='Valid POST Template') c = Context() else: t = Template('Invalid POST data. {{ my_formset.errors }}', name='Invalid POST Template') c = Context({'my_formset': formset}) else: formset = TestForm(request.GET) t = Template('Viewing base formset. {{ my_formset }}.', name='Formset GET Template') c = Context({'my_formset': formset}) return HttpResponse(t.render(c)) @login_required def login_protected_view(request): "A simple view that is login protected." t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @login_required(redirect_field_name='redirect_to') def login_protected_view_changed_redirect(request): "A simple view that is login protected with a custom redirect field set" t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) def _permission_protected_view(request): "A simple view that is permission protected." t = Template('This is a permission protected test. ' 'Username is {{ user.username }}. ' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) permission_protected_view = permission_required('permission_not_granted')(_permission_protected_view) permission_protected_view_exception = ( permission_required('permission_not_granted', raise_exception=True)(_permission_protected_view) ) class _ViewManager: @method_decorator(login_required) def login_protected_view(self, request): t = Template('This is a login protected test using a method. ' 'Username is {{ user.username }}.', name='Login Method Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) @method_decorator(permission_required('permission_not_granted')) def permission_protected_view(self, request): t = Template('This is a permission protected test using a method. ' 'Username is {{ user.username }}. 
' 'Permissions are {{ user.get_all_permissions }}.', name='Permissions Template') c = Context({'user': request.user}) return HttpResponse(t.render(c)) _view_manager = _ViewManager() login_protected_method_view = _view_manager.login_protected_view permission_protected_method_view = _view_manager.permission_protected_view def session_view(request): "A view that modifies the session" request.session['tobacconist'] = 'hovercraft' t = Template('This is a view that modifies the session.', name='Session Modifying View Template') c = Context() return HttpResponse(t.render(c)) def broken_view(request): """A view which just raises an exception, simulating a broken view.""" raise KeyError("Oops! Looks like you wrote some bad code.") def mail_sending_view(request): mail.EmailMessage( "Test message", "This is a test email", "[email protected]", ['[email protected]', '[email protected]']).send() return HttpResponse("Mail sent") def mass_mail_sending_view(request): m1 = mail.EmailMessage( 'First Test message', 'This is the first test email', '[email protected]', ['[email protected]', '[email protected]']) m2 = mail.EmailMessage( 'Second Test message', 'This is the second test email', '[email protected]', ['[email protected]', '[email protected]']) c = mail.get_connection() c.send_messages([m1, m2]) return HttpResponse("Mail sent") def nesting_exception_view(request): """ A view that uses a nested client to call another view and then raises an exception. """ client = Client() client.get('/get_view/') raise Exception('exception message') def django_project_redirect(request): return HttpResponseRedirect('https://www.djangoproject.com/') def no_trailing_slash_external_redirect(request): """ RFC 2616 3.2.2: A bare domain without any abs_path element should be treated as having the trailing `/`. Use https://testserver, rather than an external domain, in order to allow use of follow=True, triggering Client._handle_redirects(). """ return HttpResponseRedirect('https://testserver') def index_view(request): """Target for no_trailing_slash_external_redirect with follow=True.""" return HttpResponse('Hello world') def upload_view(request): """Prints keys of request.FILES to the response.""" return HttpResponse(', '.join(request.FILES)) class TwoArgException(Exception): def __init__(self, one, two): pass def two_arg_exception(request): raise TwoArgException('one', 'two')
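
# --- Illustrative sketch (not part of the original module) -------------------
# The XML shape that raw_post_view() above expects: parseString() yields a
# document whose first child's first child is the <book> element, and that
# element's two children supply the title and author. The sample data is
# hypothetical.
EXAMPLE_RAW_POST_PAYLOAD = (
    '<library>'
    '<book><title>Blink</title><author>Malcolm Gladwell</author></book>'
    '</library>'
)
# e.g. Client().post('/raw_post_view/', EXAMPLE_RAW_POST_PAYLOAD,
#                    content_type='text/xml') renders "Blink - Malcolm Gladwell".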
295396a9913d5ba6c939dc082513bb745e257c34e9c08867493ef769e140f35e
from django.db import connection from django.db.backends.ddl_references import ( Columns, Expressions, ForeignKeyName, IndexName, Statement, Table, ) from django.db.models import ExpressionList, F from django.db.models.functions import Upper from django.db.models.indexes import IndexExpression from django.db.models.sql import Query from django.test import SimpleTestCase, TransactionTestCase from .models import Person class TableTests(SimpleTestCase): def setUp(self): self.reference = Table('table', lambda table: table.upper()) def test_references_table(self): self.assertIs(self.reference.references_table('table'), True) self.assertIs(self.reference.references_table('other'), False) def test_rename_table_references(self): self.reference.rename_table_references('other', 'table') self.assertIs(self.reference.references_table('table'), True) self.assertIs(self.reference.references_table('other'), False) self.reference.rename_table_references('table', 'other') self.assertIs(self.reference.references_table('table'), False) self.assertIs(self.reference.references_table('other'), True) def test_repr(self): self.assertEqual(repr(self.reference), "<Table 'TABLE'>") def test_str(self): self.assertEqual(str(self.reference), 'TABLE') class ColumnsTests(TableTests): def setUp(self): self.reference = Columns( 'table', ['first_column', 'second_column'], lambda column: column.upper() ) def test_references_column(self): self.assertIs(self.reference.references_column('other', 'first_column'), False) self.assertIs(self.reference.references_column('table', 'third_column'), False) self.assertIs(self.reference.references_column('table', 'first_column'), True) def test_rename_column_references(self): self.reference.rename_column_references('other', 'first_column', 'third_column') self.assertIs(self.reference.references_column('table', 'first_column'), True) self.assertIs(self.reference.references_column('table', 'third_column'), False) self.assertIs(self.reference.references_column('other', 'third_column'), False) self.reference.rename_column_references('table', 'third_column', 'first_column') self.assertIs(self.reference.references_column('table', 'first_column'), True) self.assertIs(self.reference.references_column('table', 'third_column'), False) self.reference.rename_column_references('table', 'first_column', 'third_column') self.assertIs(self.reference.references_column('table', 'first_column'), False) self.assertIs(self.reference.references_column('table', 'third_column'), True) def test_repr(self): self.assertEqual(repr(self.reference), "<Columns 'FIRST_COLUMN, SECOND_COLUMN'>") def test_str(self): self.assertEqual(str(self.reference), 'FIRST_COLUMN, SECOND_COLUMN') class IndexNameTests(ColumnsTests): def setUp(self): def create_index_name(table_name, column_names, suffix): return ', '.join("%s_%s_%s" % (table_name, column_name, suffix) for column_name in column_names) self.reference = IndexName( 'table', ['first_column', 'second_column'], 'suffix', create_index_name ) def test_repr(self): self.assertEqual(repr(self.reference), "<IndexName 'table_first_column_suffix, table_second_column_suffix'>") def test_str(self): self.assertEqual(str(self.reference), 'table_first_column_suffix, table_second_column_suffix') class ForeignKeyNameTests(IndexNameTests): def setUp(self): def create_foreign_key_name(table_name, column_names, suffix): return ', '.join("%s_%s_%s" % (table_name, column_name, suffix) for column_name in column_names) self.reference = ForeignKeyName( 'table', ['first_column', 'second_column'], 
'to_table', ['to_first_column', 'to_second_column'], '%(to_table)s_%(to_column)s_fk', create_foreign_key_name, ) def test_references_table(self): super().test_references_table() self.assertIs(self.reference.references_table('to_table'), True) def test_references_column(self): super().test_references_column() self.assertIs(self.reference.references_column('to_table', 'second_column'), False) self.assertIs(self.reference.references_column('to_table', 'to_second_column'), True) def test_rename_table_references(self): super().test_rename_table_references() self.reference.rename_table_references('to_table', 'other_to_table') self.assertIs(self.reference.references_table('other_to_table'), True) self.assertIs(self.reference.references_table('to_table'), False) def test_rename_column_references(self): super().test_rename_column_references() self.reference.rename_column_references('to_table', 'second_column', 'third_column') self.assertIs(self.reference.references_column('table', 'second_column'), True) self.assertIs(self.reference.references_column('to_table', 'to_second_column'), True) self.reference.rename_column_references('to_table', 'to_first_column', 'to_third_column') self.assertIs(self.reference.references_column('to_table', 'to_first_column'), False) self.assertIs(self.reference.references_column('to_table', 'to_third_column'), True) def test_repr(self): self.assertEqual( repr(self.reference), "<ForeignKeyName 'table_first_column_to_table_to_first_column_fk, " "table_second_column_to_table_to_first_column_fk'>" ) def test_str(self): self.assertEqual( str(self.reference), 'table_first_column_to_table_to_first_column_fk, ' 'table_second_column_to_table_to_first_column_fk' ) class MockReference: def __init__(self, representation, referenced_tables, referenced_columns): self.representation = representation self.referenced_tables = referenced_tables self.referenced_columns = referenced_columns def references_table(self, table): return table in self.referenced_tables def references_column(self, table, column): return (table, column) in self.referenced_columns def rename_table_references(self, old_table, new_table): if old_table in self.referenced_tables: self.referenced_tables.remove(old_table) self.referenced_tables.add(new_table) def rename_column_references(self, table, old_column, new_column): column = (table, old_column) if column in self.referenced_columns: self.referenced_columns.remove(column) self.referenced_columns.add((table, new_column)) def __str__(self): return self.representation class StatementTests(SimpleTestCase): def test_references_table(self): statement = Statement('', reference=MockReference('', {'table'}, {}), non_reference='') self.assertIs(statement.references_table('table'), True) self.assertIs(statement.references_table('other'), False) def test_references_column(self): statement = Statement('', reference=MockReference('', {}, {('table', 'column')}), non_reference='') self.assertIs(statement.references_column('table', 'column'), True) self.assertIs(statement.references_column('other', 'column'), False) def test_rename_table_references(self): reference = MockReference('', {'table'}, {}) statement = Statement('', reference=reference, non_reference='') statement.rename_table_references('table', 'other') self.assertEqual(reference.referenced_tables, {'other'}) def test_rename_column_references(self): reference = MockReference('', {}, {('table', 'column')}) statement = Statement('', reference=reference, non_reference='') statement.rename_column_references('table', 
'column', 'other') self.assertEqual(reference.referenced_columns, {('table', 'other')}) def test_repr(self): reference = MockReference('reference', {}, {}) statement = Statement("%(reference)s - %(non_reference)s", reference=reference, non_reference='non_reference') self.assertEqual(repr(statement), "<Statement 'reference - non_reference'>") def test_str(self): reference = MockReference('reference', {}, {}) statement = Statement("%(reference)s - %(non_reference)s", reference=reference, non_reference='non_reference') self.assertEqual(str(statement), 'reference - non_reference') class ExpressionsTests(TransactionTestCase): available_apps = [] def setUp(self): compiler = Person.objects.all().query.get_compiler(connection.alias) self.editor = connection.schema_editor() self.expressions = Expressions( table=Person._meta.db_table, expressions=ExpressionList( IndexExpression(F('first_name')), IndexExpression(F('last_name').desc()), IndexExpression(Upper('last_name')), ).resolve_expression(compiler.query), compiler=compiler, quote_value=self.editor.quote_value, ) def test_references_table(self): self.assertIs(self.expressions.references_table(Person._meta.db_table), True) self.assertIs(self.expressions.references_table('other'), False) def test_references_column(self): table = Person._meta.db_table self.assertIs(self.expressions.references_column(table, 'first_name'), True) self.assertIs(self.expressions.references_column(table, 'last_name'), True) self.assertIs(self.expressions.references_column(table, 'other'), False) def test_rename_table_references(self): table = Person._meta.db_table self.expressions.rename_table_references(table, 'other') self.assertIs(self.expressions.references_table(table), False) self.assertIs(self.expressions.references_table('other'), True) self.assertIn( '%s.%s' % ( self.editor.quote_name('other'), self.editor.quote_name('first_name'), ), str(self.expressions), ) def test_rename_table_references_without_alias(self): compiler = Query(Person, alias_cols=False).get_compiler(connection=connection) table = Person._meta.db_table expressions = Expressions( table=table, expressions=ExpressionList( IndexExpression(Upper('last_name')), IndexExpression(F('first_name')), ).resolve_expression(compiler.query), compiler=compiler, quote_value=self.editor.quote_value, ) expressions.rename_table_references(table, 'other') self.assertIs(expressions.references_table(table), False) self.assertIs(expressions.references_table('other'), True) expected_str = '(UPPER(%s)), %s' % ( self.editor.quote_name('last_name'), self.editor.quote_name('first_name'), ) self.assertEqual(str(expressions), expected_str) def test_rename_column_references(self): table = Person._meta.db_table self.expressions.rename_column_references(table, 'first_name', 'other') self.assertIs(self.expressions.references_column(table, 'other'), True) self.assertIs(self.expressions.references_column(table, 'first_name'), False) self.assertIn( '%s.%s' % (self.editor.quote_name(table), self.editor.quote_name('other')), str(self.expressions), ) def test_str(self): table_name = self.editor.quote_name(Person._meta.db_table) expected_str = '%s.%s, %s.%s DESC, (UPPER(%s.%s))' % ( table_name, self.editor.quote_name('first_name'), table_name, self.editor.quote_name('last_name'), table_name, self.editor.quote_name('last_name'), ) self.assertEqual(str(self.expressions), expected_str)
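
# --- Illustrative sketch (not part of the original tests) --------------------
# How the references exercised above compose in practice: Statement defers
# rendering of its parts, so renaming a referenced table later changes the
# generated SQL. The quoting callable and names are stand-ins for
# demonstration.
def _demo_statement_rendering():
    table = Table('author', lambda name: '"%s"' % name)
    statement = Statement('DROP TABLE %(table)s', table=table)
    assert str(statement) == 'DROP TABLE "author"'
    statement.rename_table_references('author', 'writer')
    return str(statement)  # 'DROP TABLE "writer"'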
c470d6350c84b23276601f439d3fe058365f6b5758e1314ca31254253220bb98
"""Tests for django.db.backends.utils""" from decimal import Decimal, Rounded from django.db import NotSupportedError, connection from django.db.backends.utils import ( format_number, split_identifier, split_tzname_delta, truncate_name, ) from django.test import ( SimpleTestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) class TestUtils(SimpleTestCase): def test_truncate_name(self): self.assertEqual(truncate_name('some_table', 10), 'some_table') self.assertEqual(truncate_name('some_long_table', 10), 'some_la38a') self.assertEqual(truncate_name('some_long_table', 10, 3), 'some_loa38') self.assertEqual(truncate_name('some_long_table'), 'some_long_table') # "user"."table" syntax self.assertEqual(truncate_name('username"."some_table', 10), 'username"."some_table') self.assertEqual(truncate_name('username"."some_long_table', 10), 'username"."some_la38a') self.assertEqual(truncate_name('username"."some_long_table', 10, 3), 'username"."some_loa38') def test_split_identifier(self): self.assertEqual(split_identifier('some_table'), ('', 'some_table')) self.assertEqual(split_identifier('"some_table"'), ('', 'some_table')) self.assertEqual(split_identifier('namespace"."some_table'), ('namespace', 'some_table')) self.assertEqual(split_identifier('"namespace"."some_table"'), ('namespace', 'some_table')) def test_format_number(self): def equal(value, max_d, places, result): self.assertEqual(format_number(Decimal(value), max_d, places), result) equal('0', 12, 3, '0.000') equal('0', 12, 8, '0.00000000') equal('1', 12, 9, '1.000000000') equal('0.00000000', 12, 8, '0.00000000') equal('0.000000004', 12, 8, '0.00000000') equal('0.000000008', 12, 8, '0.00000001') equal('0.000000000000000000999', 10, 8, '0.00000000') equal('0.1234567890', 12, 10, '0.1234567890') equal('0.1234567890', 12, 9, '0.123456789') equal('0.1234567890', 12, 8, '0.12345679') equal('0.1234567890', 12, 5, '0.12346') equal('0.1234567890', 12, 3, '0.123') equal('0.1234567890', 12, 1, '0.1') equal('0.1234567890', 12, 0, '0') equal('0.1234567890', None, 0, '0') equal('1234567890.1234567890', None, 0, '1234567890') equal('1234567890.1234567890', None, 2, '1234567890.12') equal('0.1234', 5, None, '0.1234') equal('123.12', 5, None, '123.12') with self.assertRaises(Rounded): equal('0.1234567890', 5, None, '0.12346') with self.assertRaises(Rounded): equal('1234567890.1234', 5, None, '1234600000') def test_split_tzname_delta(self): tests = [ ('Asia/Ust+Nera', ('Asia/Ust+Nera', None, None)), ('Asia/Ust-Nera', ('Asia/Ust-Nera', None, None)), ('Asia/Ust+Nera-02:00', ('Asia/Ust+Nera', '-', '02:00')), ('Asia/Ust-Nera+05:00', ('Asia/Ust-Nera', '+', '05:00')), ('America/Coral_Harbour-01:00', ('America/Coral_Harbour', '-', '01:00')), ('America/Coral_Harbour+02:30', ('America/Coral_Harbour', '+', '02:30')), ('UTC+15:00', ('UTC', '+', '15:00')), ('UTC-04:43', ('UTC', '-', '04:43')), ('UTC', ('UTC', None, None)), ('UTC+1', ('UTC+1', None, None)), ] for tzname, expected in tests: with self.subTest(tzname=tzname): self.assertEqual(split_tzname_delta(tzname), expected) class CursorWrapperTests(TransactionTestCase): available_apps = [] def _test_procedure(self, procedure_sql, params, param_types, kparams=None): with connection.cursor() as cursor: cursor.execute(procedure_sql) # Use a new cursor because in MySQL a procedure can't be used in the # same cursor in which it was created. 
with connection.cursor() as cursor: cursor.callproc('test_procedure', params, kparams) with connection.schema_editor() as editor: editor.remove_procedure('test_procedure', param_types) @skipUnlessDBFeature('create_test_procedure_without_params_sql') def test_callproc_without_params(self): self._test_procedure(connection.features.create_test_procedure_without_params_sql, [], []) @skipUnlessDBFeature('create_test_procedure_with_int_param_sql') def test_callproc_with_int_params(self): self._test_procedure(connection.features.create_test_procedure_with_int_param_sql, [1], ['INTEGER']) @skipUnlessDBFeature('create_test_procedure_with_int_param_sql', 'supports_callproc_kwargs') def test_callproc_kparams(self): self._test_procedure(connection.features.create_test_procedure_with_int_param_sql, [], ['INTEGER'], {'P_I': 1}) @skipIfDBFeature('supports_callproc_kwargs') def test_unsupported_callproc_kparams_raises_error(self): msg = 'Keyword parameters for callproc are not supported on this database backend.' with self.assertRaisesMessage(NotSupportedError, msg): with connection.cursor() as cursor: cursor.callproc('test_procedure', [], {'P_I': 1})
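
# --- Illustrative sketch (not part of the original tests) --------------------
# format_number() is the helper backends use to render a Decimal according to
# a field's max_digits/decimal_places before it is sent to the database. The
# values below mirror two of the table-driven cases above.
def _demo_format_number():
    assert format_number(Decimal('0.1234567890'), 12, 5) == '0.12346'
    assert format_number(Decimal('1234567890.1234567890'), None, 2) == '1234567890.12'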
3718bf44ea949576bdb29656af45a5eaacc5ae70f3531f353cddb7961b79c687
import datetime import itertools import unittest from copy import copy from unittest import mock from django.core.exceptions import FieldError from django.core.management.color import no_style from django.db import ( DatabaseError, DataError, IntegrityError, OperationalError, connection, ) from django.db.models import ( CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, CheckConstraint, DateField, DateTimeField, DecimalField, DurationField, F, FloatField, ForeignKey, ForeignObject, Index, IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy, PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UniqueConstraint, UUIDField, Value, ) from django.db.models.fields.json import KeyTextTransform from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper from django.db.models.indexes import IndexExpression from django.db.transaction import TransactionManagementError, atomic from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import ( CaptureQueriesContext, isolate_apps, register_lookup, ) from django.utils import timezone from .fields import ( CustomManyToManyField, InheritedManyToManyField, MediumBlobField, ) from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. 
self.isolated_local_models = [] def tearDown(self): # Delete any tables made for our models self.delete_tables() new_apps.clear_cache() for model in new_apps.get_models(): model._meta._expire_cache() if 'schema' in new_apps.all_models: for model in self.local_models: for many_to_many in model._meta.many_to_many: through = many_to_many.remote_field.through if through and through._meta.auto_created: del new_apps.all_models['schema'][through._meta.model_name] del new_apps.all_models['schema'][model._meta.model_name] if self.isolated_local_models: with connection.schema_editor() as editor: for model in self.isolated_local_models: editor.delete_model(model) def delete_tables(self): "Deletes all model tables for our models for a clean test environment" converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: connection.disable_constraint_checking() table_names = connection.introspection.table_names() if connection.features.ignores_table_name_case: table_names = [table_name.lower() for table_name in table_names] for model in itertools.chain(SchemaTests.models, self.local_models): tbl = converter(model._meta.db_table) if connection.features.ignores_table_name_case: tbl = tbl.lower() if tbl in table_names: editor.delete_model(model) table_names.remove(tbl) connection.enable_constraint_checking() def column_classes(self, model): with connection.cursor() as cursor: columns = { d[0]: (connection.introspection.get_field_type(d[1], d), d) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) } # SQLite has a different format for field_type for name, (type, desc) in columns.items(): if isinstance(type, tuple): columns[name] = (type[0], desc) return columns def get_primary_key(self, table): with connection.cursor() as cursor: return connection.introspection.get_primary_key_column(cursor, table) def get_indexes(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['index'] and len(c['columns']) == 1 ] def get_uniques(self, table): with connection.cursor() as cursor: return [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table).values() if c['unique'] and len(c['columns']) == 1 ] def get_constraints(self, table): """ Get the constraints on a table using a new cursor. """ with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) def get_constraints_for_column(self, model, column_name): constraints = self.get_constraints(model._meta.db_table) constraints_for_column = [] for name, details in constraints.items(): if details['columns'] == [column_name]: constraints_for_column.append(name) return sorted(constraints_for_column) def check_added_field_default(self, schema_editor, model, field, field_name, expected_default, cast_function=None): with connection.cursor() as cursor: schema_editor.add_field(model, field) cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table)) database_default = cursor.fetchall()[0][0] if cast_function and type(database_default) != type(expected_default): database_default = cast_function(database_default) self.assertEqual(database_default, expected_default) def get_constraints_count(self, table, column, fk_to): """ Return a dict with keys 'fks', 'uniques, and 'indexes' indicating the number of foreign keys, unique constraints, and indexes on `table`.`column`. 
The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {'fks': 0, 'uniques': 0, 'indexes': 0} for c in constraints.values(): if c['columns'] == [column]: if c['foreign_key'] == fk_to: counts['fks'] += 1 if c['unique']: counts['uniques'] += 1 elif c['index']: counts['indexes'] += 1 return counts def get_column_collation(self, table, column): with connection.cursor() as cursor: return next( f.collation for f in connection.introspection.get_table_description(cursor, table) if f.name == column ) def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]['orders'] self.assertTrue(all(val == expected for val, expected in zip(index_orders, order))) def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. """ constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details['columns'] == [column] and details['foreign_key']: constraint_fk = details['foreign_key'] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature('supports_foreign_keys') def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, 'author_id', 'schema_tag') @skipUnlessDBFeature('can_create_inline_fk') def test_inline_fk(self): # Create some tables. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book') # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name('book') editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, 'book_id', 'schema_book') # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. 
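        # In other words, editor.deferred_sql should contain no statement of
        # the form "ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY ..." for
        # the freshly added column.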
self.assertFalse([ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql ]) @skipUnlessDBFeature('can_create_inline_fk') def test_add_inline_fk_update_data(self): with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key and update data in the same transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) assertIndex = ( self.assertIn if connection.features.indexes_foreign_keys else self.assertNotIn ) assertIndex('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature( 'can_create_inline_fk', 'allows_multiple_constraints_on_same_fields', ) @isolate_apps('schema') def test_add_inline_fk_index_update_data(self): class Node(Model): class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) # Add an inline foreign key, update data, and an index in the same # transaction. new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True) new_field.set_attributes_from_name('new_parent_fk') parent = Node.objects.create() with connection.schema_editor() as editor: editor.add_field(Node, new_field) Node._meta.add_field(new_field) editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk]) editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx')) self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table)) @skipUnlessDBFeature('supports_foreign_keys') def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field('char_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('char_field') with connection.schema_editor() as editor: editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') @skipUnlessDBFeature('supports_index_on_text_field') def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field('text_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('text_field') with connection.schema_editor() as editor: editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author') @isolate_apps('schema') def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, 
strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_fk_to_proxy(self): "Creating a FK to a proxy model creates database constraints." class AuthorProxy(Author): class Meta: app_label = 'schema' apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = 'schema' apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag') # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') @isolate_apps('schema') def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. """ class Author(Model): class Meta: app_label = 'schema' class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name('id') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
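        # Sanity check: the BookWeak.author relation must be visible from
        # Author, so that altering Author's primary key actually reaches the
        # code that repoints related foreign keys.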
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag') @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @skipUnlessDBFeature('supports_foreign_keys') def test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor: editor.add_field(Author, new_field) drop_default_sql = editor.sql_alter_column_no_default % { 'column': editor.quote_name(new_field.name), } self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries)) # Table is not rebuilt. self.assertIs(any( 'CREATE TABLE' in query['sql'] for query in ctx.captured_queries ), False) self.assertIs(any( 'DROP TABLE' in query['sql'] for query in ctx.captured_queries ), False) columns = self.column_classes(Author) self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField']) self.assertTrue(columns['age'][1][6]) def test_add_field_remove_field(self): """ Adding a field and removing it removes all deferred sql referring to it. """ with connection.schema_editor() as editor: # Create a table with a unique constraint on the slug field. editor.create_model(Tag) # Remove the slug column. 
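            # Any deferred SQL that referenced the slug column should be
            # discarded along with it.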
editor.remove_field(Tag, Tag._meta.get_field('slug')) self.assertEqual(editor.deferred_sql, []) def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField']) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns['thing'] self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField']) # Make sure the values were transformed correctly self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2) def test_add_field_o2o_nullable(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Note) new_field = OneToOneField(Note, CASCADE, null=True) new_field.set_attributes_from_name('note') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertIn('note_id', columns) self.assertTrue(columns['note_id'][1][6]) def test_add_field_binary(self): """ Tests binary fields get a sane default (#22851) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field new_field = 
BinaryField(blank=True) new_field.set_attributes_from_name("bits") with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # MySQL annoyingly uses the same backend, so it'll come back as one of # these two types. self.assertIn(columns['bits'][0], ("BinaryField", "TextField")) def test_add_field_durationfield_with_default(self): with connection.schema_editor() as editor: editor.create_model(Author) new_field = DurationField(default=datetime.timedelta(minutes=10)) new_field.set_attributes_from_name('duration') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual( columns['duration'][0], connection.features.introspected_field_types['DurationField'], ) @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific") def test_add_binaryfield_mediumblob(self): """ Test adding a custom-sized binary field on MySQL (#24846). """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field with default new_field = MediumBlobField(blank=True, default=b'123') new_field.set_attributes_from_name('bits') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) # Introspection treats BLOBs as TextFields self.assertEqual(columns['bits'][0], "TextField") def test_alter(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField old_field = Author._meta.get_field("name") new_field = TextField(null=True) new_field.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertTrue(columns['name'][1][6]) # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) def test_alter_auto_field_to_integer_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to IntegerField old_field = Author._meta.get_field('id') new_field = IntegerField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Now that ID is an IntegerField, the database raises an error if it # isn't provided. 
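        # (Only asserted on backends that require an explicit value for a
        # non-auto primary key; see supports_unspecified_pk.)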
if not connection.features.supports_unspecified_pk: with self.assertRaises(DatabaseError): Author.objects.create() def test_alter_auto_field_to_char_field(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change AutoField to CharField old_field = Author._meta.get_field('id') new_field = CharField(primary_key=True, max_length=50) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_auto_field_quoted_db_column(self): class Foo(Model): id = AutoField(primary_key=True, db_column='"quoted_id"') class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.db_column = '"quoted_id"' new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_not_unique_field_to_primary_key(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) # Change UUIDField to primary key. old_field = Author._meta.get_field('uuid') new_field = UUIDField(primary_key=True) new_field.set_attributes_from_name('uuid') new_field.model = Author with connection.schema_editor() as editor: editor.remove_field(Author, Author._meta.get_field('id')) editor.alter_field(Author, old_field, new_field, strict=True) # Redundant unique constraint is not added. count = self.get_constraints_count( Author._meta.db_table, Author._meta.get_field('uuid').column, None, ) self.assertLessEqual(count['uniques'], 1) @isolate_apps('schema') def test_alter_primary_key_quoted_db_table(self): class Foo(Model): class Meta: app_label = 'schema' db_table = '"foo"' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Foo new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) Foo.objects.create() def test_alter_text_field(self): # Regression for "BLOB/TEXT column 'info' can't have a default value") # on MySQL. # Create the table with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field("info") new_field = TextField(blank=True) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) def test_alter_text_field_to_not_null_with_default_value(self): with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field('address') new_field = TextField(blank=True, default='', null=False) new_field.set_attributes_from_name('address') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl') def test_alter_fk_checks_deferred_constraints(self): """ #25492 - Altering a foreign key's structure and data in the same transaction. 
""" with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('parent') new_field = ForeignKey(Node, CASCADE) new_field.set_attributes_from_name('parent') parent = Node.objects.create() with connection.schema_editor() as editor: # Update the parent FK to create a deferred constraint check. Node.objects.update(parent=parent) editor.alter_field(Node, old_field, new_field, strict=True) def test_alter_text_field_to_date_field(self): """ #25002 - Test conversion of text field to date field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05') old_field = Note._meta.get_field('info') new_field = DateField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_datetime_field(self): """ #25002 - Test conversion of text field to datetime field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='1988-05-05 3:16:17.4567') old_field = Note._meta.get_field('info') new_field = DateTimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) def test_alter_text_field_to_time_field(self): """ #25002 - Test conversion of text field to time field. """ with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='3:16:17.4567') old_field = Note._meta.get_field('info') new_field = TimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) @skipUnlessDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_not_null_to_null(self): """ Nullability for textual fields is preserved on databases that interpret empty strings as NULLs. """ with connection.schema_editor() as editor: editor.create_model(Author) columns = self.column_classes(Author) # Field is nullable. self.assertTrue(columns['uuid'][1][6]) # Change to NOT NULL. old_field = Author._meta.get_field('uuid') new_field = SlugField(null=False, blank=True) new_field.set_attributes_from_name('uuid') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) columns = self.column_classes(Author) # Nullability is preserved. 
self.assertTrue(columns['uuid'][1][6]) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_nulls when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_char_field_decrease_length(self): # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) Author.objects.create(name='x' * 255) # Change max_length of CharField. 
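        # The existing 255-character row no longer fits, so the ALTER is
        # expected to fail with a DataError on PostgreSQL.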
old_field = Author._meta.get_field('name') new_field = CharField(max_length=254) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: msg = 'value too long for type character varying(254)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_field_with_custom_db_type(self): from django.contrib.postgres.fields import ArrayField class Foo(Model): field = ArrayField(CharField(max_length=255)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('field') new_field = ArrayField(CharField(max_length=16)) new_field.set_attributes_from_name('field') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(CharField(max_length=16)) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=['x' * 16]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(CharField(max_length=15)) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_array_field_decrease_nested_base_field_length(self): from django.contrib.postgres.fields import ArrayField class ArrayModel(Model): field = ArrayField(ArrayField(CharField(max_length=16))) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(ArrayModel) self.isolated_local_models = [ArrayModel] ArrayModel.objects.create(field=[['x' * 16]]) old_field = ArrayModel._meta.get_field('field') new_field = ArrayField(ArrayField(CharField(max_length=15))) new_field.set_attributes_from_name('field') new_field.model = ArrayModel with connection.schema_editor() as editor: msg = 'value too long for type character varying(15)' with self.assertRaisesMessage(DataError, msg): editor.alter_field(ArrayModel, old_field, new_field, strict=True) def test_alter_textfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_nulls when changing a TextField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Note) # Change the TextField to null old_field = Note._meta.get_field('info') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) def test_alter_null_to_not_null_keeping_default(self): """ #23738 - Can change a nullable field with default to non-nullable with the same default. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the FK old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, CASCADE, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for details in constraints.values(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') # Alter the OneToOneField to ForeignKey old_field = 
BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique anymore Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk_to_o2o(self): """ #24163 - Tests altering of ForeignKey to OneToOneField """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is not unique author = Author.objects.create(name="Joe") Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) Book.objects.all().delete() self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the ForeignKey to OneToOneField old_field = Book._meta.get_field("author") new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField']) # Ensure the field is unique now BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') def test_alter_field_fk_to_o2o(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') new_field = OneToOneField(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index on ForeignKey is replaced with a unique constraint for OneToOneField. 
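        # That is, the FK constraint (where supported) is kept, but the column
        # should now carry a single unique constraint and no plain index.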
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) def test_alter_field_fk_keeps_index(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) expected_fks = 1 if connection.features.supports_foreign_keys else 0 expected_indexes = 1 if connection.features.indexes_foreign_keys else 0 # Check the index is right to begin with. counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) old_field = Book._meta.get_field('author') # on_delete changed from CASCADE. new_field = ForeignKey(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) counts = self.get_constraints_count( Book._meta.db_table, Book._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The index remains. self.assertEqual( counts, {'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes}, ) def test_alter_field_o2o_to_fk(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint on OneToOneField is replaced with an index for ForeignKey. self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1}) def test_alter_field_o2o_keeps_unique(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) expected_fks = 1 if connection.features.supports_foreign_keys else 0 # Check the unique constraint is right to begin with. counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) old_field = BookWithO2O._meta.get_field('author') # on_delete changed from CASCADE. new_field = OneToOneField(Author, PROTECT) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) counts = self.get_constraints_count( BookWithO2O._meta.db_table, BookWithO2O._meta.get_field('author').column, (Author._meta.db_table, Author._meta.pk.column), ) # The unique constraint remains. 
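        # Only on_delete changed, and Django enforces on_delete in Python
        # rather than in the database, so no constraint should be rebuilt.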
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0}) @skipUnlessDBFeature('ignores_table_name_case') def test_alter_db_table_case(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Alter the case of the table old_table_name = Author._meta.db_table with connection.schema_editor() as editor: editor.alter_db_table(Author, old_table_name, old_table_name.upper()) def test_alter_implicit_id_to_explicit(self): """ Should be able to convert an implicit "id" field to an explicit "id" primary key field. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') Author.objects.create(name='Bar') def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self): """ Converting an implicit PK to BigAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self): """ Converting an implicit PK to SmallAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = SmallAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_int_pk_to_autofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to AutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = AutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # A model representing the updated model. class IntegerPKToAutoField(Model): i = AutoField(primary_key=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = IntegerPK._meta.db_table # An id (i) is generated by the database. 
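        # If the alteration failed to attach a sequence/identity to the
        # column, the insert below would fail or leave `i` unset.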
        obj = IntegerPKToAutoField.objects.create(j=1)
        self.assertIsNotNone(obj.i)

    def test_alter_int_pk_to_bigautofield_pk(self):
        """
        Should be able to rename an IntegerField(primary_key=True) to
        BigAutoField(primary_key=True).
        """
        with connection.schema_editor() as editor:
            editor.create_model(IntegerPK)

        old_field = IntegerPK._meta.get_field('i')
        new_field = BigAutoField(primary_key=True)
        new_field.model = IntegerPK
        new_field.set_attributes_from_name('i')

        with connection.schema_editor() as editor:
            editor.alter_field(IntegerPK, old_field, new_field, strict=True)

        # A model representing the updated model.
        class IntegerPKToBigAutoField(Model):
            i = BigAutoField(primary_key=True)
            j = IntegerField(unique=True)

            class Meta:
                app_label = 'schema'
                apps = new_apps
                db_table = IntegerPK._meta.db_table

        # An id (i) is generated by the database.
        obj = IntegerPKToBigAutoField.objects.create(j=1)
        self.assertIsNotNone(obj.i)

    @isolate_apps('schema')
    def test_alter_smallint_pk_to_smallautofield_pk(self):
        """
        Should be able to rename a SmallIntegerField(primary_key=True) to
        SmallAutoField(primary_key=True).
        """
        class SmallIntegerPK(Model):
            i = SmallIntegerField(primary_key=True)

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(SmallIntegerPK)
        self.isolated_local_models = [SmallIntegerPK]
        old_field = SmallIntegerPK._meta.get_field('i')
        new_field = SmallAutoField(primary_key=True)
        new_field.model = SmallIntegerPK
        new_field.set_attributes_from_name('i')
        with connection.schema_editor() as editor:
            editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)

    def test_alter_int_pk_to_int_unique(self):
        """
        Should be able to rename an IntegerField(primary_key=True) to
        IntegerField(unique=True).
        """
        with connection.schema_editor() as editor:
            editor.create_model(IntegerPK)
        # Delete the old PK
        old_field = IntegerPK._meta.get_field('i')
        new_field = IntegerField(unique=True)
        new_field.model = IntegerPK
        new_field.set_attributes_from_name('i')
        with connection.schema_editor() as editor:
            editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # The primary key constraint is gone. Result depends on database:
        # 'id' for SQLite, None for others (must not be 'i').
        self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None))

        # Set up a model class as it currently stands. The original IntegerPK
        # class is now out of date and some backends make use of the whole
        # model class when modifying a field (such as sqlite3 when remaking a
        # table) so an outdated model class leads to incorrect results.
        class Transitional(Model):
            i = IntegerField(unique=True)
            j = IntegerField(unique=True)

            class Meta:
                app_label = 'schema'
                apps = new_apps
                db_table = 'INTEGERPK'

        # model requires a new PK
        old_field = Transitional._meta.get_field('j')
        new_field = IntegerField(primary_key=True)
        new_field.model = Transitional
        new_field.set_attributes_from_name('j')

        with connection.schema_editor() as editor:
            editor.alter_field(Transitional, old_field, new_field, strict=True)

        # Create a model class representing the updated model.
        class IntegerUnique(Model):
            i = IntegerField(unique=True)
            j = IntegerField(primary_key=True)

            class Meta:
                app_label = 'schema'
                apps = new_apps
                db_table = 'INTEGERPK'

        # Ensure unique constraint works.
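        # `i` should now reject duplicate values while `j` acts as the new
        # primary key.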
        IntegerUnique.objects.create(i=1, j=1)
        with self.assertRaises(IntegrityError):
            IntegerUnique.objects.create(i=1, j=2)

    def test_rename(self):
        """
        Tests simple renaming of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
        self.assertNotIn("display_name", columns)
        # Alter the name field's name
        old_field = Author._meta.get_field("name")
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name("display_name")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        columns = self.column_classes(Author)
        self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField'])
        self.assertNotIn("name", columns)

    @isolate_apps('schema')
    def test_rename_referenced_field(self):
        class Author(Model):
            name = CharField(max_length=255, unique=True)

            class Meta:
                app_label = 'schema'

        class Book(Model):
            author = ForeignKey(Author, CASCADE, to_field='name')

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        new_field = CharField(max_length=255, unique=True)
        new_field.set_attributes_from_name('renamed')
        with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
            editor.alter_field(Author, Author._meta.get_field('name'), new_field)
        # Ensure the foreign key reference was updated.
        self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')

    @skipIfDBFeature('interprets_empty_strings_as_nulls')
    def test_rename_keep_null_status(self):
        """
        Renaming a field shouldn't affect the not null status.
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField']) self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField']) def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field with 
connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField']) # "Alter" the field. This should not rename the DB table to itself. with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True) # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field(LocalAuthorWithM2M, new_field) # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Make sure the model state is coherent with the table one now that # we've removed the tags field. opts = LocalAuthorWithM2M._meta opts.local_many_to_many.remove(new_field) del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name] opts._expire_cache() def test_m2m(self): self._test_m2m(ManyToManyField) def test_m2m_custom(self): self._test_m2m(CustomManyToManyField) def test_m2m_inherited(self): self._test_m2m(InheritedManyToManyField) def _test_m2m_through_alter(self, M2MFieldClass): """ Tests altering M2Ms with explicit through models (should no-op) """ class LocalAuthorTag(Model): author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalAuthorWithM2MThrough(Model): name = CharField(max_length=255) tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorTag) editor.create_model(LocalAuthorWithM2MThrough) editor.create_model(TagM2MTest) # Ensure the m2m table is there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) # "Alter" the field's blankness. This should not actually do anything. 
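        # With an explicit through model, the M2M field owns no table of its
        # own, so alter_field() should leave the through table untouched.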
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True) # Ensure the m2m table is still there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) def test_m2m_through_alter(self): self._test_m2m_through_alter(ManyToManyField) def test_m2m_through_alter_custom(self): self._test_m2m_through_alter(CustomManyToManyField) def test_m2m_through_alter_inherited(self): self._test_m2m_through_alter(InheritedManyToManyField) def _test_m2m_repoint(self, M2MFieldClass): """ Tests repointing M2M fields """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBookWithM2M) editor.create_model(TagM2MTest) editor.create_model(UniqueTest) # Ensure the M2M exists and points to TagM2MTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists( LocalBookWithM2M._meta.get_field("tags").remote_field.through, 'tagm2mtest_id', 'schema_tagm2mtest', ) # Repoint the M2M old_field = LocalBookWithM2M._meta.get_field("tags") new_field = M2MFieldClass(UniqueTest) new_field.contribute_to_class(LocalBookWithM2M, "uniques") with connection.schema_editor() as editor: editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True) # Ensure old M2M is gone with self.assertRaises(DatabaseError): self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) # This model looks like the new model and is used for teardown. opts = LocalBookWithM2M._meta opts.local_many_to_many.remove(old_field) # Ensure the new M2M exists and points to UniqueTest if connection.features.supports_foreign_keys: self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest') def test_m2m_repoint(self): self._test_m2m_repoint(ManyToManyField) def test_m2m_repoint_custom(self): self._test_m2m_repoint(CustomManyToManyField) def test_m2m_repoint_inherited(self): self._test_m2m_repoint(InheritedManyToManyField) @isolate_apps('schema') def test_m2m_rename_field_in_target_model(self): class LocalTagM2MTest(Model): title = CharField(max_length=255) class Meta: app_label = 'schema' class LocalM2M(Model): tags = ManyToManyField(LocalTagM2MTest) class Meta: app_label = 'schema' # Create the tables. with connection.schema_editor() as editor: editor.create_model(LocalM2M) editor.create_model(LocalTagM2MTest) self.isolated_local_models = [LocalM2M, LocalTagM2MTest] # Ensure the m2m table is there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) # Alter a field in LocalTagM2MTest. old_field = LocalTagM2MTest._meta.get_field('title') new_field = CharField(max_length=254) new_field.contribute_to_class(LocalTagM2MTest, 'title1') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
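        # Sanity check: the M2M relation from LocalM2M must be visible from
        # LocalTagM2MTest so that the rename has a related reference to
        # update.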
self.assertEqual(len(new_field.model._meta.related_objects), 1) with connection.schema_editor() as editor: editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True) # Ensure the m2m table is still there. self.assertEqual(len(self.column_classes(LocalM2M)), 1) @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") # Alter the column to remove it old_field = Author._meta.get_field("height") new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) for details in constraints.values(): if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it new_field2 = Author._meta.get_field("height") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()): self.fail("No check constraint for height found") @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') @isolate_apps('schema') def test_check_constraint_timedelta_param(self): class DurationModel(Model): duration = DurationField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(DurationModel) self.isolated_local_models = [DurationModel] constraint_name = 'duration_gte_5_minutes' constraint = CheckConstraint( check=Q(duration__gt=datetime.timedelta(minutes=5)), name=constraint_name, ) DurationModel._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(DurationModel, constraint) constraints = self.get_constraints(DurationModel._meta.db_table) self.assertIn(constraint_name, constraints) with self.assertRaises(IntegrityError), atomic(): DurationModel.objects.create(duration=datetime.timedelta(minutes=4)) DurationModel.objects.create(duration=datetime.timedelta(minutes=10)) @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints') def test_remove_field_check_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add the custom check constraint constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check') custom_constraint_name = constraint.name Author._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) # Ensure the constraints exist constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field check old_field = 
Author._meta.get_field('height') new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field check new_field2 = Author._meta.get_field('height') with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the check constraint with connection.schema_editor() as editor: Author._meta.constraints = [] editor.remove_constraint(Author, constraint) def test_unique(self): """ Tests removing and adding unique constraints to a single column. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be non-unique old_field = Tag._meta.get_field("slug") new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be unique new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") with self.assertRaises(IntegrityError): Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Rename the field new_field3 = SlugField(unique=True) new_field3.set_attributes_from_name("slug2") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field2, new_field3, strict=True) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") with self.assertRaises(IntegrityError): TagUniqueRename.objects.create(title="bar", slug2="foo") Tag.objects.all().delete() def test_unique_name_quoting(self): old_table_name = TagUniqueRename._meta.db_table try: with connection.schema_editor() as editor: editor.create_model(TagUniqueRename) editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table') TagUniqueRename._meta.db_table = 'unique-table' # This fails if the unique index name isn't quoted. 
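        # The table is now named 'unique-table', so the generated constraint
        # name contains a dash and must be quoted in the DDL. Illustrative,
        # backend-dependent sketch of the statement being exercised:
        #   ALTER TABLE "unique-table"
        #       ADD CONSTRAINT "unique-table_title_slug2_..._uniq" UNIQUE ("title", "slug2")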
editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),)) finally: TagUniqueRename._meta.db_table = old_table_name @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_unique_no_unnecessary_fk_drops(self): """ If AlterField isn't selective about dropping foreign key constraints when modifying a field with a unique constraint, the AlterField incorrectly drops and recreates the Book.author foreign key even though it doesn't restrict the field being changed (#29193). """ class Author(Model): name = CharField(max_length=254, unique=True) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) new_field = CharField(max_length=255, unique=True) new_field.model = Author new_field.set_attributes_from_name('name') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Author, Author._meta.get_field('name'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) @isolate_apps('schema') def test_unique_and_reverse_m2m(self): """ AlterField can modify a unique field when there's a reverse M2M relation on the model. """ class Tag(Model): title = CharField(max_length=255) slug = SlugField(unique=True) class Meta: app_label = 'schema' class Book(Model): tags = ManyToManyField(Tag, related_name='books') class Meta: app_label = 'schema' self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through] with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Book) new_field = SlugField(max_length=75, unique=True) new_field.model = Tag new_field.set_attributes_from_name('slug') with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm: with connection.schema_editor() as editor: editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field) # One SQL statement is executed to alter the field. self.assertEqual(len(cm.records), 1) # Ensure that the field is still unique. 
Tag.objects.create(title='foo', slug='foo') with self.assertRaises(IntegrityError): Tag.objects.create(title='bar', slug='foo') @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_field_unique_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueName) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name'], name='author_name_uniq') custom_constraint_name = constraint.name AuthorWithUniqueName._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueName, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Alter the column to remove field uniqueness old_field = AuthorWithUniqueName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Alter the column to re-add field uniqueness new_field2 = AuthorWithUniqueName._meta.get_field('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True) constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueName._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueName, constraint) def test_unique_together(self): """ Tests removing and adding unique_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter the model to its non-unique-together companion with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, []) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() def test_unique_together_with_fk(self): """ Tests removing and adding unique_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) def test_unique_together_with_fk_with_existing_index(self): """ Tests removing and adding unique_together constraints that include a foreign key, where the foreign key is added after the model is created. 
""" # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithoutAuthor) new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') editor.add_field(BookWithoutAuthor, new_field) # Ensure the fields aren't unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_unique_together_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueNameAndBirthday) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq') custom_constraint_name = constraint.name AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Remove unique together unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, []) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Re-add unique together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueNameAndBirthday._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) def test_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(fields=['name'], name='name_uq') # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table self.assertIs(sql.references_table(table), True) self.assertIs(sql.references_column(table, 'name'), True) # Remove constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(Upper('name').desc(), name='func_upper_uq') # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC']) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_unique_constraint(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) constraint = UniqueConstraint( Upper('title'), Lower('slug'), name='func_upper_lower_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(BookWithSlug, constraint) sql = constraint.create_sql(BookWithSlug, editor) table = BookWithSlug._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains database functions. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) sql = str(sql) self.assertIn('UPPER(%s)' % editor.quote_name('title'), sql) self.assertIn('LOWER(%s)' % editor.quote_name('slug'), sql) self.assertLess(sql.index('UPPER'), sql.index('LOWER')) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(BookWithSlug, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_unique_constraint_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( F('height').desc(), 'uuid', Lower('name').asc(), name='func_f_lower_field_unq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC', 'ASC']) constraints = self.get_constraints(table) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual(len(constraints[constraint.name]['columns']), 3) self.assertEqual(constraints[constraint.name]['columns'][1], 'uuid') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'uuid'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_partial_indexes') def test_func_unique_constraint_partial(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_cond_weight_uq', condition=Q(weight__isnull=False), ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'WHERE %s IS NOT NULL' % editor.quote_name('weight'), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_covering_indexes') def test_func_unique_constraint_covering(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint( Upper('name'), name='func_upper_covering_uq', include=['weight', 'height'], ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) self.assertEqual( constraints[constraint.name]['columns'], [None, 'weight', 'height'], ) self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) self.assertIs(sql.references_column(table, 'height'), True) self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql)) self.assertIn( 'INCLUDE (%s, %s)' % ( editor.quote_name('weight'), editor.quote_name('height'), ), str(sql), ) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): constraint = UniqueConstraint( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_uq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) sql = constraint.create_sql(Author, editor) table = Author._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove constraint. 
with connection.schema_editor() as editor: editor.remove_constraint(Author, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_collate(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) constraint = UniqueConstraint( Collate(F('title'), collation=collation).desc(), Collate('slug', collation=collation), name='func_collate_uq', ) # Add constraint. with connection.schema_editor() as editor: editor.add_constraint(BookWithSlug, constraint) sql = constraint.create_sql(BookWithSlug, editor) table = BookWithSlug._meta.db_table constraints = self.get_constraints(table) self.assertIn(constraint.name, constraints) self.assertIs(constraints[constraint.name]['unique'], True) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove constraint. with connection.schema_editor() as editor: editor.remove_constraint(BookWithSlug, constraint) self.assertNotIn(constraint.name, self.get_constraints(table)) @skipIfDBFeature('supports_expression_indexes') def test_func_unique_constraint_unsupported(self): # UniqueConstraint is ignored on databases that don't support indexes on # expressions. with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(F('name'), name='func_name_uq') with connection.schema_editor() as editor, self.assertNumQueries(0): self.assertIsNone(editor.add_constraint(Author, constraint)) self.assertIsNone(editor.remove_constraint(Author, constraint)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_nonexistent_field(self): constraint = UniqueConstraint(Lower('nonexistent'), name='func_nonexistent_uq') msg = ( "Cannot resolve keyword 'nonexistent' into field. Choices are: " "height, id, name, uuid, weight" ) with self.assertRaisesMessage(FieldError, msg): with connection.schema_editor() as editor: editor.add_constraint(Author, constraint) @skipUnlessDBFeature('supports_expression_indexes') def test_func_unique_constraint_nondeterministic(self): with connection.schema_editor() as editor: editor.create_model(Author) constraint = UniqueConstraint(Random(), name='func_random_uq') with connection.schema_editor() as editor: with self.assertRaises(DatabaseError): editor.add_constraint(Author, constraint) def test_index_together(self): """ Tests removing and adding index_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure there's no index on the year/slug columns first self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) # Alter the model to add an index with connection.schema_editor() as editor: editor.alter_index_together(Tag, [], [("slug", "title")]) # Ensure there is now an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), True, ) # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_index_together(Tag, [("slug", "title")], []) # Ensure there's no index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), False, ) def test_index_together_with_fk(self): """ Tests removing and adding index_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertIs( any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), True, ) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_index_together_does_not_remove_meta_indexes(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedNameAndBirthday) # Add the custom index index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx') custom_index_name = index.name AuthorWithIndexedNameAndBirthday._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedNameAndBirthday, index) # Ensure the indexes exist constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Remove index together index_together = AuthorWithIndexedNameAndBirthday._meta.index_together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, []) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 0) # Re-add index together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together) constraints = 
self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Drop the index with connection.schema_editor() as editor: AuthorWithIndexedNameAndBirthday._meta.indexes = [] editor.remove_index(AuthorWithIndexedNameAndBirthday, index) @isolate_apps('schema') def test_db_table(self): """ Tests renaming of the table """ class Author(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' # Create the table and one referring it. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Alter the table with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) # Ensure the foreign key reference was updated self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") # Alter the table again with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField']) def test_add_remove_index(self): """ Tests index addition and removal """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there and has no index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) # Add the index index = Index(fields=['name'], name='author_title_idx') with connection.schema_editor() as editor: editor.add_index(Author, index) self.assertIn('name', self.get_indexes(Author._meta.db_table)) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn('name', self.get_indexes(Author._meta.db_table)) def test_remove_db_index_doesnt_remove_custom_indexes(self): """ Changing db_index to False doesn't remove indexes from Meta.indexes. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedName) # Ensure the table has its index self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table)) # Add the custom index index = Index(fields=['-name'], name='author_name_idx') author_index_name = index.name with connection.schema_editor() as editor: db_index_name = editor._create_index_name( table_name=AuthorWithIndexedName._meta.db_table, column_names=('name',), ) try: AuthorWithIndexedName._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedName, index) old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertIn(author_index_name, old_constraints) self.assertIn(db_index_name, old_constraints) # Change name field to db_index=False old_field = AuthorWithIndexedName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True) new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertNotIn(db_index_name, new_constraints) # The index from Meta.indexes is still in the database. self.assertIn(author_index_name, new_constraints) # Drop the index with connection.schema_editor() as editor: editor.remove_index(AuthorWithIndexedName, index) finally: AuthorWithIndexedName._meta.indexes = [] def test_order_index(self): """ Indexes defined with ordering (ASC/DESC) defined on column """ with connection.schema_editor() as editor: editor.create_model(Author) # The table doesn't have an index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) index_name = 'author_name_idx' # Add the index index = Index(fields=['name', '-weight'], name=index_name) with connection.schema_editor() as editor: editor.add_index(Author, index) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC']) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_uniques(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() 
as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_uniques(Book._meta.db_table), ) def test_text_field_with_db_index(self): with connection.schema_editor() as editor: editor.create_model(AuthorTextFieldWithIndex) # The text_field index is present if the database supports it. assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)) def _index_expressions_wrappers(self): index_expression = IndexExpression() index_expression.set_wrapper_classes(connection) return ', '.join([ wrapper_cls.__qualname__ for wrapper_cls in index_expression.wrapper_classes ]) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_multiple_wrapper_references(self): index = Index(OrderBy(F('name').desc(), descending=True), name='name') msg = ( "Multiple references to %s can't be used in an indexed expression." % self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_invalid_topmost_expressions(self): index = Index(Upper(F('name').desc()), name='name') msg = ( '%s must be topmost expressions in an indexed expression.' % self._index_expressions_wrappers() ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, msg): editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name').desc(), name='func_lower_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains a database function. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_f(self): with connection.schema_editor() as editor: editor.create_model(Tag) index = Index('slug', F('title').desc(), name='func_f_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Tag, index) sql = index.create_sql(Tag, editor) table = Tag._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Tag._meta.db_table, index.name, ['ASC', 'DESC']) # SQL contains columns. self.assertIs(sql.references_column(table, 'slug'), True) self.assertIs(sql.references_column(table, 'title'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Tag, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_lookups(self): with connection.schema_editor() as editor: editor.create_model(Author) with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs): index = Index( F('name__lower'), F('weight__abs'), name='func_lower_abs_lookup_idx', ) # Add index. 
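            # Registering Lower/Abs as lookups lets F('name__lower') and
            # F('weight__abs') resolve as transforms, so the CREATE INDEX
            # statement ends up indexing roughly LOWER("name") and ABS("weight")
            # rather than plain columns (exact SQL varies by backend).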
with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns. self.assertIs(sql.references_column(table, 'name'), True) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Lower('name'), Upper('name'), name='func_lower_upper_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains database functions. self.assertIs(sql.references_column(table, 'name'), True) sql = str(sql) self.assertIn('LOWER(%s)' % editor.quote_name('name'), sql) self.assertIn('UPPER(%s)' % editor.quote_name('name'), sql) self.assertLess(sql.index('LOWER'), sql.index('UPPER')) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_composite_func_index_field_and_expression(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) index = Index( F('author').desc(), Lower('title').asc(), 'pub_date', name='func_f_lower_field_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Book, index) sql = index.create_sql(Book, editor) table = Book._meta.db_table constraints = self.get_constraints(table) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC', 'ASC']) self.assertEqual(len(constraints[index.name]['columns']), 3) self.assertEqual(constraints[index.name]['columns'][2], 'pub_date') # SQL contains database functions and columns. self.assertIs(sql.references_column(table, 'author_id'), True) self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'pub_date'), True) self.assertIn('LOWER(%s)' % editor.quote_name('title'), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @isolate_apps('schema') def test_func_index_f_decimalfield(self): class Node(Model): value = DecimalField(max_digits=5, decimal_places=2) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Node) index = Index(F('value'), name='func_f_decimalfield_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Node, index) sql = index.create_sql(Node, editor) table = Node._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'value'), True) # SQL doesn't contain casting. self.assertNotIn('CAST', str(sql)) # Remove index. 
with connection.schema_editor() as editor: editor.remove_index(Node, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_cast(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Cast('weight', FloatField()), name='func_cast_idx') # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'weight'), True) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_collate(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithSlug) index = Index( Collate(F('title'), collation=collation).desc(), Collate('slug', collation=collation), name='func_collate_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(BookWithSlug, index) sql = index.create_sql(BookWithSlug, editor) table = Book._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC', 'ASC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'title'), True) self.assertIs(sql.references_column(table, 'slug'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Book, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') @skipIfDBFeature('collate_as_index_expression') def test_func_index_collate_f_ordered(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest( 'This backend does not support case-insensitive collations.' ) with connection.schema_editor() as editor: editor.create_model(Author) index = Index( Collate(F('name').desc(), collation=collation), name='func_collate_f_desc_idx', ) # Add index. with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) if connection.features.supports_index_column_ordering: self.assertIndexOrder(table, index.name, ['DESC']) # SQL contains columns and a collation. self.assertIs(sql.references_column(table, 'name'), True) self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql)) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_calc(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('height') / (F('weight') + Value(5)), name='func_calc_idx') # Add index. 
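        # The arithmetic expression is compiled into the index DDL, producing
        # roughly ("height" / ("weight" + 5)); the ordering assertions below
        # check that the column references and operators appear in that order.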
with connection.schema_editor() as editor: editor.add_index(Author, index) sql = index.create_sql(Author, editor) table = Author._meta.db_table self.assertIn(index.name, self.get_constraints(table)) # SQL contains columns and expressions. self.assertIs(sql.references_column(table, 'height'), True) self.assertIs(sql.references_column(table, 'weight'), True) sql = str(sql) self.assertIs( sql.index(editor.quote_name('height')) < sql.index('/') < sql.index(editor.quote_name('weight')) < sql.index('+') < sql.index('5'), True, ) # Remove index. with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index('field__some_key', name='func_json_key_idx') with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field') @isolate_apps('schema') def test_func_index_json_key_transform_cast(self): class JSONModel(Model): field = JSONField() class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(JSONModel) self.isolated_local_models = [JSONModel] index = Index( Cast(KeyTextTransform('some_key', 'field'), IntegerField()), name='func_json_key_cast_idx', ) with connection.schema_editor() as editor: editor.add_index(JSONModel, index) sql = index.create_sql(JSONModel, editor) table = JSONModel._meta.db_table self.assertIn(index.name, self.get_constraints(table)) self.assertIs(sql.references_column(table, 'field'), True) with connection.schema_editor() as editor: editor.remove_index(JSONModel, index) self.assertNotIn(index.name, self.get_constraints(table)) @skipIfDBFeature('supports_expression_indexes') def test_func_index_unsupported(self): # Index is ignored on databases that don't support indexes on # expressions. with connection.schema_editor() as editor: editor.create_model(Author) index = Index(F('name'), name='random_idx') with connection.schema_editor() as editor, self.assertNumQueries(0): self.assertIsNone(editor.add_index(Author, index)) self.assertIsNone(editor.remove_index(Author, index)) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nonexistent_field(self): index = Index(Lower('nonexistent'), name='func_nonexistent_idx') msg = ( "Cannot resolve keyword 'nonexistent' into field. 
Choices are: " "height, id, name, uuid, weight" ) with self.assertRaisesMessage(FieldError, msg): with connection.schema_editor() as editor: editor.add_index(Author, index) @skipUnlessDBFeature('supports_expression_indexes') def test_func_index_nondeterministic(self): with connection.schema_editor() as editor: editor.create_model(Author) index = Index(Random(), name='func_random_idx') with connection.schema_editor() as editor: with self.assertRaises(DatabaseError): editor.add_index(Author, index) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id') # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug') def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @skipIfDBFeature('can_rollback_ddl') def test_unsupported_transactional_ddl_disallowed(self): message = ( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) with atomic(), connection.schema_editor() as editor: with self.assertRaisesMessage(TransactionManagementError, message): editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''}) @skipUnlessDBFeature('supports_foreign_keys', 'indexes_foreign_keys') def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. 
""" # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_quoted_db_table(self): class Author(Model): class Meta: db_table = '"table_author_double_quoted"' app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) if connection.vendor == 'mysql': self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"') else: self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted') def test_add_foreign_object(self): with connection.schema_editor() as editor: editor.create_model(BookForeignObj) new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id']) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.add_field(BookForeignObj, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. """ # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after an SQL reserved word: %s" % e) # The table is there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # The table is gone with self.assertRaises(DatabaseError): list(Thing.objects.all()) def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. 
""" def get_field(*args, field_class=IntegerField, **kwargs): kwargs['db_column'] = "CamelCase" field = field_class(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column identifier_converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) constraint_name = 'CamelCaseIndex' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "using": "", "columns": editor.quote_name(column), "extra": "", "condition": "", "include": "", } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(db_index=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) constraint_name = 'CamelCaseUniqConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute(editor._create_unique_sql(model, [field], constraint_name)) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(unique=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) if editor.sql_create_fk: constraint_name = 'CamelCaseFKConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), "deferrable": connection.ops.deferrable_sql(), } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the field default when adding a new field. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_add_field_default_nullable(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add new nullable CharField with a default. new_field = CharField(max_length=15, blank=True, null=True, default='surname') new_field.set_attributes_from_name('surname') with connection.schema_editor() as editor: editor.add_field(Author, new_field) Author.objects.create(name='Anonymous1') with connection.cursor() as cursor: cursor.execute('SELECT surname FROM schema_author;') item = cursor.fetchall()[0] self.assertIsNone(item[0]) field = next( f for f in connection.introspection.get_table_description( cursor, 'schema_author', ) if f.name == 'surname' ) # Field is still nullable. self.assertTrue(field.null_ok) # The database default is no longer set. if connection.features.can_introspect_default: self.assertIn(field.default, ['NULL', None]) def test_add_textfield_default_nullable(self): with connection.schema_editor() as editor: editor.create_model(Author) # Add new nullable TextField with a default. new_field = TextField(blank=True, null=True, default='text') new_field.set_attributes_from_name('description') with connection.schema_editor() as editor: editor.add_field(Author, new_field) Author.objects.create(name='Anonymous1') with connection.cursor() as cursor: cursor.execute('SELECT description FROM schema_author;') item = cursor.fetchall()[0] self.assertIsNone(item[0]) field = next( f for f in connection.introspection.get_table_description( cursor, 'schema_author', ) if f.name == 'description' ) # Field is still nullable. self.assertTrue(field.null_ok) # The database default is no longer set. 
if connection.features.can_introspect_default: self.assertIn(field.default, ['NULL', None]) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertIsNone(Author.objects.get().height) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_doesnt_perform_queries(self): """ No queries are performed if a field default changes and the field's not changing from null to non-null. """ with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) old_field = AuthorWithDefaultHeight._meta.get_field('height') new_default = old_field.default * 2 new_field = PositiveIntegerField(null=True, blank=True, default=new_default) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_fk_attributes_noop(self): """ No queries are performed when changing field attributes that don't affect the schema. """ with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_field = Book._meta.get_field('author') new_field = ForeignKey( Author, blank=True, editable=False, error_messages={'invalid': 'error message'}, help_text='help text', limit_choices_to={'limit': 'choice'}, on_delete=PROTECT, related_name='related_name', related_query_name='related_query_name', validators=[lambda x: x], verbose_name='verbose name', ) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, old_field, new_field, strict=True) with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(Book, new_field, old_field, strict=True) def test_add_textfield_unhashable_default(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') # Create a field that has an unhashable default new_field = TextField(default={}) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.add_field(Author, new_field) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_indexed_charfield(self): field = CharField(max_length=255, db_index=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. 
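# (On PostgreSQL, an indexed or unique CharField gets a second index using
# the varchar_pattern_ops operator class, suffixed "_like", so that LIKE /
# startswith lookups can still use an index under non-C collations.)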
self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'], ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_unique_charfield(self): field = CharField(max_length=255, unique=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add db_index=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_index=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add unique=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq'] ) # Remove unique=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_textfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Note) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) # Alter to add db_index=True and create 2 indexes. old_field = Note._meta.get_field('info') new_field = TextField(db_index=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Note, 'info'), ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like'] ) # Remove db_index=True to drop both indexes. 
with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield_with_db_index(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove unique=True (should drop unique index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_remove_unique_and_db_index_from_charfield(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove both unique=True and db_index=True (should drop all indexes) new_field2 = CharField(max_length=100) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_swap_unique_and_db_index_with_charfield(self): # Create the table and verify initial indexes. 
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to set unique=True and remove db_index=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to set db_index=True and remove unique=True (should restore index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_db_index_to_charfield_with_unique(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(Tag) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to add db_index=True old_field = Tag._meta.get_field('slug') new_field = SlugField(db_index=True, unique=True) new_field.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) # Alter to remove db_index=True new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name('slug') with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(Tag, 'slug'), ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key'] ) def test_alter_field_add_index_to_integerfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) # Alter to add db_index=True and create index. old_field = Author._meta.get_field('weight') new_field = IntegerField(null=True, db_index=True) new_field.set_attributes_from_name('weight') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9']) # Remove db_index=True to drop index. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'weight'), []) def test_alter_pk_with_self_referential_field(self): """ Changing the primary key field name of a model with a self-referential foreign key (#26384). 
""" with connection.schema_editor() as editor: editor.create_model(Node) old_field = Node._meta.get_field('node_id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') with connection.schema_editor() as editor: editor.alter_field(Node, old_field, new_field, strict=True) self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table) @mock.patch('django.db.backends.base.schema.datetime') @mock.patch('django.db.backends.base.schema.timezone') def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz): """ effective_default() should be used for DateField, DateTimeField, and TimeField if auto_now or auto_now_add is set (#25005). """ now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc) mocked_datetime.now = mock.MagicMock(return_value=now) mocked_tz.now = mock.MagicMock(return_value=now_tz) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check auto_now/auto_now_add attributes are not defined columns = self.column_classes(Author) self.assertNotIn("dob_auto_now", columns) self.assertNotIn("dob_auto_now_add", columns) self.assertNotIn("dtob_auto_now", columns) self.assertNotIn("dtob_auto_now_add", columns) self.assertNotIn("tob_auto_now", columns) self.assertNotIn("tob_auto_now_add", columns) # Create a row Author.objects.create(name='Anonymous1') # Ensure fields were added with the correct defaults dob_auto_now = DateField(auto_now=True) dob_auto_now.set_attributes_from_name('dob_auto_now') self.check_added_field_default( editor, Author, dob_auto_now, 'dob_auto_now', now.date(), cast_function=lambda x: x.date(), ) dob_auto_now_add = DateField(auto_now_add=True) dob_auto_now_add.set_attributes_from_name('dob_auto_now_add') self.check_added_field_default( editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(), cast_function=lambda x: x.date(), ) dtob_auto_now = DateTimeField(auto_now=True) dtob_auto_now.set_attributes_from_name('dtob_auto_now') self.check_added_field_default( editor, Author, dtob_auto_now, 'dtob_auto_now', now, ) dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add') self.check_added_field_default( editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now, ) tob_auto_now = TimeField(auto_now=True) tob_auto_now.set_attributes_from_name('tob_auto_now') self.check_added_field_default( editor, Author, tob_auto_now, 'tob_auto_now', now.time(), cast_function=lambda x: x.time(), ) tob_auto_now_add = TimeField(auto_now_add=True) tob_auto_now_add.set_attributes_from_name('tob_auto_now_add') self.check_added_field_default( editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(), cast_function=lambda x: x.time(), ) def test_namespaced_db_table_create_index_name(self): """ Table names are stripped of their namespace/schema before being used to generate index names. 
""" with connection.schema_editor() as editor: max_name_length = connection.ops.max_name_length() or 200 namespace = 'n' * max_name_length table_name = 't' * max_name_length namespaced_table_name = '"%s"."%s"' % (namespace, table_name) self.assertEqual( editor._create_index_name(table_name, []), editor._create_index_name(namespaced_table_name, []), ) @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax') def test_creation_with_db_table_double_quotes(self): oracle_user = connection.creation._test_database_user() class Student(Model): name = CharField(max_length=30) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user class Document(Model): name = CharField(max_length=30) students = ManyToManyField(Student) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user self.local_models = [Student, Document] with connection.schema_editor() as editor: editor.create_model(Student) editor.create_model(Document) doc = Document.objects.create(name='Test Name') student = Student.objects.create(name='Some man') doc.students.add(student) @isolate_apps('schema') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific db_table syntax.') def test_namespaced_db_table_foreign_key_reference(self): with connection.cursor() as cursor: cursor.execute('CREATE SCHEMA django_schema_tests') def delete_schema(): with connection.cursor() as cursor: cursor.execute('DROP SCHEMA django_schema_tests CASCADE') self.addCleanup(delete_schema) class Author(Model): class Meta: app_label = 'schema' class Book(Model): class Meta: app_label = 'schema' db_table = '"django_schema_tests"."schema_book"' author = ForeignKey(Author, CASCADE) author.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.add_field(Book, author) def test_rename_table_renames_deferred_sql_references(self): atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: editor.create_model(Author) editor.create_model(Book) editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author') editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book') try: self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_table('schema_author'), False) self.assertIs(statement.references_table('schema_book'), False) finally: editor.alter_db_table(Author, 'schema_renamed_author', 'schema_author') editor.alter_db_table(Author, 'schema_renamed_book', 'schema_book') def test_rename_column_renames_deferred_sql_references(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_title = Book._meta.get_field('title') new_title = CharField(max_length=100, db_index=True) new_title.set_attributes_from_name('renamed_title') editor.alter_field(Book, old_title, new_title) old_author = Book._meta.get_field('author') new_author = ForeignKey(Author, CASCADE) new_author.set_attributes_from_name('renamed_author') editor.alter_field(Book, old_author, new_author) self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_column('book', 'title'), False) self.assertIs(statement.references_column('book', 'author_id'), False) @isolate_apps('schema') def 
test_referenced_field_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the field they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_field(Foo, Foo._meta.get_field('field'), new_field) @isolate_apps('schema') def test_referenced_table_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the table they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table') Foo._meta.db_table = 'renamed_table' @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_charfield') def test_db_collation_charfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = CharField(max_length=255, db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @isolate_apps('schema') @skipUnlessDBFeature('supports_collation_on_textfield') def test_db_collation_textfield(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') class Foo(Model): field = TextField(db_collation=collation) class Meta: app_label = 'schema' self.isolated_local_models = [Foo] with connection.schema_editor() as editor: editor.create_model(Foo) self.assertEqual( self.get_column_collation(Foo._meta.db_table, 'field'), collation, ) @skipUnlessDBFeature('supports_collation_on_charfield') def test_add_field_db_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Author) new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('alias') with connection.schema_editor() as editor: editor.add_field(Author, new_field) columns = self.column_classes(Author) self.assertEqual( columns['alias'][0], connection.features.introspected_field_types['CharField'], ) self.assertEqual(columns['alias'][1][8], collation) @skipUnlessDBFeature('supports_collation_on_charfield') def test_alter_field_db_collation(self): collation = 
connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('name') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_column_collation(Author._meta.db_table, 'name'), collation, ) with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertIsNone(self.get_column_collation(Author._meta.db_table, 'name')) @skipUnlessDBFeature('supports_collation_on_charfield') def test_alter_field_type_and_db_collation(self): collation = connection.features.test_collations.get('non_default') if not collation: self.skipTest('Language collations are not supported.') with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field('info') new_field = CharField(max_length=255, db_collation=collation) new_field.set_attributes_from_name('info') new_field.model = Note with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual( columns['info'][0], connection.features.introspected_field_types['CharField'], ) self.assertEqual(columns['info'][1][8], collation) with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['info'][0], 'TextField') self.assertIsNone(columns['info'][1][8]) @skipUnlessDBFeature( 'supports_collation_on_charfield', 'supports_non_deterministic_collations', ) def test_ci_cs_db_collation(self): cs_collation = connection.features.test_collations.get('cs') ci_collation = connection.features.test_collations.get('ci') try: if connection.vendor == 'mysql': cs_collation = 'latin1_general_cs' elif connection.vendor == 'postgresql': cs_collation = 'en-x-icu' with connection.cursor() as cursor: cursor.execute( "CREATE COLLATION IF NOT EXISTS case_insensitive " "(provider = icu, locale = 'und-u-ks-level2', " "deterministic = false)" ) ci_collation = 'case_insensitive' # Create the table. with connection.schema_editor() as editor: editor.create_model(Author) # Case-insensitive collation. old_field = Author._meta.get_field('name') new_field_ci = CharField(max_length=255, db_collation=ci_collation) new_field_ci.set_attributes_from_name('name') new_field_ci.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field_ci, strict=True) Author.objects.create(name='ANDREW') self.assertIs(Author.objects.filter(name='Andrew').exists(), True) # Case-sensitive collation. new_field_cs = CharField(max_length=255, db_collation=cs_collation) new_field_cs.set_attributes_from_name('name') new_field_cs.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, new_field_ci, new_field_cs, strict=True) self.assertIs(Author.objects.filter(name='Andrew').exists(), False) finally: if connection.vendor == 'postgresql': with connection.cursor() as cursor: cursor.execute('DROP COLLATION IF EXISTS case_insensitive')
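# Illustrative sketch (not part of the test suite above): db_collation, as
# exercised by test_ci_cs_db_collation, is normally declared directly on the
# model field. The collation name 'case_insensitive' is an assumption -- it
# must already exist on the target database (e.g. the ICU collation created
# above for PostgreSQL).
from django.db.models import CharField, Model


class CollatedAuthor(Model):
    # Comparisons on this column follow the column collation, so under a
    # case-insensitive collation filter(name='andrew') also matches 'ANDREW'.
    name = CharField(max_length=255, db_collation='case_insensitive')

    class Meta:
        app_label = 'schema'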
from django.contrib.admin import ModelAdmin, TabularInline from django.contrib.admin.helpers import InlineAdminForm from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import Permission, User from django.contrib.contenttypes.models import ContentType from django.test import RequestFactory, TestCase, override_settings from django.urls import reverse from .admin import InnerInline, site as admin_site from .models import ( Author, BinaryTree, Book, BothVerboseNameProfile, Chapter, Child, ChildModel1, ChildModel2, Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2, Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent, ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection, Question, ShowInlineParent, Sighting, SomeChildModel, SomeParentModel, Teacher, VerboseNamePluralProfile, VerboseNameProfile, ) INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>' class TestDataMixin: @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='secret') @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestInline(TestDataMixin, TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): super().setUpTestData() cls.holder = Holder.objects.create(dummy=13) Inner.objects.create(dummy=42, holder=cls.holder) cls.parent = SomeParentModel.objects.create(name='a') SomeChildModel.objects.create(name='b', position='0', parent=cls.parent) SomeChildModel.objects.create(name='c', position='1', parent=cls.parent) cls.view_only_user = User.objects.create_user( username='user', password='pwd', is_staff=True, ) parent_ct = ContentType.objects.get_for_model(SomeParentModel) child_ct = ContentType.objects.get_for_model(SomeChildModel) permission = Permission.objects.get( codename='view_someparentmodel', content_type=parent_ct, ) cls.view_only_user.user_permissions.add(permission) permission = Permission.objects.get( codename='view_somechildmodel', content_type=child_ct, ) cls.view_only_user.user_permissions.add(permission) def setUp(self): self.client.force_login(self.superuser) def test_can_delete(self): """ can_delete should be passed to inlineformset factory. 
""" response = self.client.get( reverse('admin:admin_inlines_holder_change', args=(self.holder.id,)) ) inner_formset = response.context['inline_admin_formsets'][0].formset expected = InnerInline.can_delete actual = inner_formset.can_delete self.assertEqual(expected, actual, 'can_delete must be equal') def test_readonly_stacked_inline_label(self): """Bug #13174.""" holder = Holder.objects.create(dummy=42) Inner.objects.create(holder=holder, dummy=42, readonly='') response = self.client.get( reverse('admin:admin_inlines_holder_change', args=(holder.id,)) ) self.assertContains(response, '<label>Inner readonly label:</label>') def test_many_to_many_inlines(self): "Autogenerated many-to-many inlines are displayed correctly (#13407)" response = self.client.get(reverse('admin:admin_inlines_author_add')) # The heading for the m2m inline block uses the right text self.assertContains(response, '<h2>Author-book relationships</h2>') # The "add another" label is correct self.assertContains(response, 'Add another Author-book relationship') # The '+' is dropped from the autogenerated form prefix (Author_books+) self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"') def test_inline_primary(self): person = Person.objects.create(firstname='Imelda') item = OutfitItem.objects.create(name='Shoes') # Imelda likes shoes, but can't carry her own bags. data = { 'shoppingweakness_set-TOTAL_FORMS': 1, 'shoppingweakness_set-INITIAL_FORMS': 0, 'shoppingweakness_set-MAX_NUM_FORMS': 0, '_save': 'Save', 'person': person.id, 'max_weight': 0, 'shoppingweakness_set-0-item': item.id, } response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1) def test_tabular_inline_column_css_class(self): """ Field names are included in the context to output a field-specific CSS class name in the column headers. """ response = self.client.get(reverse('admin:admin_inlines_poll_add')) text_field, call_me_field = list(response.context['inline_admin_formset'].fields()) # Editable field. self.assertEqual(text_field['name'], 'text') self.assertContains(response, '<th class="column-text required">') # Read-only field. self.assertEqual(call_me_field['name'], 'call_me') self.assertContains(response, '<th class="column-call_me">') def test_custom_form_tabular_inline_label(self): """ A model form with a form field specified (TitleForm.title1) should have its label rendered in the tabular inline. """ response = self.client.get(reverse('admin:admin_inlines_titlecollection_add')) self.assertContains(response, '<th class="column-title1 required">Title1</th>', html=True) def test_custom_form_tabular_inline_extra_field_label(self): response = self.client.get(reverse('admin:admin_inlines_outfititem_add')) _, extra_field = list(response.context['inline_admin_formset'].fields()) self.assertEqual(extra_field['label'], 'Extra field') def test_non_editable_custom_form_tabular_inline_extra_field_label(self): response = self.client.get(reverse('admin:admin_inlines_chapter_add')) _, extra_field = list(response.context['inline_admin_formset'].fields()) self.assertEqual(extra_field['label'], 'Extra field') def test_custom_form_tabular_inline_overridden_label(self): """ SomeChildModelForm.__init__() overrides the label of a form field. That label is displayed in the TabularInline. 
""" response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add')) field = list(response.context['inline_admin_formset'].fields())[0] self.assertEqual(field['label'], 'new label') self.assertContains(response, '<th class="column-name required">New label</th>', html=True) def test_tabular_non_field_errors(self): """ non_field_errors are displayed correctly, including the correct value for colspan. """ data = { 'title_set-TOTAL_FORMS': 1, 'title_set-INITIAL_FORMS': 0, 'title_set-MAX_NUM_FORMS': 0, '_save': 'Save', 'title_set-0-title1': 'a title', 'title_set-0-title2': 'a different title', } response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data) # Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox. self.assertContains( response, '<tr class="row-form-errors"><td colspan="4"><ul class="errorlist nonfield">' '<li>The two titles must be the same</li></ul></td></tr>' ) def test_no_parent_callable_lookup(self): """Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable""" # Identically named callable isn't present in the parent ModelAdmin, # rendering of the add view shouldn't explode response = self.client.get(reverse('admin:admin_inlines_novel_add')) # View should have the child inlines section self.assertContains( response, '<div class="js-inline-admin-formset inline-group" id="chapter_set-group"' ) def test_callable_lookup(self): """Admin inline should invoke local callable when its name is listed in readonly_fields""" response = self.client.get(reverse('admin:admin_inlines_poll_add')) # Add parent object view should have the child inlines section self.assertContains( response, '<div class="js-inline-admin-formset inline-group" id="question_set-group"' ) # The right callable should be used for the inline readonly_fields # column cells self.assertContains(response, '<p>Callable in QuestionInline</p>') def test_help_text(self): """ The inlines' model field help texts are displayed when using both the stacked and tabular layouts. """ response = self.client.get(reverse('admin:admin_inlines_holder4_add')) self.assertContains(response, '<div class="help">Awesome stacked help text is awesome.</div>', 4) self.assertContains( response, '<img src="/static/admin/img/icon-unknown.svg" ' 'class="help help-tooltip" width="10" height="10" ' 'alt="(Awesome tabular help text is awesome.)" ' 'title="Awesome tabular help text is awesome.">', 1 ) # ReadOnly fields response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add')) self.assertContains( response, '<img src="/static/admin/img/icon-unknown.svg" ' 'class="help help-tooltip" width="10" height="10" ' 'alt="(Help text for ReadOnlyInline)" ' 'title="Help text for ReadOnlyInline">', 1 ) def test_tabular_model_form_meta_readonly_field(self): """ Tabular inlines use ModelForm.Meta.help_texts and labels for read-only fields. 
""" response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add')) self.assertContains( response, '<img src="/static/admin/img/icon-unknown.svg" ' 'class="help help-tooltip" width="10" height="10" ' 'alt="(Help text from ModelForm.Meta)" ' 'title="Help text from ModelForm.Meta">' ) self.assertContains(response, 'Label from ModelForm.Meta') def test_inline_hidden_field_no_column(self): """#18263 -- Make sure hidden fields don't get a column in tabular inlines""" parent = SomeParentModel.objects.create(name='a') SomeChildModel.objects.create(name='b', position='0', parent=parent) SomeChildModel.objects.create(name='c', position='1', parent=parent) response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,))) self.assertNotContains(response, '<td class="field-position">') self.assertInHTML( '<input id="id_somechildmodel_set-1-position" ' 'name="somechildmodel_set-1-position" type="hidden" value="1">', response.rendered_content, ) def test_tabular_inline_hidden_field_with_view_only_permissions(self): """ Content of hidden field is not visible in tabular inline when user has view-only permission. """ self.client.force_login(self.view_only_user) url = reverse( 'tabular_inline_hidden_field_admin:admin_inlines_someparentmodel_change', args=(self.parent.pk,), ) response = self.client.get(url) self.assertInHTML('<th class="column-position hidden">Position</th>', response.rendered_content) self.assertInHTML('<td class="field-position hidden"><p>0</p></td>', response.rendered_content) self.assertInHTML('<td class="field-position hidden"><p>1</p></td>', response.rendered_content) def test_stacked_inline_hidden_field_with_view_only_permissions(self): """ Content of hidden field is not visible in stacked inline when user has view-only permission. """ self.client.force_login(self.view_only_user) url = reverse( 'stacked_inline_hidden_field_in_group_admin:admin_inlines_someparentmodel_change', args=(self.parent.pk,), ) response = self.client.get(url) # The whole line containing name + position fields is not hidden. self.assertContains(response, '<div class="form-row field-name field-position">') # The div containing the position field is hidden. self.assertInHTML( '<div class="fieldBox field-position hidden">' '<label class="inline">Position:</label>' '<div class="readonly">0</div></div>', response.rendered_content, ) self.assertInHTML( '<div class="fieldBox field-position hidden">' '<label class="inline">Position:</label>' '<div class="readonly">1</div></div>', response.rendered_content, ) def test_stacked_inline_single_hidden_field_in_line_with_view_only_permissions(self): """ Content of hidden field is not visible in stacked inline when user has view-only permission and the field is grouped on a separate line. """ self.client.force_login(self.view_only_user) url = reverse( 'stacked_inline_hidden_field_on_single_line_admin:admin_inlines_someparentmodel_change', args=(self.parent.pk,), ) response = self.client.get(url) # The whole line containing position field is hidden. 
self.assertInHTML( '<div class="form-row hidden field-position">' '<div><label>Position:</label>' '<div class="readonly">0</div></div></div>', response.rendered_content, ) self.assertInHTML( '<div class="form-row hidden field-position">' '<div><label>Position:</label>' '<div class="readonly">1</div></div></div>', response.rendered_content, ) def test_tabular_inline_with_hidden_field_non_field_errors_has_correct_colspan(self): """ In tabular inlines, when a form has non-field errors, those errors are rendered in a table line with a single cell spanning the whole table width. Colspan must be equal to the number of visible columns. """ parent = SomeParentModel.objects.create(name='a') child = SomeChildModel.objects.create(name='b', position='0', parent=parent) url = reverse( 'tabular_inline_hidden_field_admin:admin_inlines_someparentmodel_change', args=(parent.id,), ) data = { 'name': parent.name, 'somechildmodel_set-TOTAL_FORMS': 1, 'somechildmodel_set-INITIAL_FORMS': 1, 'somechildmodel_set-MIN_NUM_FORMS': 0, 'somechildmodel_set-MAX_NUM_FORMS': 1000, '_save': 'Save', 'somechildmodel_set-0-id': child.id, 'somechildmodel_set-0-parent': parent.id, 'somechildmodel_set-0-name': child.name, 'somechildmodel_set-0-position': 1, } response = self.client.post(url, data) # Form has 3 visible columns and 1 hidden column. self.assertInHTML( '<thead><tr><th class="original"></th>' '<th class="column-name required">Name</th>' '<th class="column-position required hidden">Position</th>' '<th>Delete?</th></tr></thead>', response.rendered_content, ) # The non-field error must be spanned on 3 (visible) columns. self.assertInHTML( '<tr class="row-form-errors"><td colspan="3">' '<ul class="errorlist nonfield"><li>A non-field error</li></ul></td></tr>', response.rendered_content, ) def test_non_related_name_inline(self): """ Multiple inlines with related_name='+' have correct form prefixes. """ response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add')) self.assertContains(response, '<input type="hidden" name="-1-0-id" id="id_-1-0-id">', html=True) self.assertContains( response, '<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia">', html=True ) self.assertContains( response, '<input id="id_-1-0-name" type="text" class="vTextField" name="-1-0-name" maxlength="100">', html=True ) self.assertContains(response, '<input type="hidden" name="-2-0-id" id="id_-2-0-id">', html=True) self.assertContains( response, '<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia">', html=True ) self.assertContains( response, '<input id="id_-2-0-name" type="text" class="vTextField" name="-2-0-name" maxlength="100">', html=True ) @override_settings(USE_THOUSAND_SEPARATOR=True) def test_localize_pk_shortcut(self): """ The "View on Site" link is correct for locales that use thousand separators. """ holder = Holder.objects.create(pk=123456789, dummy=42) inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='') response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,))) inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk) self.assertContains(response, inner_shortcut) def test_custom_pk_shortcut(self): """ The "View on Site" link is correct for models with a custom primary key field. 
""" parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo") child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent) child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent) response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',))) child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk) child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk) self.assertContains(response, child1_shortcut) self.assertContains(response, child2_shortcut) def test_create_inlines_on_inherited_model(self): """ An object can be created with inlines when it inherits another class. """ data = { 'name': 'Martian', 'sighting_set-TOTAL_FORMS': 1, 'sighting_set-INITIAL_FORMS': 0, 'sighting_set-MAX_NUM_FORMS': 0, 'sighting_set-0-place': 'Zone 51', '_save': 'Save', } response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data) self.assertEqual(response.status_code, 302) self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1) def test_custom_get_extra_form(self): bt_head = BinaryTree.objects.create(name="Tree Head") BinaryTree.objects.create(name="First Child", parent=bt_head) # The maximum number of forms should respect 'get_max_num' on the # ModelAdmin max_forms_input = ( '<input id="id_binarytree_set-MAX_NUM_FORMS" ' 'name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d">' ) # The total number of forms will remain the same in either case total_forms_hidden = ( '<input id="id_binarytree_set-TOTAL_FORMS" ' 'name="binarytree_set-TOTAL_FORMS" type="hidden" value="2">' ) response = self.client.get(reverse('admin:admin_inlines_binarytree_add')) self.assertInHTML(max_forms_input % 3, response.rendered_content) self.assertInHTML(total_forms_hidden, response.rendered_content) response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,))) self.assertInHTML(max_forms_input % 2, response.rendered_content) self.assertInHTML(total_forms_hidden, response.rendered_content) def test_min_num(self): """ min_num and extra determine number of forms. 
""" class MinNumInline(TabularInline): model = BinaryTree min_num = 2 extra = 3 modeladmin = ModelAdmin(BinaryTree, admin_site) modeladmin.inlines = [MinNumInline] min_forms = ( '<input id="id_binarytree_set-MIN_NUM_FORMS" ' 'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2">' ) total_forms = ( '<input id="id_binarytree_set-TOTAL_FORMS" ' 'name="binarytree_set-TOTAL_FORMS" type="hidden" value="5">' ) request = self.factory.get(reverse('admin:admin_inlines_binarytree_add')) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request) self.assertInHTML(min_forms, response.rendered_content) self.assertInHTML(total_forms, response.rendered_content) def test_custom_min_num(self): bt_head = BinaryTree.objects.create(name="Tree Head") BinaryTree.objects.create(name="First Child", parent=bt_head) class MinNumInline(TabularInline): model = BinaryTree extra = 3 def get_min_num(self, request, obj=None, **kwargs): if obj: return 5 return 2 modeladmin = ModelAdmin(BinaryTree, admin_site) modeladmin.inlines = [MinNumInline] min_forms = ( '<input id="id_binarytree_set-MIN_NUM_FORMS" ' 'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d">' ) total_forms = ( '<input id="id_binarytree_set-TOTAL_FORMS" ' 'name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d">' ) request = self.factory.get(reverse('admin:admin_inlines_binarytree_add')) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request) self.assertInHTML(min_forms % 2, response.rendered_content) self.assertInHTML(total_forms % 5, response.rendered_content) request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,))) request.user = User(username='super', is_superuser=True) response = modeladmin.changeform_view(request, object_id=str(bt_head.id)) self.assertInHTML(min_forms % 5, response.rendered_content) self.assertInHTML(total_forms % 8, response.rendered_content) def test_inline_nonauto_noneditable_pk(self): response = self.client.get(reverse('admin:admin_inlines_author_add')) self.assertContains( response, '<input id="id_nonautopkbook_set-0-rand_pk" ' 'name="nonautopkbook_set-0-rand_pk" type="hidden">', html=True ) self.assertContains( response, '<input id="id_nonautopkbook_set-2-0-rand_pk" ' 'name="nonautopkbook_set-2-0-rand_pk" type="hidden">', html=True ) def test_inline_nonauto_noneditable_inherited_pk(self): response = self.client.get(reverse('admin:admin_inlines_author_add')) self.assertContains( response, '<input id="id_nonautopkbookchild_set-0-nonautopkbook_ptr" ' 'name="nonautopkbookchild_set-0-nonautopkbook_ptr" type="hidden">', html=True ) self.assertContains( response, '<input id="id_nonautopkbookchild_set-2-nonautopkbook_ptr" ' 'name="nonautopkbookchild_set-2-nonautopkbook_ptr" type="hidden">', html=True ) def test_inline_editable_pk(self): response = self.client.get(reverse('admin:admin_inlines_author_add')) self.assertContains( response, '<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" ' 'name="editablepkbook_set-0-manual_pk" type="number">', html=True, count=1 ) self.assertContains( response, '<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" ' 'name="editablepkbook_set-2-0-manual_pk" type="number">', html=True, count=1 ) def test_stacked_inline_edit_form_contains_has_original_class(self): holder = Holder.objects.create(dummy=1) holder.inner_set.create(dummy=1) response = self.client.get(reverse('admin:admin_inlines_holder_change', 
args=(holder.pk,))) self.assertContains( response, '<div class="inline-related has_original" id="inner_set-0">', count=1 ) self.assertContains( response, '<div class="inline-related" id="inner_set-1">', count=1 ) def test_inlines_show_change_link_registered(self): "Inlines `show_change_link` for registered models when enabled." holder = Holder4.objects.create(dummy=1) item1 = Inner4Stacked.objects.create(dummy=1, holder=holder) item2 = Inner4Tabular.objects.create(dummy=1, holder=holder) items = ( ('inner4stacked', item1.pk), ('inner4tabular', item2.pk), ) response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,))) self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model) for model, pk in items: url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,)) self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML)) def test_inlines_show_change_link_unregistered(self): "Inlines `show_change_link` disabled for unregistered models." parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo") ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent) ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent) response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',))) self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model) self.assertNotContains(response, INLINE_CHANGELINK_HTML) def test_tabular_inline_show_change_link_false_registered(self): "Inlines `show_change_link` disabled by default." poll = Poll.objects.create(name="New poll") Question.objects.create(poll=poll) response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,))) self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model) self.assertNotContains(response, INLINE_CHANGELINK_HTML) def test_noneditable_inline_has_field_inputs(self): """Inlines without change permission shows field inputs on add form.""" response = self.client.get(reverse('admin:admin_inlines_novelreadonlychapter_add')) self.assertContains( response, '<input type="text" name="chapter_set-0-name" ' 'class="vTextField" maxlength="40" id="id_chapter_set-0-name">', html=True ) def test_inlines_plural_heading_foreign_key(self): response = self.client.get(reverse('admin:admin_inlines_holder4_add')) self.assertContains(response, '<h2>Inner4 stackeds</h2>', html=True) self.assertContains(response, '<h2>Inner4 tabulars</h2>', html=True) def test_inlines_singular_heading_one_to_one(self): response = self.client.get(reverse('admin:admin_inlines_person_add')) self.assertContains(response, '<h2>Author</h2>', html=True) # Tabular. self.assertContains(response, '<h2>Fashionista</h2>', html=True) # Stacked. 
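# Illustrative sketch (an assumption, not necessarily how this test app's
# admin.py spells it): the show_change_link behaviour asserted above is
# enabled on the InlineModelAdmin subclass itself, e.g.
#
#     class Inner4TabularInline(TabularInline):
#         model = Inner4Tabular
#         show_change_link = True  # renders the "inlinechangelink" anchor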
def test_inlines_based_on_model_state(self): parent = ShowInlineParent.objects.create(show_inlines=False) data = { 'show_inlines': 'on', '_save': 'Save', } change_url = reverse( 'admin:admin_inlines_showinlineparent_change', args=(parent.id,), ) response = self.client.post(change_url, data) self.assertEqual(response.status_code, 302) parent.refresh_from_db() self.assertIs(parent.show_inlines, True) @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestInlineMedia(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_inline_media_only_base(self): holder = Holder(dummy=13) holder.save() Inner(dummy=42, holder=holder).save() change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,)) response = self.client.get(change_url) self.assertContains(response, 'my_awesome_admin_scripts.js') def test_inline_media_only_inline(self): holder = Holder3(dummy=13) holder.save() Inner3(dummy=42, holder=holder).save() change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,)) response = self.client.get(change_url) self.assertEqual( response.context['inline_admin_formsets'][0].media._js, [ 'admin/js/vendor/jquery/jquery.min.js', 'my_awesome_inline_scripts.js', 'custom_number.js', 'admin/js/jquery.init.js', 'admin/js/inlines.js', ] ) self.assertContains(response, 'my_awesome_inline_scripts.js') def test_all_inline_media(self): holder = Holder2(dummy=13) holder.save() Inner2(dummy=42, holder=holder).save() change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,)) response = self.client.get(change_url) self.assertContains(response, 'my_awesome_admin_scripts.js') self.assertContains(response, 'my_awesome_inline_scripts.js') @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestInlineAdminForm(TestCase): def test_immutable_content_type(self): """Regression for #9362 The problem depends only on InlineAdminForm and its "original" argument, so we can safely set the other arguments to None/{}. 
We just need to check that the content_type argument of Child isn't altered by the internals of the inline form.""" sally = Teacher.objects.create(name='Sally') john = Parent.objects.create(name='John') joe = Child.objects.create(name='Joe', teacher=sally, parent=john) iaf = InlineAdminForm(None, None, {}, {}, joe) parent_ct = ContentType.objects.get_for_model(Parent) self.assertEqual(iaf.original.content_type, parent_ct) @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestInlineProtectedOnDelete(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_deleting_inline_with_protected_delete_does_not_validate(self): lotr = Novel.objects.create(name='Lord of the rings') chapter = Chapter.objects.create(novel=lotr, name='Many Meetings') foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda') change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,)) response = self.client.get(change_url) data = { 'name': lotr.name, 'chapter_set-TOTAL_FORMS': 1, 'chapter_set-INITIAL_FORMS': 1, 'chapter_set-MAX_NUM_FORMS': 1000, '_save': 'Save', 'chapter_set-0-id': chapter.id, 'chapter_set-0-name': chapter.name, 'chapter_set-0-novel': lotr.id, 'chapter_set-0-DELETE': 'on' } response = self.client.post(change_url, data) self.assertContains(response, "Deleting chapter %s would require deleting " "the following protected related objects: foot note %s" % (chapter, foot_note)) @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestInlinePermissions(TestCase): """ Make sure the admin respects permissions for objects that are edited inline. Refs #8060. """ @classmethod def setUpTestData(cls): cls.user = User(username='admin', is_staff=True, is_active=True) cls.user.set_password('secret') cls.user.save() cls.author_ct = ContentType.objects.get_for_model(Author) cls.holder_ct = ContentType.objects.get_for_model(Holder2) cls.book_ct = ContentType.objects.get_for_model(Book) cls.inner_ct = ContentType.objects.get_for_model(Inner2) # User always has permissions to add and change Authors, and Holders, # the main (parent) models of the inlines. Permissions on the inlines # vary per test. 
permission = Permission.objects.get(codename='add_author', content_type=cls.author_ct) cls.user.user_permissions.add(permission) permission = Permission.objects.get(codename='change_author', content_type=cls.author_ct) cls.user.user_permissions.add(permission) permission = Permission.objects.get(codename='add_holder2', content_type=cls.holder_ct) cls.user.user_permissions.add(permission) permission = Permission.objects.get(codename='change_holder2', content_type=cls.holder_ct) cls.user.user_permissions.add(permission) author = Author.objects.create(pk=1, name='The Author') cls.book = author.books.create(name='The inline Book') cls.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,)) # Get the ID of the automatically created intermediate model for the Author-Book m2m author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=cls.book) cls.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk cls.holder = Holder2.objects.create(dummy=13) cls.inner2 = Inner2.objects.create(dummy=42, holder=cls.holder) def setUp(self): self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(self.holder.id,)) self.client.force_login(self.user) def test_inline_add_m2m_noperm(self): response = self.client.get(reverse('admin:admin_inlines_author_add')) # No change permission on books, so no inline self.assertNotContains(response, '<h2>Author-book relationships</h2>') self.assertNotContains(response, 'Add another Author-Book Relationship') self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"') def test_inline_add_fk_noperm(self): response = self.client.get(reverse('admin:admin_inlines_holder2_add')) # No permissions on Inner2s, so no inline self.assertNotContains(response, '<h2>Inner2s</h2>') self.assertNotContains(response, 'Add another Inner2') self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"') def test_inline_change_m2m_noperm(self): response = self.client.get(self.author_change_url) # No change permission on books, so no inline self.assertNotContains(response, '<h2>Author-book relationships</h2>') self.assertNotContains(response, 'Add another Author-Book Relationship') self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"') def test_inline_change_fk_noperm(self): response = self.client.get(self.holder_change_url) # No permissions on Inner2s, so no inline self.assertNotContains(response, '<h2>Inner2s</h2>') self.assertNotContains(response, 'Add another Inner2') self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"') def test_inline_add_m2m_view_only_perm(self): permission = Permission.objects.get(codename='view_book', content_type=self.book_ct) self.user.user_permissions.add(permission) response = self.client.get(reverse('admin:admin_inlines_author_add')) # View-only inlines. (It could be nicer to hide the empty, non-editable # inlines on the add page.) 
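# Only the view permission is granted here, so add/change/delete all remain
# False and the formset renders no editable forms (TOTAL_FORMS is 0).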
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True) self.assertIs(response.context['inline_admin_formset'].has_add_permission, False) self.assertIs(response.context['inline_admin_formset'].has_change_permission, False) self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False) self.assertContains(response, '<h2>Author-book relationships</h2>') self.assertContains( response, '<input type="hidden" name="Author_books-TOTAL_FORMS" value="0" ' 'id="id_Author_books-TOTAL_FORMS">', html=True, ) self.assertNotContains(response, 'Add another Author-Book Relationship') def test_inline_add_m2m_add_perm(self): permission = Permission.objects.get(codename='add_book', content_type=self.book_ct) self.user.user_permissions.add(permission) response = self.client.get(reverse('admin:admin_inlines_author_add')) # No change permission on Books, so no inline self.assertNotContains(response, '<h2>Author-book relationships</h2>') self.assertNotContains(response, 'Add another Author-Book Relationship') self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"') def test_inline_add_fk_add_perm(self): permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = self.client.get(reverse('admin:admin_inlines_holder2_add')) # Add permission on inner2s, so we get the inline self.assertContains(response, '<h2>Inner2s</h2>') self.assertContains(response, 'Add another Inner2') self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" ' 'value="3" name="inner2_set-TOTAL_FORMS">', html=True) def test_inline_change_m2m_add_perm(self): permission = Permission.objects.get(codename='add_book', content_type=self.book_ct) self.user.user_permissions.add(permission) response = self.client.get(self.author_change_url) # No change permission on books, so no inline self.assertNotContains(response, '<h2>Author-book relationships</h2>') self.assertNotContains(response, 'Add another Author-Book Relationship') self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"') self.assertNotContains(response, 'id="id_Author_books-0-DELETE"') def test_inline_change_m2m_view_only_perm(self): permission = Permission.objects.get(codename='view_book', content_type=self.book_ct) self.user.user_permissions.add(permission) response = self.client.get(self.author_change_url) # View-only inlines. self.assertIs(response.context['inline_admin_formset'].has_view_permission, True) self.assertIs(response.context['inline_admin_formset'].has_add_permission, False) self.assertIs(response.context['inline_admin_formset'].has_change_permission, False) self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False) self.assertContains(response, '<h2>Author-book relationships</h2>') self.assertContains( response, '<input type="hidden" name="Author_books-TOTAL_FORMS" value="1" ' 'id="id_Author_books-TOTAL_FORMS">', html=True, ) # The field in the inline is read-only. 
self.assertContains(response, '<p>%s</p>' % self.book) self.assertNotContains( response, '<input type="checkbox" name="Author_books-0-DELETE" id="id_Author_books-0-DELETE">', html=True, ) def test_inline_change_m2m_change_perm(self): permission = Permission.objects.get(codename='change_book', content_type=self.book_ct) self.user.user_permissions.add(permission) response = self.client.get(self.author_change_url) # We have change perm on books, so we can add/change/delete inlines self.assertIs(response.context['inline_admin_formset'].has_view_permission, True) self.assertIs(response.context['inline_admin_formset'].has_add_permission, True) self.assertIs(response.context['inline_admin_formset'].has_change_permission, True) self.assertIs(response.context['inline_admin_formset'].has_delete_permission, True) self.assertContains(response, '<h2>Author-book relationships</h2>') self.assertContains(response, 'Add another Author-book relationship') self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" ' 'value="4" name="Author_books-TOTAL_FORMS">', html=True) self.assertContains( response, '<input type="hidden" id="id_Author_books-0-id" value="%i" ' 'name="Author_books-0-id">' % self.author_book_auto_m2m_intermediate_id, html=True ) self.assertContains(response, 'id="id_Author_books-0-DELETE"') def test_inline_change_fk_add_perm(self): permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = self.client.get(self.holder_change_url) # Add permission on inner2s, so we can add but not modify existing self.assertContains(response, '<h2>Inner2s</h2>') self.assertContains(response, 'Add another Inner2') # 3 extra forms only, not the existing instance form self.assertContains( response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="3" ' 'name="inner2_set-TOTAL_FORMS">', html=True ) self.assertNotContains( response, '<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id, html=True ) def test_inline_change_fk_change_perm(self): permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = self.client.get(self.holder_change_url) # Change permission on inner2s, so we can change existing but not add new self.assertContains(response, '<h2>Inner2s</h2>', count=2) # Just the one form for existing instances self.assertContains( response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">', html=True ) self.assertContains( response, '<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id, html=True ) # max-num 0 means we can't add new ones self.assertContains( response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" value="0" name="inner2_set-MAX_NUM_FORMS">', html=True ) # TabularInline self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True) self.assertContains( response, '<input type="number" name="inner2_set-2-0-dummy" value="%s" ' 'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy, html=True, ) def test_inline_change_fk_add_change_perm(self): permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = 
self.client.get(self.holder_change_url) # Add/change perm, so we can add new and change existing self.assertContains(response, '<h2>Inner2s</h2>') # One form for existing instance and three extra for new self.assertContains( response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">', html=True ) self.assertContains( response, '<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id, html=True ) def test_inline_change_fk_change_del_perm(self): permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = self.client.get(self.holder_change_url) # Change/delete perm on inner2s, so we can change/delete existing self.assertContains(response, '<h2>Inner2s</h2>') # One form for existing instance only, no new self.assertContains( response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">', html=True ) self.assertContains( response, '<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id, html=True ) self.assertContains(response, 'id="id_inner2_set-0-DELETE"') def test_inline_change_fk_all_perms(self): permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct) self.user.user_permissions.add(permission) response = self.client.get(self.holder_change_url) # All perms on inner2s, so we can add/change/delete self.assertContains(response, '<h2>Inner2s</h2>', count=2) # One form for existing instance only, three for new self.assertContains( response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">', html=True ) self.assertContains( response, '<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id, html=True ) self.assertContains(response, 'id="id_inner2_set-0-DELETE"') # TabularInline self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True) self.assertContains( response, '<input type="number" name="inner2_set-2-0-dummy" value="%s" ' 'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy, html=True, ) @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestReadOnlyChangeViewInlinePermissions(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user('testing', password='password', is_staff=True) cls.user.user_permissions.add( Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll)) ) cls.user.user_permissions.add( *Permission.objects.filter( codename__endswith="question", content_type=ContentType.objects.get_for_model(Question) ).values_list('pk', flat=True) ) cls.poll = Poll.objects.create(name="Survey") cls.add_url = reverse('admin:admin_inlines_poll_add') cls.change_url = reverse('admin:admin_inlines_poll_change', args=(cls.poll.id,)) def setUp(self): self.client.force_login(self.user) def test_add_url_not_allowed(self): response = self.client.get(self.add_url) self.assertEqual(response.status_code, 403) response = 
self.client.post(self.add_url, {}) self.assertEqual(response.status_code, 403) def test_post_to_change_url_not_allowed(self): response = self.client.post(self.change_url, {}) self.assertEqual(response.status_code, 403) def test_get_to_change_url_is_allowed(self): response = self.client.get(self.change_url) self.assertEqual(response.status_code, 200) def test_main_model_is_rendered_as_read_only(self): response = self.client.get(self.change_url) self.assertContains( response, '<div class="readonly">%s</div>' % self.poll.name, html=True ) input = '<input type="text" name="name" value="%s" class="vTextField" maxlength="40" required id="id_name">' self.assertNotContains( response, input % self.poll.name, html=True ) def test_inlines_are_rendered_as_read_only(self): question = Question.objects.create(text="How will this be rendered?", poll=self.poll) response = self.client.get(self.change_url) self.assertContains( response, '<td class="field-text"><p>%s</p></td>' % question.text, html=True ) self.assertNotContains(response, 'id="id_question_set-0-text"') self.assertNotContains(response, 'id="id_related_objs-0-DELETE"') def test_submit_line_shows_only_close_button(self): response = self.client.get(self.change_url) self.assertContains( response, '<a href="/admin/admin_inlines/poll/" class="closelink">Close</a>', html=True ) delete_link = '<p class="deletelink-box"><a href="/admin/admin_inlines/poll/%s/delete/" class="deletelink">Delete</a></p>' # noqa self.assertNotContains( response, delete_link % self.poll.id, html=True ) self.assertNotContains(response, '<input type="submit" value="Save and add another" name="_addanother">') self.assertNotContains(response, '<input type="submit" value="Save and continue editing" name="_continue">') def test_inline_delete_buttons_are_not_shown(self): Question.objects.create(text="How will this be rendered?", poll=self.poll) response = self.client.get(self.change_url) self.assertNotContains( response, '<input type="checkbox" name="question_set-0-DELETE" id="id_question_set-0-DELETE">', html=True ) def test_extra_inlines_are_not_shown(self): response = self.client.get(self.change_url) self.assertNotContains(response, 'id="id_question_set-0-text"') @override_settings(ROOT_URLCONF='admin_inlines.urls') class TestVerboseNameInlineForms(TestDataMixin, TestCase): factory = RequestFactory() def test_verbose_name_inline(self): class NonVerboseProfileInline(TabularInline): model = Profile verbose_name = 'Non-verbose childs' class VerboseNameProfileInline(TabularInline): model = VerboseNameProfile verbose_name = 'Childs with verbose name' class VerboseNamePluralProfileInline(TabularInline): model = VerboseNamePluralProfile verbose_name = 'Childs with verbose name plural' class BothVerboseNameProfileInline(TabularInline): model = BothVerboseNameProfile verbose_name = 'Childs with both verbose names' modeladmin = ModelAdmin(ProfileCollection, admin_site) modeladmin.inlines = [ NonVerboseProfileInline, VerboseNameProfileInline, VerboseNamePluralProfileInline, BothVerboseNameProfileInline, ] obj = ProfileCollection.objects.create() url = reverse('admin:admin_inlines_profilecollection_change', args=(obj.pk,)) request = self.factory.get(url) request.user = self.superuser response = modeladmin.changeform_view(request) self.assertNotContains(response, 'Add another Profile') # Non-verbose model. 
self.assertContains(response, '<h2>Non-verbose childss</h2>') self.assertContains(response, 'Add another Non-verbose child') self.assertNotContains(response, '<h2>Profiles</h2>') # Model with verbose name. self.assertContains(response, '<h2>Childs with verbose names</h2>') self.assertContains(response, 'Add another Childs with verbose name') self.assertNotContains(response, '<h2>Model with verbose name onlys</h2>') self.assertNotContains(response, 'Add another Model with verbose name only') # Model with verbose name plural. self.assertContains(response, '<h2>Childs with verbose name plurals</h2>') self.assertContains(response, 'Add another Childs with verbose name plural') self.assertNotContains(response, '<h2>Model with verbose name plural only</h2>') # Model with both verbose names. self.assertContains(response, '<h2>Childs with both verbose namess</h2>') self.assertContains(response, 'Add another Childs with both verbose names') self.assertNotContains(response, '<h2>Model with both - plural name</h2>') self.assertNotContains(response, 'Add another Model with both - name') def test_verbose_name_plural_inline(self): class NonVerboseProfileInline(TabularInline): model = Profile verbose_name_plural = 'Non-verbose childs' class VerboseNameProfileInline(TabularInline): model = VerboseNameProfile verbose_name_plural = 'Childs with verbose name' class VerboseNamePluralProfileInline(TabularInline): model = VerboseNamePluralProfile verbose_name_plural = 'Childs with verbose name plural' class BothVerboseNameProfileInline(TabularInline): model = BothVerboseNameProfile verbose_name_plural = 'Childs with both verbose names' modeladmin = ModelAdmin(ProfileCollection, admin_site) modeladmin.inlines = [ NonVerboseProfileInline, VerboseNameProfileInline, VerboseNamePluralProfileInline, BothVerboseNameProfileInline, ] obj = ProfileCollection.objects.create() url = reverse('admin:admin_inlines_profilecollection_change', args=(obj.pk,)) request = self.factory.get(url) request.user = self.superuser response = modeladmin.changeform_view(request) # Non-verbose model. self.assertContains(response, '<h2>Non-verbose childs</h2>') self.assertContains(response, 'Add another Profile') self.assertNotContains(response, '<h2>Profiles</h2>') # Model with verbose name. self.assertContains(response, '<h2>Childs with verbose name</h2>') self.assertContains(response, 'Add another Model with verbose name only') self.assertNotContains(response, '<h2>Model with verbose name onlys</h2>') # Model with verbose name plural. self.assertContains(response, '<h2>Childs with verbose name plural</h2>') self.assertContains(response, 'Add another Profile') self.assertNotContains(response, '<h2>Model with verbose name plural only</h2>') # Model with both verbose names. 
self.assertContains(response, '<h2>Childs with both verbose names</h2>') self.assertContains(response, 'Add another Model with both - name') self.assertNotContains(response, '<h2>Model with both - plural name</h2>') def test_both_verbose_names_inline(self): class NonVerboseProfileInline(TabularInline): model = Profile verbose_name = 'Non-verbose childs - name' verbose_name_plural = 'Non-verbose childs - plural name' class VerboseNameProfileInline(TabularInline): model = VerboseNameProfile verbose_name = 'Childs with verbose name - name' verbose_name_plural = 'Childs with verbose name - plural name' class VerboseNamePluralProfileInline(TabularInline): model = VerboseNamePluralProfile verbose_name = 'Childs with verbose name plural - name' verbose_name_plural = 'Childs with verbose name plural - plural name' class BothVerboseNameProfileInline(TabularInline): model = BothVerboseNameProfile verbose_name = 'Childs with both - name' verbose_name_plural = 'Childs with both - plural name' modeladmin = ModelAdmin(ProfileCollection, admin_site) modeladmin.inlines = [ NonVerboseProfileInline, VerboseNameProfileInline, VerboseNamePluralProfileInline, BothVerboseNameProfileInline, ] obj = ProfileCollection.objects.create() url = reverse('admin:admin_inlines_profilecollection_change', args=(obj.pk,)) request = self.factory.get(url) request.user = self.superuser response = modeladmin.changeform_view(request) self.assertNotContains(response, 'Add another Profile') # Non-verbose model. self.assertContains(response, '<h2>Non-verbose childs - plural name</h2>') self.assertContains(response, 'Add another Non-verbose childs - name') self.assertNotContains(response, '<h2>Profiles</h2>') # Model with verbose name. self.assertContains(response, '<h2>Childs with verbose name - plural name</h2>') self.assertContains(response, 'Add another Childs with verbose name - name') self.assertNotContains(response, '<h2>Model with verbose name onlys</h2>') # Model with verbose name plural. self.assertContains( response, '<h2>Childs with verbose name plural - plural name</h2>', ) self.assertContains( response, 'Add another Childs with verbose name plural - name', ) self.assertNotContains(response, '<h2>Model with verbose name plural only</h2>') # Model with both verbose names. self.assertContains(response, '<h2>Childs with both - plural name</h2>') self.assertContains(response, 'Add another Childs with both - name') self.assertNotContains(response, '<h2>Model with both - plural name</h2>') self.assertNotContains(response, 'Add another Model with both - name') @override_settings(ROOT_URLCONF='admin_inlines.urls') class SeleniumTests(AdminSeleniumTestCase): available_apps = ['admin_inlines'] + AdminSeleniumTestCase.available_apps def setUp(self): User.objects.create_superuser(username='super', password='secret', email='[email protected]') def test_add_stackeds(self): """ The "Add another XXX" link correctly adds items to the stacked formset. 
""" from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add')) inline_id = '#inner4stacked_set-group' rows_selector = '%s .dynamic-inner4stacked_set' % inline_id self.assertCountSeleniumElements(rows_selector, 3) add_button = self.selenium.find_element(By.LINK_TEXT, 'Add another Inner4 stacked') add_button.click() self.assertCountSeleniumElements(rows_selector, 4) def test_delete_stackeds(self): from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add')) inline_id = '#inner4stacked_set-group' rows_selector = '%s .dynamic-inner4stacked_set' % inline_id self.assertCountSeleniumElements(rows_selector, 3) add_button = self.selenium.find_element(By.LINK_TEXT, 'Add another Inner4 stacked') add_button.click() add_button.click() self.assertCountSeleniumElements(rows_selector, 5) for delete_link in self.selenium.find_elements(By.CSS_SELECTOR, '%s .inline-deletelink' % inline_id): delete_link.click() with self.disable_implicit_wait(): self.assertCountSeleniumElements(rows_selector, 0) def test_delete_invalid_stacked_inlines(self): from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add')) inline_id = '#inner4stacked_set-group' rows_selector = '%s .dynamic-inner4stacked_set' % inline_id self.assertCountSeleniumElements(rows_selector, 3) add_button = self.selenium.find_element( By.LINK_TEXT, 'Add another Inner4 stacked', ) add_button.click() add_button.click() self.assertCountSeleniumElements('#id_inner4stacked_set-4-dummy', 1) # Enter some data and click 'Save'. self.selenium.find_element(By.NAME, 'dummy').send_keys('1') self.selenium.find_element(By.NAME, 'inner4stacked_set-0-dummy').send_keys('100') self.selenium.find_element(By.NAME, 'inner4stacked_set-1-dummy').send_keys('101') self.selenium.find_element(By.NAME, 'inner4stacked_set-2-dummy').send_keys('222') self.selenium.find_element(By.NAME, 'inner4stacked_set-3-dummy').send_keys('103') self.selenium.find_element(By.NAME, 'inner4stacked_set-4-dummy').send_keys('222') with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # Sanity check. self.assertCountSeleniumElements(rows_selector, 5) errorlist = self.selenium.find_element( By.CSS_SELECTOR, '%s .dynamic-inner4stacked_set .errorlist li' % inline_id, ) self.assertEqual('Please correct the duplicate values below.', errorlist.text) delete_link = self.selenium.find_element(By.CSS_SELECTOR, '#inner4stacked_set-4 .inline-deletelink') delete_link.click() self.assertCountSeleniumElements(rows_selector, 4) with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException): self.selenium.find_element(By.CSS_SELECTOR, '%s .dynamic-inner4stacked_set .errorlist li' % inline_id) with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # The objects have been created in the database. 
self.assertEqual(Inner4Stacked.objects.all().count(), 4) def test_delete_invalid_tabular_inlines(self): from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add')) inline_id = '#inner4tabular_set-group' rows_selector = '%s .dynamic-inner4tabular_set' % inline_id self.assertCountSeleniumElements(rows_selector, 3) add_button = self.selenium.find_element( By.LINK_TEXT, 'Add another Inner4 tabular' ) add_button.click() add_button.click() self.assertCountSeleniumElements('#id_inner4tabular_set-4-dummy', 1) # Enter some data and click 'Save'. self.selenium.find_element(By.NAME, 'dummy').send_keys('1') self.selenium.find_element(By.NAME, 'inner4tabular_set-0-dummy').send_keys('100') self.selenium.find_element(By.NAME, 'inner4tabular_set-1-dummy').send_keys('101') self.selenium.find_element(By.NAME, 'inner4tabular_set-2-dummy').send_keys('222') self.selenium.find_element(By.NAME, 'inner4tabular_set-3-dummy').send_keys('103') self.selenium.find_element(By.NAME, 'inner4tabular_set-4-dummy').send_keys('222') with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # Sanity Check. self.assertCountSeleniumElements(rows_selector, 5) # Non-field errorlist is in its own <tr> just before # tr#inner4tabular_set-3: errorlist = self.selenium.find_element( By.CSS_SELECTOR, '%s #inner4tabular_set-3 + .row-form-errors .errorlist li' % inline_id ) self.assertEqual('Please correct the duplicate values below.', errorlist.text) delete_link = self.selenium.find_element(By.CSS_SELECTOR, '#inner4tabular_set-4 .inline-deletelink') delete_link.click() self.assertCountSeleniumElements(rows_selector, 4) with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException): self.selenium.find_element(By.CSS_SELECTOR, '%s .dynamic-inner4tabular_set .errorlist li' % inline_id) with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # The objects have been created in the database. self.assertEqual(Inner4Tabular.objects.all().count(), 4) def test_add_inlines(self): """ The "Add another XXX" link correctly adds items to the inline form. """ from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add')) # There's only one inline to start with and it has the correct ID. self.assertCountSeleniumElements('.dynamic-profile_set', 1) self.assertEqual( self.selenium.find_elements(By.CSS_SELECTOR, '.dynamic-profile_set')[0].get_attribute('id'), 'profile_set-0', ) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]', 1) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]', 1) # Add an inline self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() # The inline has been added, it has the right id, and it contains the # correct fields. 
self.assertCountSeleniumElements('.dynamic-profile_set', 2) self.assertEqual( self.selenium.find_elements(By.CSS_SELECTOR, '.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1', ) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]', 1) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]', 1) # Let's add another one to be sure self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() self.assertCountSeleniumElements('.dynamic-profile_set', 3) self.assertEqual( self.selenium.find_elements(By.CSS_SELECTOR, '.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2', ) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]', 1) self.assertCountSeleniumElements('.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]', 1) # Enter some data and click 'Save' self.selenium.find_element(By.NAME, 'profile_set-0-first_name').send_keys('0 first name 1') self.selenium.find_element(By.NAME, 'profile_set-0-last_name').send_keys('0 last name 2') self.selenium.find_element(By.NAME, 'profile_set-1-first_name').send_keys('1 first name 1') self.selenium.find_element(By.NAME, 'profile_set-1-last_name').send_keys('1 last name 2') self.selenium.find_element(By.NAME, 'profile_set-2-first_name').send_keys('2 first name 1') self.selenium.find_element(By.NAME, 'profile_set-2-last_name').send_keys('2 last name 2') with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # The objects have been created in the database self.assertEqual(ProfileCollection.objects.all().count(), 1) self.assertEqual(Profile.objects.all().count(), 3) def test_add_inline_link_absent_for_view_only_parent_model(self): from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.by import By user = User.objects.create_user('testing', password='password', is_staff=True) user.user_permissions.add( Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll)) ) user.user_permissions.add( *Permission.objects.filter( codename__endswith="question", content_type=ContentType.objects.get_for_model(Question) ).values_list('pk', flat=True) ) self.admin_login(username='testing', password='password') poll = Poll.objects.create(name="Survey") change_url = reverse('admin:admin_inlines_poll_change', args=(poll.id,)) self.selenium.get(self.live_server_url + change_url) with self.disable_implicit_wait(): with self.assertRaises(NoSuchElementException): self.selenium.find_element(By.LINK_TEXT, 'Add another Question') def test_delete_inlines(self): from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add')) # Add a few inlines self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() self.selenium.find_element(By.LINK_TEXT, 'Add another Profile').click() self.assertCountSeleniumElements('#profile_set-group table tr.dynamic-profile_set', 5) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-0', 1) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-1', 1) 
self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-2', 1) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-3', 1) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-4', 1) # Click on a few delete buttons self.selenium.find_element( By.CSS_SELECTOR, 'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a', ).click() self.selenium.find_element( By.CSS_SELECTOR, 'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a', ).click() # The rows are gone and the IDs have been re-sequenced self.assertCountSeleniumElements('#profile_set-group table tr.dynamic-profile_set', 3) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-0', 1) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-1', 1) self.assertCountSeleniumElements('form#profilecollection_form tr.dynamic-profile_set#profile_set-2', 1) def test_collapsed_inlines(self): from selenium.webdriver.common.by import By # Collapsed inlines have SHOW/HIDE links. self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_author_add')) # One field is in a stacked inline, other in a tabular one. test_fields = ['#id_nonautopkbook_set-0-title', '#id_nonautopkbook_set-2-0-title'] show_links = self.selenium.find_elements(By.LINK_TEXT, 'SHOW') self.assertEqual(len(show_links), 3) for show_index, field_name in enumerate(test_fields, 0): self.wait_until_invisible(field_name) show_links[show_index].click() self.wait_until_visible(field_name) hide_links = self.selenium.find_elements(By.LINK_TEXT, 'HIDE') self.assertEqual(len(hide_links), 2) for hide_index, field_name in enumerate(test_fields, 0): self.wait_until_visible(field_name) hide_links[hide_index].click() self.wait_until_invisible(field_name) def test_added_stacked_inline_with_collapsed_fields(self): from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_teacher_add')) self.selenium.find_element(By.LINK_TEXT, 'Add another Child').click() test_fields = ['#id_child_set-0-name', '#id_child_set-1-name'] show_links = self.selenium.find_elements(By.LINK_TEXT, 'SHOW') self.assertEqual(len(show_links), 2) for show_index, field_name in enumerate(test_fields, 0): self.wait_until_invisible(field_name) show_links[show_index].click() self.wait_until_visible(field_name) hide_links = self.selenium.find_elements(By.LINK_TEXT, 'HIDE') self.assertEqual(len(hide_links), 2) for hide_index, field_name in enumerate(test_fields, 0): self.wait_until_visible(field_name) hide_links[hide_index].click() self.wait_until_invisible(field_name) def assertBorder(self, element, border): width, style, color = border.split(' ') border_properties = [ 'border-bottom-%s', 'border-left-%s', 'border-right-%s', 'border-top-%s', ] for prop in border_properties: prop = prop % 'width' self.assertEqual(element.value_of_css_property(prop), width) for prop in border_properties: prop = prop % 'style' self.assertEqual(element.value_of_css_property(prop), style) # Convert hex color to rgb. self.assertRegex(color, '#[0-9a-f]{6}') r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16) # The value may be expressed as either rgb() or rgba() depending on the # browser. 
colors = [ 'rgb(%d, %d, %d)' % (r, g, b), 'rgba(%d, %d, %d, 1)' % (r, g, b), ] for prop in border_properties: prop = prop % 'color' self.assertIn(element.value_of_css_property(prop), colors) def test_inline_formset_error_input_border(self): from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add')) self.wait_until_visible('#id_dummy') self.selenium.find_element(By.ID, 'id_dummy').send_keys(1) fields = ['id_inner5stacked_set-0-dummy', 'id_inner5tabular_set-0-dummy'] show_links = self.selenium.find_elements(By.LINK_TEXT, 'SHOW') for show_index, field_name in enumerate(fields): show_links[show_index].click() self.wait_until_visible('#' + field_name) self.selenium.find_element(By.ID, field_name).send_keys(1) # Before save all inputs have default border for inline in ('stacked', 'tabular'): for field_name in ('name', 'select', 'text'): element_id = 'id_inner5%s_set-0-%s' % (inline, field_name) self.assertBorder( self.selenium.find_element(By.ID, element_id), '1px solid #cccccc', ) self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() # Test the red border around inputs by css selectors stacked_selectors = ['.errors input', '.errors select', '.errors textarea'] for selector in stacked_selectors: self.assertBorder( self.selenium.find_element(By.CSS_SELECTOR, selector), '1px solid #ba2121', ) tabular_selectors = [ 'td ul.errorlist + input', 'td ul.errorlist + select', 'td ul.errorlist + textarea' ] for selector in tabular_selectors: self.assertBorder( self.selenium.find_element(By.CSS_SELECTOR, selector), '1px solid #ba2121', ) def test_inline_formset_error(self): from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add')) stacked_inline_formset_selector = 'div#inner5stacked_set-group fieldset.module.collapse' tabular_inline_formset_selector = 'div#inner5tabular_set-group fieldset.module.collapse' # Inlines without errors, both inlines collapsed self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.assertCountSeleniumElements(stacked_inline_formset_selector + '.collapsed', 1) self.assertCountSeleniumElements(tabular_inline_formset_selector + '.collapsed', 1) show_links = self.selenium.find_elements(By.LINK_TEXT, 'SHOW') self.assertEqual(len(show_links), 2) # Inlines with errors, both inlines expanded test_fields = ['#id_inner5stacked_set-0-dummy', '#id_inner5tabular_set-0-dummy'] for show_index, field_name in enumerate(test_fields): show_links[show_index].click() self.wait_until_visible(field_name) self.selenium.find_element(By.ID, field_name[1:]).send_keys(1) hide_links = self.selenium.find_elements(By.LINK_TEXT, 'HIDE') self.assertEqual(len(hide_links), 2) for hide_index, field_name in enumerate(test_fields): hide_link = hide_links[hide_index] self.selenium.execute_script('window.scrollTo(0, %s);' % hide_link.location['y']) hide_link.click() self.wait_until_invisible(field_name) with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() with self.disable_implicit_wait(): self.assertCountSeleniumElements(stacked_inline_formset_selector + '.collapsed', 0) self.assertCountSeleniumElements(tabular_inline_formset_selector + '.collapsed', 0) self.assertCountSeleniumElements(stacked_inline_formset_selector, 1) self.assertCountSeleniumElements(tabular_inline_formset_selector, 1) 
def test_inlines_verbose_name(self): """ The item added by the "Add another XXX" link must use the correct verbose_name in the inline form. """ from selenium.webdriver.common.by import By self.admin_login(username='super', password='secret') # Hide sidebar. self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_course_add')) toggle_button = self.selenium.find_element(By.CSS_SELECTOR, '#toggle-nav-sidebar') toggle_button.click() # Each combination of horizontal/vertical filter with stacked/tabular # inlines. tests = [ 'admin:admin_inlines_course_add', 'admin:admin_inlines_courseproxy_add', 'admin:admin_inlines_courseproxy1_add', 'admin:admin_inlines_courseproxy2_add', ] css_selector = '.dynamic-class_set#class_set-%s h2' for url_name in tests: with self.subTest(url=url_name): self.selenium.get(self.live_server_url + reverse(url_name)) # First inline shows the verbose_name. available, chosen = self.selenium.find_elements(By.CSS_SELECTOR, css_selector % 0) self.assertEqual(available.text, 'AVAILABLE ATTENDANT') self.assertEqual(chosen.text, 'CHOSEN ATTENDANT') # Added inline should also have the correct verbose_name. self.selenium.find_element(By.LINK_TEXT, 'Add another Class').click() available, chosen = self.selenium.find_elements(By.CSS_SELECTOR, css_selector % 1) self.assertEqual(available.text, 'AVAILABLE ATTENDANT') self.assertEqual(chosen.text, 'CHOSEN ATTENDANT') # Third inline should also have the correct verbose_name. self.selenium.find_element(By.LINK_TEXT, 'Add another Class').click() available, chosen = self.selenium.find_elements(By.CSS_SELECTOR, css_selector % 2) self.assertEqual(available.text, 'AVAILABLE ATTENDANT') self.assertEqual(chosen.text, 'CHOSEN ATTENDANT')
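# ---------------------------------------------------------------------------
# Editor's sketch, not part of the original suite: the flags asserted in
# TestInlinePermissions (has_view_permission, has_add_permission,
# has_change_permission, has_delete_permission on inline_admin_formset) mirror
# the hooks exposed by InlineModelAdmin. A hypothetical inline that is rendered
# but never editable could hard-code them as below; the class is illustrative
# only and is not registered anywhere.
class _ViewOnlyInner2InlineSketch(TabularInline):
    model = Inner2

    def has_view_permission(self, request, obj=None):
        return True

    def has_add_permission(self, request, obj=None):
        return False

    def has_change_permission(self, request, obj=None):
        return False

    def has_delete_permission(self, request, obj=None):
        return False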
""" Testing of admin inline formsets. """ import random from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.db import models class Parent(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name class Teacher(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name class Child(models.Model): name = models.CharField(max_length=50) teacher = models.ForeignKey(Teacher, models.CASCADE) content_type = models.ForeignKey(ContentType, models.CASCADE) object_id = models.PositiveIntegerField() parent = GenericForeignKey() def __str__(self): return 'I am %s, a child of %s' % (self.name, self.parent) class Book(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name class Author(models.Model): name = models.CharField(max_length=50) books = models.ManyToManyField(Book) person = models.OneToOneField('Person', models.CASCADE, null=True) class NonAutoPKBook(models.Model): rand_pk = models.IntegerField(primary_key=True, editable=False) author = models.ForeignKey(Author, models.CASCADE) title = models.CharField(max_length=50) def save(self, *args, **kwargs): while not self.rand_pk: test_pk = random.randint(1, 99999) if not NonAutoPKBook.objects.filter(rand_pk=test_pk).exists(): self.rand_pk = test_pk super().save(*args, **kwargs) class NonAutoPKBookChild(NonAutoPKBook): pass class EditablePKBook(models.Model): manual_pk = models.IntegerField(primary_key=True) author = models.ForeignKey(Author, models.CASCADE) title = models.CharField(max_length=50) class Holder(models.Model): dummy = models.IntegerField() class Inner(models.Model): dummy = models.IntegerField() holder = models.ForeignKey(Holder, models.CASCADE) readonly = models.CharField("Inner readonly label", max_length=1) def get_absolute_url(self): return '/inner/' class Holder2(models.Model): dummy = models.IntegerField() class Inner2(models.Model): dummy = models.IntegerField() holder = models.ForeignKey(Holder2, models.CASCADE) class Holder3(models.Model): dummy = models.IntegerField() class Inner3(models.Model): dummy = models.IntegerField() holder = models.ForeignKey(Holder3, models.CASCADE) # Models for ticket #8190 class Holder4(models.Model): dummy = models.IntegerField() class Inner4Stacked(models.Model): dummy = models.IntegerField(help_text="Awesome stacked help text is awesome.") holder = models.ForeignKey(Holder4, models.CASCADE) class Meta: constraints = [ models.UniqueConstraint(fields=['dummy', 'holder'], name='unique_stacked_dummy_per_holder') ] class Inner4Tabular(models.Model): dummy = models.IntegerField(help_text="Awesome tabular help text is awesome.") holder = models.ForeignKey(Holder4, models.CASCADE) class Meta: constraints = [ models.UniqueConstraint(fields=['dummy', 'holder'], name='unique_tabular_dummy_per_holder') ] # Models for ticket #31441 class Holder5(models.Model): dummy = models.IntegerField() class Inner5Stacked(models.Model): name = models.CharField(max_length=10) select = models.CharField(choices=(('1', 'One'), ('2', 'Two')), max_length=10) text = models.TextField() dummy = models.IntegerField() holder = models.ForeignKey(Holder5, models.CASCADE) class Inner5Tabular(models.Model): name = models.CharField(max_length=10) select = models.CharField(choices=(('1', 'One'), ('2', 'Two')), max_length=10) text = models.TextField() dummy = models.IntegerField() holder = models.ForeignKey(Holder5, models.CASCADE) # Models for #12749 class 
Person(models.Model): firstname = models.CharField(max_length=15) class OutfitItem(models.Model): name = models.CharField(max_length=15) class Fashionista(models.Model): person = models.OneToOneField(Person, models.CASCADE, primary_key=True) weaknesses = models.ManyToManyField(OutfitItem, through='ShoppingWeakness', blank=True) class ShoppingWeakness(models.Model): fashionista = models.ForeignKey(Fashionista, models.CASCADE) item = models.ForeignKey(OutfitItem, models.CASCADE) # Models for #13510 class TitleCollection(models.Model): pass class Title(models.Model): collection = models.ForeignKey(TitleCollection, models.SET_NULL, blank=True, null=True) title1 = models.CharField(max_length=100) title2 = models.CharField(max_length=100) # Models for #15424 class Poll(models.Model): name = models.CharField(max_length=40) class Question(models.Model): text = models.CharField(max_length=40) poll = models.ForeignKey(Poll, models.CASCADE) class Novel(models.Model): name = models.CharField(max_length=40) class NovelReadonlyChapter(Novel): class Meta: proxy = True class Chapter(models.Model): name = models.CharField(max_length=40) novel = models.ForeignKey(Novel, models.CASCADE) class FootNote(models.Model): """ Model added for ticket 19838 """ chapter = models.ForeignKey(Chapter, models.PROTECT) note = models.CharField(max_length=40) # Models for #16838 class CapoFamiglia(models.Model): name = models.CharField(max_length=100) class Consigliere(models.Model): name = models.CharField(max_length=100, help_text='Help text for Consigliere') capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE, related_name='+') class SottoCapo(models.Model): name = models.CharField(max_length=100) capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE, related_name='+') class ReadOnlyInline(models.Model): name = models.CharField(max_length=100, help_text='Help text for ReadOnlyInline') capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE) # Models for #18433 class ParentModelWithCustomPk(models.Model): my_own_pk = models.CharField(max_length=100, primary_key=True) name = models.CharField(max_length=100) class ChildModel1(models.Model): my_own_pk = models.CharField(max_length=100, primary_key=True) name = models.CharField(max_length=100) parent = models.ForeignKey(ParentModelWithCustomPk, models.CASCADE) def get_absolute_url(self): return '/child_model1/' class ChildModel2(models.Model): my_own_pk = models.CharField(max_length=100, primary_key=True) name = models.CharField(max_length=100) parent = models.ForeignKey(ParentModelWithCustomPk, models.CASCADE) def get_absolute_url(self): return '/child_model2/' # Models for #19425 class BinaryTree(models.Model): name = models.CharField(max_length=100) parent = models.ForeignKey('self', models.SET_NULL, null=True, blank=True) # Models for #19524 class LifeForm(models.Model): pass class ExtraTerrestrial(LifeForm): name = models.CharField(max_length=100) class Sighting(models.Model): et = models.ForeignKey(ExtraTerrestrial, models.CASCADE) place = models.CharField(max_length=100) # Models for #18263 class SomeParentModel(models.Model): name = models.CharField(max_length=1) class SomeChildModel(models.Model): name = models.CharField(max_length=1) position = models.PositiveIntegerField() parent = models.ForeignKey(SomeParentModel, models.CASCADE) readonly_field = models.CharField(max_length=1) # Models for #30231 class Course(models.Model): name = models.CharField(max_length=128) def __str__(self): return self.name class Class(models.Model): person = 
models.ManyToManyField(Person, verbose_name='attendant') course = models.ForeignKey(Course, on_delete=models.CASCADE) class CourseProxy(Course): class Meta: proxy = True class CourseProxy1(Course): class Meta: proxy = True class CourseProxy2(Course): class Meta: proxy = True # Other models class ShowInlineParent(models.Model): show_inlines = models.BooleanField(default=False) class ShowInlineChild(models.Model): parent = models.ForeignKey(ShowInlineParent, on_delete=models.CASCADE) class ProfileCollection(models.Model): pass class Profile(models.Model): collection = models.ForeignKey(ProfileCollection, models.SET_NULL, blank=True, null=True) first_name = models.CharField(max_length=100) last_name = models.CharField(max_length=100) class VerboseNameProfile(Profile): class Meta: verbose_name = 'Model with verbose name only' class VerboseNamePluralProfile(Profile): class Meta: verbose_name_plural = 'Model with verbose name plural only' class BothVerboseNameProfile(Profile): class Meta: verbose_name = 'Model with both - name' verbose_name_plural = 'Model with both - plural name'
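# Editor's note (illustrative, not part of the original models): when only a
# verbose_name is given (on a model's Meta, or on an inline class), Django
# derives the plural form by appending "s". That is why the admin inline tests
# expect strings such as "Model with verbose name onlys" and
# "Non-verbose childss", e.g.:
#
#     >>> str(VerboseNameProfile._meta.verbose_name_plural)
#     'Model with verbose name onlys'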
from django import forms from django.contrib import admin from django.core.exceptions import ValidationError from django.db import models from .models import ( Author, BinaryTree, CapoFamiglia, Chapter, Child, ChildModel1, ChildModel2, Class, Consigliere, Course, CourseProxy, CourseProxy1, CourseProxy2, EditablePKBook, ExtraTerrestrial, Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Holder5, Inner, Inner2, Inner3, Inner4Stacked, Inner4Tabular, Inner5Stacked, Inner5Tabular, NonAutoPKBook, NonAutoPKBookChild, Novel, NovelReadonlyChapter, OutfitItem, ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection, Question, ReadOnlyInline, ShoppingWeakness, ShowInlineChild, ShowInlineParent, Sighting, SomeChildModel, SomeParentModel, SottoCapo, Teacher, Title, TitleCollection, ) site = admin.AdminSite(name="admin") class BookInline(admin.TabularInline): model = Author.books.through class NonAutoPKBookTabularInline(admin.TabularInline): model = NonAutoPKBook classes = ('collapse',) class NonAutoPKBookChildTabularInline(admin.TabularInline): model = NonAutoPKBookChild classes = ('collapse',) class NonAutoPKBookStackedInline(admin.StackedInline): model = NonAutoPKBook classes = ('collapse',) class EditablePKBookTabularInline(admin.TabularInline): model = EditablePKBook class EditablePKBookStackedInline(admin.StackedInline): model = EditablePKBook class AuthorAdmin(admin.ModelAdmin): inlines = [ BookInline, NonAutoPKBookTabularInline, NonAutoPKBookStackedInline, EditablePKBookTabularInline, EditablePKBookStackedInline, NonAutoPKBookChildTabularInline, ] class InnerInline(admin.StackedInline): model = Inner can_delete = False readonly_fields = ('readonly',) # For bug #13174 tests. class HolderAdmin(admin.ModelAdmin): class Media: js = ('my_awesome_admin_scripts.js',) class ReadOnlyInlineInline(admin.TabularInline): model = ReadOnlyInline readonly_fields = ['name'] class InnerInline2(admin.StackedInline): model = Inner2 class Media: js = ('my_awesome_inline_scripts.js',) class InnerInline2Tabular(admin.TabularInline): model = Inner2 class CustomNumberWidget(forms.NumberInput): class Media: js = ('custom_number.js',) class InnerInline3(admin.StackedInline): model = Inner3 formfield_overrides = { models.IntegerField: {'widget': CustomNumberWidget}, } class Media: js = ('my_awesome_inline_scripts.js',) class TitleForm(forms.ModelForm): title1 = forms.CharField(max_length=100) def clean(self): cleaned_data = self.cleaned_data title1 = cleaned_data.get("title1") title2 = cleaned_data.get("title2") if title1 != title2: raise ValidationError("The two titles must be the same") return cleaned_data class TitleInline(admin.TabularInline): model = Title form = TitleForm extra = 1 class Inner4StackedInline(admin.StackedInline): model = Inner4Stacked show_change_link = True class Inner4TabularInline(admin.TabularInline): model = Inner4Tabular show_change_link = True class Holder4Admin(admin.ModelAdmin): inlines = [Inner4StackedInline, Inner4TabularInline] class Inner5StackedInline(admin.StackedInline): model = Inner5Stacked classes = ('collapse',) class Inner5TabularInline(admin.TabularInline): model = Inner5Tabular classes = ('collapse',) class Holder5Admin(admin.ModelAdmin): inlines = [Inner5StackedInline, Inner5TabularInline] class InlineWeakness(admin.TabularInline): model = ShoppingWeakness extra = 1 class WeaknessForm(forms.ModelForm): extra_field = forms.CharField() class Meta: model = ShoppingWeakness fields = '__all__' class WeaknessInlineCustomForm(admin.TabularInline): model = 
ShoppingWeakness form = WeaknessForm class FootNoteForm(forms.ModelForm): extra_field = forms.CharField() class Meta: model = FootNote fields = '__all__' class FootNoteNonEditableInlineCustomForm(admin.TabularInline): model = FootNote form = FootNoteForm def has_change_permission(self, request, obj=None): return False class QuestionInline(admin.TabularInline): model = Question readonly_fields = ['call_me'] def call_me(self, obj): return 'Callable in QuestionInline' class PollAdmin(admin.ModelAdmin): inlines = [QuestionInline] def call_me(self, obj): return 'Callable in PollAdmin' class ChapterInline(admin.TabularInline): model = Chapter readonly_fields = ['call_me'] def call_me(self, obj): return 'Callable in ChapterInline' class NovelAdmin(admin.ModelAdmin): inlines = [ChapterInline] class ReadOnlyChapterInline(admin.TabularInline): model = Chapter def has_change_permission(self, request, obj=None): return False class NovelReadonlyChapterAdmin(admin.ModelAdmin): inlines = [ReadOnlyChapterInline] class ConsigliereInline(admin.TabularInline): model = Consigliere class SottoCapoInline(admin.TabularInline): model = SottoCapo class ProfileInline(admin.TabularInline): model = Profile extra = 1 # admin for #18433 class ChildModel1Inline(admin.TabularInline): model = ChildModel1 class ChildModel2Inline(admin.StackedInline): model = ChildModel2 # admin for #19425 and #18388 class BinaryTreeAdmin(admin.TabularInline): model = BinaryTree def get_extra(self, request, obj=None, **kwargs): extra = 2 if obj: return extra - obj.binarytree_set.count() return extra def get_max_num(self, request, obj=None, **kwargs): max_num = 3 if obj: return max_num - obj.binarytree_set.count() return max_num # admin for #19524 class SightingInline(admin.TabularInline): model = Sighting # admin and form for #18263 class SomeChildModelForm(forms.ModelForm): class Meta: fields = '__all__' model = SomeChildModel widgets = { 'position': forms.HiddenInput, } labels = {'readonly_field': 'Label from ModelForm.Meta'} help_texts = {'readonly_field': 'Help text from ModelForm.Meta'} def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['name'].label = 'new label' class SomeChildModelInline(admin.TabularInline): model = SomeChildModel form = SomeChildModelForm readonly_fields = ('readonly_field',) class StudentInline(admin.StackedInline): model = Child extra = 1 fieldsets = [ ('Name', {'fields': ('name',), 'classes': ('collapse',)}), ] class TeacherAdmin(admin.ModelAdmin): inlines = [StudentInline] class AuthorTabularInline(admin.TabularInline): model = Author class FashonistaStackedInline(admin.StackedInline): model = Fashionista # Admin for #30231 class ClassStackedHorizontal(admin.StackedInline): model = Class extra = 1 filter_horizontal = ['person'] class ClassAdminStackedHorizontal(admin.ModelAdmin): inlines = [ClassStackedHorizontal] class ClassTabularHorizontal(admin.TabularInline): model = Class extra = 1 filter_horizontal = ['person'] class ClassAdminTabularHorizontal(admin.ModelAdmin): inlines = [ClassTabularHorizontal] class ClassTabularVertical(admin.TabularInline): model = Class extra = 1 filter_vertical = ['person'] class ClassAdminTabularVertical(admin.ModelAdmin): inlines = [ClassTabularVertical] class ClassStackedVertical(admin.StackedInline): model = Class extra = 1 filter_vertical = ['person'] class ClassAdminStackedVertical(admin.ModelAdmin): inlines = [ClassStackedVertical] class ChildHiddenFieldForm(forms.ModelForm): class Meta: model = SomeChildModel fields = ['name', 'position', 
'parent'] widgets = {'position': forms.HiddenInput} def _post_clean(self): super()._post_clean() if self.instance is not None and self.instance.position == 1: self.add_error(None, ValidationError('A non-field error')) class ChildHiddenFieldTabularInline(admin.TabularInline): model = SomeChildModel form = ChildHiddenFieldForm class ChildHiddenFieldInFieldsGroupStackedInline(admin.StackedInline): model = SomeChildModel form = ChildHiddenFieldForm fields = [('name', 'position')] class ChildHiddenFieldOnSingleLineStackedInline(admin.StackedInline): model = SomeChildModel form = ChildHiddenFieldForm fields = ('name', 'position') class ShowInlineChildInline(admin.StackedInline): model = ShowInlineChild class ShowInlineParentAdmin(admin.ModelAdmin): def get_inlines(self, request, obj): if obj is not None and obj.show_inlines: return [ShowInlineChildInline] return [] site.register(TitleCollection, inlines=[TitleInline]) # Test bug #12561 and #12778 # only ModelAdmin media site.register(Holder, HolderAdmin, inlines=[InnerInline]) # ModelAdmin and Inline media site.register(Holder2, HolderAdmin, inlines=[InnerInline2, InnerInline2Tabular]) # only Inline media site.register(Holder3, inlines=[InnerInline3]) site.register(Poll, PollAdmin) site.register(Novel, NovelAdmin) site.register(NovelReadonlyChapter, NovelReadonlyChapterAdmin) site.register(Fashionista, inlines=[InlineWeakness]) site.register(Holder4, Holder4Admin) site.register(Holder5, Holder5Admin) site.register(Author, AuthorAdmin) site.register(CapoFamiglia, inlines=[ConsigliereInline, SottoCapoInline, ReadOnlyInlineInline]) site.register(ProfileCollection, inlines=[ProfileInline]) site.register(ParentModelWithCustomPk, inlines=[ChildModel1Inline, ChildModel2Inline]) site.register(BinaryTree, inlines=[BinaryTreeAdmin]) site.register(ExtraTerrestrial, inlines=[SightingInline]) site.register(SomeParentModel, inlines=[SomeChildModelInline]) site.register([Question, Inner4Stacked, Inner4Tabular]) site.register(Teacher, TeacherAdmin) site.register(Chapter, inlines=[FootNoteNonEditableInlineCustomForm]) site.register(OutfitItem, inlines=[WeaknessInlineCustomForm]) site.register(Person, inlines=[AuthorTabularInline, FashonistaStackedInline]) site.register(Course, ClassAdminStackedHorizontal) site.register(CourseProxy, ClassAdminStackedVertical) site.register(CourseProxy1, ClassAdminTabularVertical) site.register(CourseProxy2, ClassAdminTabularHorizontal) site.register(ShowInlineParent, ShowInlineParentAdmin) # Used to test hidden fields in tabular and stacked inlines. site2 = admin.AdminSite(name='tabular_inline_hidden_field_admin') site2.register(SomeParentModel, inlines=[ChildHiddenFieldTabularInline]) site3 = admin.AdminSite(name='stacked_inline_hidden_field_in_group_admin') site3.register(SomeParentModel, inlines=[ChildHiddenFieldInFieldsGroupStackedInline]) site4 = admin.AdminSite(name='stacked_inline_hidden_field_on_single_line_admin') site4.register(SomeParentModel, inlines=[ChildHiddenFieldOnSingleLineStackedInline])
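# Editor's note (illustrative, not part of the original file):
# AdminSite.register() treats extra keyword options as ModelAdmin attributes,
# so calls above such as site.register(Holder3, inlines=[InnerInline3]) are
# shorthand for building and registering a ModelAdmin subclass, roughly:
#
#     class Holder3Admin(admin.ModelAdmin):
#         inlines = [InnerInline3]
#
#     site.register(Holder3, Holder3Admin)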
from django.urls import path

from . import admin

urlpatterns = [
    path('admin/', admin.site.urls),
    path('admin2/', admin.site2.urls),
    path('admin3/', admin.site3.urls),
    path('admin4/', admin.site4.urls),
]
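# Editor's note (illustrative, not part of the original URLconf): each
# AdminSite above is mounted with the app namespace 'admin' and an instance
# namespace equal to the site's name, so the extra sites can be reversed by
# that instance namespace, e.g. (URL name assumed from admin.py's
# registrations):
#
#     reverse('tabular_inline_hidden_field_admin:admin_inlines_someparentmodel_change', args=(obj.pk,))
#
# while the default site keeps the plain 'admin:' prefix used throughout
# tests.py.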
from copy import copy, deepcopy from django.core.checks import Error from django.core.checks.templates import ( E001, E002, E003, check_for_template_tags_with_the_same_name, check_setting_app_dirs_loaders, check_string_if_invalid_is_string, ) from django.test import SimpleTestCase from django.test.utils import override_settings class CheckTemplateSettingsAppDirsTest(SimpleTestCase): TEMPLATES_APP_DIRS_AND_LOADERS = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'loaders': ['django.template.loaders.filesystem.Loader'], }, }, ] @override_settings(TEMPLATES=TEMPLATES_APP_DIRS_AND_LOADERS) def test_app_dirs_and_loaders(self): """ Error if template loaders are specified and APP_DIRS is True. """ self.assertEqual(check_setting_app_dirs_loaders(None), [E001]) def test_app_dirs_removed(self): TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS) del TEMPLATES[0]['APP_DIRS'] with self.settings(TEMPLATES=TEMPLATES): self.assertEqual(check_setting_app_dirs_loaders(None), []) def test_loaders_removed(self): TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS) del TEMPLATES[0]['OPTIONS']['loaders'] with self.settings(TEMPLATES=TEMPLATES): self.assertEqual(check_setting_app_dirs_loaders(None), []) class CheckTemplateStringIfInvalidTest(SimpleTestCase): TEMPLATES_STRING_IF_INVALID = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'string_if_invalid': False, }, }, { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'string_if_invalid': 42, }, }, ] @classmethod def setUpClass(cls): super().setUpClass() cls.error1 = copy(E002) cls.error2 = copy(E002) string_if_invalid1 = cls.TEMPLATES_STRING_IF_INVALID[0]['OPTIONS']['string_if_invalid'] string_if_invalid2 = cls.TEMPLATES_STRING_IF_INVALID[1]['OPTIONS']['string_if_invalid'] cls.error1.msg = cls.error1.msg.format(string_if_invalid1, type(string_if_invalid1).__name__) cls.error2.msg = cls.error2.msg.format(string_if_invalid2, type(string_if_invalid2).__name__) @override_settings(TEMPLATES=TEMPLATES_STRING_IF_INVALID) def test_string_if_invalid_not_string(self): self.assertEqual(check_string_if_invalid_is_string(None), [self.error1, self.error2]) def test_string_if_invalid_first_is_string(self): TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID) TEMPLATES[0]['OPTIONS']['string_if_invalid'] = 'test' with self.settings(TEMPLATES=TEMPLATES): self.assertEqual(check_string_if_invalid_is_string(None), [self.error2]) def test_string_if_invalid_both_are_strings(self): TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID) TEMPLATES[0]['OPTIONS']['string_if_invalid'] = 'test' TEMPLATES[1]['OPTIONS']['string_if_invalid'] = 'test' with self.settings(TEMPLATES=TEMPLATES): self.assertEqual(check_string_if_invalid_is_string(None), []) def test_string_if_invalid_not_specified(self): TEMPLATES = deepcopy(self.TEMPLATES_STRING_IF_INVALID) del TEMPLATES[1]['OPTIONS']['string_if_invalid'] with self.settings(TEMPLATES=TEMPLATES): self.assertEqual(check_string_if_invalid_is_string(None), [self.error1]) class CheckTemplateTagLibrariesWithSameName(SimpleTestCase): @classmethod def setUpClass(cls): super().setUpClass() cls.error_same_tags = Error( E003.msg.format( "'same_tags'", "'check_framework.template_test_apps.same_tags_app_1." "templatetags.same_tags', " "'check_framework.template_test_apps.same_tags_app_2." 
"templatetags.same_tags'", ), id=E003.id, ) @staticmethod def get_settings(module_name, module_path): return { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'OPTIONS': { 'libraries': { module_name: f'check_framework.template_test_apps.{module_path}', }, }, } @override_settings(INSTALLED_APPS=[ 'check_framework.template_test_apps.same_tags_app_1', 'check_framework.template_test_apps.same_tags_app_2', ]) def test_template_tags_with_same_name(self): self.assertEqual( check_for_template_tags_with_the_same_name(None), [self.error_same_tags], ) def test_template_tags_with_same_library_name(self): with self.settings(TEMPLATES=[ self.get_settings('same_tags', 'same_tags_app_1.templatetags.same_tags'), self.get_settings('same_tags', 'same_tags_app_2.templatetags.same_tags'), ]): self.assertEqual( check_for_template_tags_with_the_same_name(None), [self.error_same_tags], ) @override_settings(INSTALLED_APPS=[ 'check_framework.template_test_apps.same_tags_app_1' ]) def test_template_tags_with_same_library_name_and_module_name(self): with self.settings(TEMPLATES=[ self.get_settings( 'same_tags', 'different_tags_app.templatetags.different_tags', ), ]): self.assertEqual(check_for_template_tags_with_the_same_name(None), [Error( E003.msg.format( "'same_tags'", "'check_framework.template_test_apps.different_tags_app." "templatetags.different_tags', " "'check_framework.template_test_apps.same_tags_app_1." "templatetags.same_tags'", ), id=E003.id, )]) def test_template_tags_with_different_library_name(self): with self.settings(TEMPLATES=[ self.get_settings('same_tags', 'same_tags_app_1.templatetags.same_tags'), self.get_settings('not_same_tags', 'same_tags_app_2.templatetags.same_tags'), ]): self.assertEqual(check_for_template_tags_with_the_same_name(None), []) @override_settings(INSTALLED_APPS=[ 'check_framework.template_test_apps.same_tags_app_1', 'check_framework.template_test_apps.different_tags_app', ]) def test_template_tags_with_different_name(self): self.assertEqual(check_for_template_tags_with_the_same_name(None), [])
7cb23e7b03b0279a35f84af8ea3a496f61b8fe1031bf6ebbaf84ba2413e2bd33
import datetime from decimal import Decimal from django.contrib.humanize.templatetags import humanize from django.template import Context, Template, defaultfilters from django.test import SimpleTestCase, modify_settings, override_settings from django.utils import translation from django.utils.html import escape from django.utils.timezone import get_fixed_timezone, utc from django.utils.translation import gettext as _ # Mock out datetime in some tests so they don't fail occasionally when they # run too slow. Use a fixed datetime for datetime.now(). DST change in # America/Chicago (the default time zone) happened on March 11th in 2012. now = datetime.datetime(2012, 3, 9, 22, 30) class MockDateTime(datetime.datetime): @classmethod def now(cls, tz=None): if tz is None or tz.utcoffset(now) is None: return now else: # equals now.replace(tzinfo=utc) return now.replace(tzinfo=tz) + tz.utcoffset(now) @modify_settings(INSTALLED_APPS={'append': 'django.contrib.humanize'}) class HumanizeTests(SimpleTestCase): def humanize_tester(self, test_list, result_list, method, normalize_result_func=escape): for test_content, result in zip(test_list, result_list): with self.subTest(test_content): t = Template('{%% load humanize %%}{{ test_content|%s }}' % method) rendered = t.render(Context(locals())).strip() self.assertEqual( rendered, normalize_result_func(result), msg="%s test failed, produced '%s', should've produced '%s'" % (method, rendered, result) ) def test_ordinal(self): test_list = ('1', '2', '3', '4', '11', '12', '13', '101', '102', '103', '111', 'something else', None) result_list = ('1st', '2nd', '3rd', '4th', '11th', '12th', '13th', '101st', '102nd', '103rd', '111th', 'something else', None) with translation.override('en'): self.humanize_tester(test_list, result_list, 'ordinal') def test_i18n_html_ordinal(self): """Allow html in output on i18n strings""" test_list = ('1', '2', '3', '4', '11', '12', '13', '101', '102', '103', '111', 'something else', None) result_list = ('1<sup>er</sup>', '2<sup>e</sup>', '3<sup>e</sup>', '4<sup>e</sup>', '11<sup>e</sup>', '12<sup>e</sup>', '13<sup>e</sup>', '101<sup>er</sup>', '102<sup>e</sup>', '103<sup>e</sup>', '111<sup>e</sup>', 'something else', 'None') with translation.override('fr-fr'): self.humanize_tester(test_list, result_list, 'ordinal', lambda x: x) def test_intcomma(self): test_list = ( 100, 1000, 10123, 10311, 1000000, 1234567.25, '100', '1000', '10123', '10311', '1000000', '1234567.1234567', Decimal('1234567.1234567'), None, '1234567', '1234567.12', ) result_list = ( '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.25', '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.1234567', '1,234,567.1234567', None, '1,234,567', '1,234,567.12', ) with translation.override('en'): self.humanize_tester(test_list, result_list, 'intcomma') def test_l10n_intcomma(self): test_list = ( 100, 1000, 10123, 10311, 1000000, 1234567.25, '100', '1000', '10123', '10311', '1000000', '1234567.1234567', Decimal('1234567.1234567'), None, '1234567', '1234567.12', ) result_list = ( '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.25', '100', '1,000', '10,123', '10,311', '1,000,000', '1,234,567.1234567', '1,234,567.1234567', None, '1,234,567', '1,234,567.12', ) with self.settings(USE_THOUSAND_SEPARATOR=False): with translation.override('en'): self.humanize_tester(test_list, result_list, 'intcomma') def test_intcomma_without_number_grouping(self): # Regression for #17414 with translation.override('ja'): self.humanize_tester([100], ['100'], 'intcomma') 
def test_intword(self): # Positive integers. test_list_positive = ( '100', '1000000', '1200000', '1290000', '1000000000', '2000000000', '6000000000000', '1300000000000000', '3500000000000000000000', '8100000000000000000000000000000000', ('1' + '0' * 100), ('1' + '0' * 104), ) result_list_positive = ( '100', '1.0 million', '1.2 million', '1.3 million', '1.0 billion', '2.0 billion', '6.0 trillion', '1.3 quadrillion', '3.5 sextillion', '8.1 decillion', '1.0 googol', ('1' + '0' * 104), ) # Negative integers. test_list_negative = ('-' + test for test in test_list_positive) result_list_negative = ('-' + result for result in result_list_positive) with translation.override('en'): self.humanize_tester( (*test_list_positive, *test_list_negative, None), (*result_list_positive, *result_list_negative, None), 'intword', ) def test_i18n_intcomma(self): test_list = (100, 1000, 10123, 10311, 1000000, 1234567.25, '100', '1000', '10123', '10311', '1000000', None) result_list = ('100', '1.000', '10.123', '10.311', '1.000.000', '1.234.567,25', '100', '1.000', '10.123', '10.311', '1.000.000', None) with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de'): self.humanize_tester(test_list, result_list, 'intcomma') def test_i18n_intword(self): # Positive integers. test_list_positive = ( '100', '1000000', '1200000', '1290000', '1000000000', '2000000000', '6000000000000', ) result_list_positive = ( '100', '1,0 Million', '1,2 Millionen', '1,3 Millionen', '1,0 Milliarde', '2,0 Milliarden', '6,0 Billionen', ) # Negative integers. test_list_negative = ('-' + test for test in test_list_positive) result_list_negative = ('-' + result for result in result_list_positive) with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override('de'): self.humanize_tester( (*test_list_positive, *test_list_negative), (*result_list_positive, *result_list_negative), 'intword', ) def test_apnumber(self): test_list = [str(x) for x in range(1, 11)] test_list.append(None) result_list = ('one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', '10', None) with translation.override('en'): self.humanize_tester(test_list, result_list, 'apnumber') def test_naturalday(self): today = datetime.date.today() yesterday = today - datetime.timedelta(days=1) tomorrow = today + datetime.timedelta(days=1) someday = today - datetime.timedelta(days=10) notdate = "I'm not a date value" test_list = (today, yesterday, tomorrow, someday, notdate, None) someday_result = defaultfilters.date(someday) result_list = (_('today'), _('yesterday'), _('tomorrow'), someday_result, "I'm not a date value", None) self.humanize_tester(test_list, result_list, 'naturalday') def test_naturalday_tz(self): today = datetime.date.today() tz_one = get_fixed_timezone(-720) tz_two = get_fixed_timezone(720) # Can be today or yesterday date_one = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_one) naturalday_one = humanize.naturalday(date_one) # Can be today or tomorrow date_two = datetime.datetime(today.year, today.month, today.day, tzinfo=tz_two) naturalday_two = humanize.naturalday(date_two) # As 24h of difference they will never be the same self.assertNotEqual(naturalday_one, naturalday_two) def test_naturalday_uses_localtime(self): # Regression for #18504 # This is 2012-03-08HT19:30:00-06:00 in America/Chicago dt = datetime.datetime(2012, 3, 9, 1, 30, tzinfo=utc) orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: with override_settings(TIME_ZONE="America/Chicago", USE_TZ=True): with 
translation.override('en'): self.humanize_tester([dt], ['yesterday'], 'naturalday') finally: humanize.datetime = orig_humanize_datetime def test_naturaltime(self): class naive(datetime.tzinfo): def utcoffset(self, dt): return None test_list = [ 'test', now, now - datetime.timedelta(microseconds=1), now - datetime.timedelta(seconds=1), now - datetime.timedelta(seconds=30), now - datetime.timedelta(minutes=1, seconds=30), now - datetime.timedelta(minutes=2), now - datetime.timedelta(hours=1, minutes=30, seconds=30), now - datetime.timedelta(hours=23, minutes=50, seconds=50), now - datetime.timedelta(days=1), now - datetime.timedelta(days=500), now + datetime.timedelta(seconds=1), now + datetime.timedelta(seconds=30), now + datetime.timedelta(minutes=1, seconds=30), now + datetime.timedelta(minutes=2), now + datetime.timedelta(hours=1, minutes=30, seconds=30), now + datetime.timedelta(hours=23, minutes=50, seconds=50), now + datetime.timedelta(days=1), now + datetime.timedelta(days=2, hours=6), now + datetime.timedelta(days=500), now.replace(tzinfo=naive()), now.replace(tzinfo=utc), ] result_list = [ 'test', 'now', 'now', 'a second ago', '30\xa0seconds ago', 'a minute ago', '2\xa0minutes ago', 'an hour ago', '23\xa0hours ago', '1\xa0day ago', '1\xa0year, 4\xa0months ago', 'a second from now', '30\xa0seconds from now', 'a minute from now', '2\xa0minutes from now', 'an hour from now', '23\xa0hours from now', '1\xa0day from now', '2\xa0days, 6\xa0hours from now', '1\xa0year, 4\xa0months from now', 'now', 'now', ] # Because of the DST change, 2 days and 6 hours after the chosen # date in naive arithmetic is only 2 days and 5 hours after in # aware arithmetic. result_list_with_tz_support = result_list[:] assert result_list_with_tz_support[-4] == '2\xa0days, 6\xa0hours from now' result_list_with_tz_support[-4] == '2\xa0days, 5\xa0hours from now' orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: with translation.override('en'): self.humanize_tester(test_list, result_list, 'naturaltime') with override_settings(USE_TZ=True): self.humanize_tester( test_list, result_list_with_tz_support, 'naturaltime') finally: humanize.datetime = orig_humanize_datetime def test_naturaltime_as_documented(self): """ #23340 -- Verify the documented behavior of humanize.naturaltime. 
""" time_format = '%d %b %Y %H:%M:%S' documented_now = datetime.datetime.strptime('17 Feb 2007 16:30:00', time_format) test_data = ( ('17 Feb 2007 16:30:00', 'now'), ('17 Feb 2007 16:29:31', '29 seconds ago'), ('17 Feb 2007 16:29:00', 'a minute ago'), ('17 Feb 2007 16:25:35', '4 minutes ago'), ('17 Feb 2007 15:30:29', '59 minutes ago'), ('17 Feb 2007 15:30:01', '59 minutes ago'), ('17 Feb 2007 15:30:00', 'an hour ago'), ('17 Feb 2007 13:31:29', '2 hours ago'), ('16 Feb 2007 13:31:29', '1 day, 2 hours ago'), ('16 Feb 2007 13:30:01', '1 day, 2 hours ago'), ('16 Feb 2007 13:30:00', '1 day, 3 hours ago'), ('17 Feb 2007 16:30:30', '30 seconds from now'), ('17 Feb 2007 16:30:29', '29 seconds from now'), ('17 Feb 2007 16:31:00', 'a minute from now'), ('17 Feb 2007 16:34:35', '4 minutes from now'), ('17 Feb 2007 17:30:29', 'an hour from now'), ('17 Feb 2007 18:31:29', '2 hours from now'), ('18 Feb 2007 16:31:29', '1 day from now'), ('26 Feb 2007 18:31:29', '1 week, 2 days from now'), ) class DocumentedMockDateTime(datetime.datetime): @classmethod def now(cls, tz=None): if tz is None or tz.utcoffset(documented_now) is None: return documented_now else: return documented_now.replace(tzinfo=tz) + tz.utcoffset(now) orig_humanize_datetime = humanize.datetime humanize.datetime = DocumentedMockDateTime try: for test_time_string, expected_natural_time in test_data: with self.subTest(test_time_string): test_time = datetime.datetime.strptime(test_time_string, time_format) natural_time = humanize.naturaltime(test_time).replace('\xa0', ' ') self.assertEqual(expected_natural_time, natural_time) finally: humanize.datetime = orig_humanize_datetime def test_inflection_for_timedelta(self): """ Translation of '%d day'/'%d month'/… may differ depending on the context of the string it is inserted in. """ test_list = [ # "%(delta)s ago" translations now - datetime.timedelta(days=1), now - datetime.timedelta(days=2), now - datetime.timedelta(days=30), now - datetime.timedelta(days=60), now - datetime.timedelta(days=500), now - datetime.timedelta(days=865), # "%(delta)s from now" translations now + datetime.timedelta(days=1), now + datetime.timedelta(days=2), now + datetime.timedelta(days=30), now + datetime.timedelta(days=60), now + datetime.timedelta(days=500), now + datetime.timedelta(days=865), ] result_list = [ 'před 1\xa0dnem', 'před 2\xa0dny', 'před 1\xa0měsícem', 'před 2\xa0měsíci', 'před 1\xa0rokem, 4\xa0měsíci', 'před 2\xa0lety, 4\xa0měsíci', 'za 1\xa0den', 'za 2\xa0dny', 'za 1\xa0měsíc', 'za 2\xa0měsíce', 'za 1\xa0rok, 4\xa0měsíce', 'za 2\xa0roky, 4\xa0měsíce', ] orig_humanize_datetime, humanize.datetime = humanize.datetime, MockDateTime try: # Choose a language with different naturaltime-past/naturaltime-future translations with translation.override('cs'): self.humanize_tester(test_list, result_list, 'naturaltime') finally: humanize.datetime = orig_humanize_datetime
c51e1c1dd073ab6388375c61b557ed0fc9216cad466239db1815d5585f3b5f43
from functools import update_wrapper, wraps from unittest import TestCase, mock from django.contrib.admin.views.decorators import staff_member_required from django.contrib.auth.decorators import ( login_required, permission_required, user_passes_test, ) from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed from django.middleware.clickjacking import XFrameOptionsMiddleware from django.test import SimpleTestCase from django.utils.decorators import method_decorator from django.utils.functional import keep_lazy, keep_lazy_text, lazy from django.utils.safestring import mark_safe from django.views.decorators.cache import ( cache_control, cache_page, never_cache, ) from django.views.decorators.clickjacking import ( xframe_options_deny, xframe_options_exempt, xframe_options_sameorigin, ) from django.views.decorators.http import ( condition, require_GET, require_http_methods, require_POST, require_safe, ) from django.views.decorators.vary import vary_on_cookie, vary_on_headers def fully_decorated(request): """Expected __doc__""" return HttpResponse('<html><body>dummy</body></html>') fully_decorated.anything = "Expected __dict__" def compose(*functions): # compose(f, g)(*args, **kwargs) == f(g(*args, **kwargs)) functions = list(reversed(functions)) def _inner(*args, **kwargs): result = functions[0](*args, **kwargs) for f in functions[1:]: result = f(result) return result return _inner full_decorator = compose( # django.views.decorators.http require_http_methods(["GET"]), require_GET, require_POST, require_safe, condition(lambda r: None, lambda r: None), # django.views.decorators.vary vary_on_headers('Accept-language'), vary_on_cookie, # django.views.decorators.cache cache_page(60 * 15), cache_control(private=True), never_cache, # django.contrib.auth.decorators # Apply user_passes_test twice to check #9474 user_passes_test(lambda u: True), login_required, permission_required('change_world'), # django.contrib.admin.views.decorators staff_member_required, # django.utils.functional keep_lazy(HttpResponse), keep_lazy_text, lazy, # django.utils.safestring mark_safe, ) fully_decorated = full_decorator(fully_decorated) class DecoratorsTest(TestCase): def test_attributes(self): """ Built-in decorators set certain attributes of the wrapped function. """ self.assertEqual(fully_decorated.__name__, 'fully_decorated') self.assertEqual(fully_decorated.__doc__, 'Expected __doc__') self.assertEqual(fully_decorated.__dict__['anything'], 'Expected __dict__') def test_user_passes_test_composition(self): """ The user_passes_test decorator can be applied multiple times (#9474). """ def test1(user): user.decorators_applied.append('test1') return True def test2(user): user.decorators_applied.append('test2') return True def callback(request): return request.user.decorators_applied callback = user_passes_test(test1)(callback) callback = user_passes_test(test2)(callback) class DummyUser: pass class DummyRequest: pass request = DummyRequest() request.user = DummyUser() request.user.decorators_applied = [] response = callback(request) self.assertEqual(response, ['test2', 'test1']) def test_cache_page(self): def my_view(request): return "response" my_view_cached = cache_page(123)(my_view) self.assertEqual(my_view_cached(HttpRequest()), "response") my_view_cached2 = cache_page(123, key_prefix="test")(my_view) self.assertEqual(my_view_cached2(HttpRequest()), "response") def test_require_safe_accepts_only_safe_methods(self): """ Test for the require_safe decorator. 
A view returns either a response or an exception. Refs #15637. """ def my_view(request): return HttpResponse("OK") my_safe_view = require_safe(my_view) request = HttpRequest() request.method = 'GET' self.assertIsInstance(my_safe_view(request), HttpResponse) request.method = 'HEAD' self.assertIsInstance(my_safe_view(request), HttpResponse) request.method = 'POST' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) request.method = 'PUT' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) request.method = 'DELETE' self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed) # For testing method_decorator, a decorator that assumes a single argument. # We will get type arguments if there is a mismatch in the number of arguments. def simple_dec(func): def wrapper(arg): return func("test:" + arg) return wraps(func)(wrapper) simple_dec_m = method_decorator(simple_dec) # For testing method_decorator, two decorators that add an attribute to the function def myattr_dec(func): def wrapper(*args, **kwargs): return func(*args, **kwargs) wrapper.myattr = True return wrapper myattr_dec_m = method_decorator(myattr_dec) def myattr2_dec(func): def wrapper(*args, **kwargs): return func(*args, **kwargs) wrapper.myattr2 = True return wrapper myattr2_dec_m = method_decorator(myattr2_dec) class ClsDec: def __init__(self, myattr): self.myattr = myattr def __call__(self, f): def wrapped(): return f() and self.myattr return update_wrapper(wrapped, f) class MethodDecoratorTests(SimpleTestCase): """ Tests for method_decorator """ def test_preserve_signature(self): class Test: @simple_dec_m def say(self, arg): return arg self.assertEqual("test:hello", Test().say("hello")) def test_preserve_attributes(self): # Sanity check myattr_dec and myattr2_dec @myattr_dec def func(): pass self.assertIs(getattr(func, 'myattr', False), True) @myattr2_dec def func(): pass self.assertIs(getattr(func, 'myattr2', False), True) @myattr_dec @myattr2_dec def func(): pass self.assertIs(getattr(func, 'myattr', False), True) self.assertIs(getattr(func, 'myattr2', False), False) # Decorate using method_decorator() on the method. class TestPlain: @myattr_dec_m @myattr2_dec_m def method(self): "A method" pass # Decorate using method_decorator() on both the class and the method. # The decorators applied to the methods are applied before the ones # applied to the class. @method_decorator(myattr_dec_m, "method") class TestMethodAndClass: @method_decorator(myattr2_dec_m) def method(self): "A method" pass # Decorate using an iterable of function decorators. @method_decorator((myattr_dec, myattr2_dec), 'method') class TestFunctionIterable: def method(self): "A method" pass # Decorate using an iterable of method decorators. 
decorators = (myattr_dec_m, myattr2_dec_m) @method_decorator(decorators, "method") class TestMethodIterable: def method(self): "A method" pass tests = (TestPlain, TestMethodAndClass, TestFunctionIterable, TestMethodIterable) for Test in tests: with self.subTest(Test=Test): self.assertIs(getattr(Test().method, 'myattr', False), True) self.assertIs(getattr(Test().method, 'myattr2', False), True) self.assertIs(getattr(Test.method, 'myattr', False), True) self.assertIs(getattr(Test.method, 'myattr2', False), True) self.assertEqual(Test.method.__doc__, 'A method') self.assertEqual(Test.method.__name__, 'method') def test_new_attribute(self): """A decorator that sets a new attribute on the method.""" def decorate(func): func.x = 1 return func class MyClass: @method_decorator(decorate) def method(self): return True obj = MyClass() self.assertEqual(obj.method.x, 1) self.assertIs(obj.method(), True) def test_bad_iterable(self): decorators = {myattr_dec_m, myattr2_dec_m} msg = "'set' object is not subscriptable" with self.assertRaisesMessage(TypeError, msg): @method_decorator(decorators, "method") class TestIterable: def method(self): "A method" pass # Test for argumented decorator def test_argumented(self): class Test: @method_decorator(ClsDec(False)) def method(self): return True self.assertIs(Test().method(), False) def test_descriptors(self): def original_dec(wrapped): def _wrapped(arg): return wrapped(arg) return _wrapped method_dec = method_decorator(original_dec) class bound_wrapper: def __init__(self, wrapped): self.wrapped = wrapped self.__name__ = wrapped.__name__ def __call__(self, arg): return self.wrapped(arg) def __get__(self, instance, cls=None): return self class descriptor_wrapper: def __init__(self, wrapped): self.wrapped = wrapped self.__name__ = wrapped.__name__ def __get__(self, instance, cls=None): return bound_wrapper(self.wrapped.__get__(instance, cls)) class Test: @method_dec @descriptor_wrapper def method(self, arg): return arg self.assertEqual(Test().method(1), 1) def test_class_decoration(self): """ @method_decorator can be used to decorate a class and its methods. """ def deco(func): def _wrapper(*args, **kwargs): return True return _wrapper @method_decorator(deco, name="method") class Test: def method(self): return False self.assertTrue(Test().method()) def test_tuple_of_decorators(self): """ @method_decorator can accept a tuple of decorators. """ def add_question_mark(func): def _wrapper(*args, **kwargs): return func(*args, **kwargs) + "?" return _wrapper def add_exclamation_mark(func): def _wrapper(*args, **kwargs): return func(*args, **kwargs) + "!" return _wrapper # The order should be consistent with the usual order in which # decorators are applied, e.g. # @add_exclamation_mark # @add_question_mark # def func(): # ... decorators = (add_exclamation_mark, add_question_mark) @method_decorator(decorators, name="method") class TestFirst: def method(self): return "hello world" class TestSecond: @method_decorator(decorators) def method(self): return "hello world" self.assertEqual(TestFirst().method(), "hello world?!") self.assertEqual(TestSecond().method(), "hello world?!") def test_invalid_non_callable_attribute_decoration(self): """ @method_decorator on a non-callable attribute raises an error. 
""" msg = ( "Cannot decorate 'prop' as it isn't a callable attribute of " "<class 'Test'> (1)" ) with self.assertRaisesMessage(TypeError, msg): @method_decorator(lambda: None, name="prop") class Test: prop = 1 @classmethod def __module__(cls): return "tests" def test_invalid_method_name_to_decorate(self): """ @method_decorator on a nonexistent method raises an error. """ msg = ( "The keyword argument `name` must be the name of a method of the " "decorated class: <class 'Test'>. Got 'nonexistent_method' instead" ) with self.assertRaisesMessage(ValueError, msg): @method_decorator(lambda: None, name='nonexistent_method') class Test: @classmethod def __module__(cls): return "tests" def test_wrapper_assignments(self): """@method_decorator preserves wrapper assignments.""" func_name = None func_module = None def decorator(func): @wraps(func) def inner(*args, **kwargs): nonlocal func_name, func_module func_name = getattr(func, '__name__', None) func_module = getattr(func, '__module__', None) return func(*args, **kwargs) return inner class Test: @method_decorator(decorator) def method(self): return 'tests' Test().method() self.assertEqual(func_name, 'method') self.assertIsNotNone(func_module) class XFrameOptionsDecoratorsTests(TestCase): """ Tests for the X-Frame-Options decorators. """ def test_deny_decorator(self): """ Ensures @xframe_options_deny properly sets the X-Frame-Options header. """ @xframe_options_deny def a_view(request): return HttpResponse() r = a_view(HttpRequest()) self.assertEqual(r.headers['X-Frame-Options'], 'DENY') def test_sameorigin_decorator(self): """ Ensures @xframe_options_sameorigin properly sets the X-Frame-Options header. """ @xframe_options_sameorigin def a_view(request): return HttpResponse() r = a_view(HttpRequest()) self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN') def test_exempt_decorator(self): """ Ensures @xframe_options_exempt properly instructs the XFrameOptionsMiddleware to NOT set the header. """ @xframe_options_exempt def a_view(request): return HttpResponse() req = HttpRequest() resp = a_view(req) self.assertIsNone(resp.get('X-Frame-Options', None)) self.assertTrue(resp.xframe_options_exempt) # Since the real purpose of the exempt decorator is to suppress # the middleware's functionality, let's make sure it actually works... r = XFrameOptionsMiddleware(a_view)(req) self.assertIsNone(r.get('X-Frame-Options', None)) class HttpRequestProxy: def __init__(self, request): self._request = request def __getattr__(self, attr): """Proxy to the underlying HttpRequest object.""" return getattr(self._request, attr) class NeverCacheDecoratorTest(SimpleTestCase): @mock.patch('time.time') def test_never_cache_decorator_headers(self, mocked_time): @never_cache def a_view(request): return HttpResponse() mocked_time.return_value = 1167616461.0 response = a_view(HttpRequest()) self.assertEqual( response.headers['Expires'], 'Mon, 01 Jan 2007 01:54:21 GMT', ) self.assertEqual( response.headers['Cache-Control'], 'max-age=0, no-cache, no-store, must-revalidate, private', ) def test_never_cache_decorator_expires_not_overridden(self): @never_cache def a_view(request): return HttpResponse(headers={'Expires': 'tomorrow'}) response = a_view(HttpRequest()) self.assertEqual(response.headers['Expires'], 'tomorrow') def test_never_cache_decorator_http_request(self): class MyClass: @never_cache def a_view(self, request): return HttpResponse() request = HttpRequest() msg = ( "never_cache didn't receive an HttpRequest. 
If you are decorating " "a classmethod, be sure to use @method_decorator." ) with self.assertRaisesMessage(TypeError, msg): MyClass().a_view(request) with self.assertRaisesMessage(TypeError, msg): MyClass().a_view(HttpRequestProxy(request)) def test_never_cache_decorator_http_request_proxy(self): class MyClass: @method_decorator(never_cache) def a_view(self, request): return HttpResponse() request = HttpRequest() response = MyClass().a_view(HttpRequestProxy(request)) self.assertIn('Cache-Control', response.headers) self.assertIn('Expires', response.headers) class CacheControlDecoratorTest(SimpleTestCase): def test_cache_control_decorator_http_request(self): class MyClass: @cache_control(a='b') def a_view(self, request): return HttpResponse() msg = ( "cache_control didn't receive an HttpRequest. If you are " "decorating a classmethod, be sure to use @method_decorator." ) request = HttpRequest() with self.assertRaisesMessage(TypeError, msg): MyClass().a_view(request) with self.assertRaisesMessage(TypeError, msg): MyClass().a_view(HttpRequestProxy(request)) def test_cache_control_decorator_http_request_proxy(self): class MyClass: @method_decorator(cache_control(a='b')) def a_view(self, request): return HttpResponse() request = HttpRequest() response = MyClass().a_view(HttpRequestProxy(request)) self.assertEqual(response.headers['Cache-Control'], 'a=b')
9697a1629977eb658781fe85fb212a6f28d506d3a154f2084b567f95b57e9607
from django.db import models from django.test import TestCase from .models import ( Book, Car, ConfusedBook, CustomManager, CustomQuerySet, DeconstructibleCustomManager, FastCarAsBase, FastCarAsDefault, FunPerson, OneToOneRestrictedModel, Person, PersonFromAbstract, PersonManager, PublishedBookManager, RelatedModel, RestrictedModel, ) class CustomManagerTests(TestCase): custom_manager_names = [ 'custom_queryset_default_manager', 'custom_queryset_custom_manager', ] @classmethod def setUpTestData(cls): cls.b1 = Book.published_objects.create( title="How to program", author="Rodney Dangerfield", is_published=True) cls.b2 = Book.published_objects.create( title="How to be smart", author="Albert Einstein", is_published=False) cls.p1 = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True) cls.droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False) def test_custom_manager_basic(self): """ Test a custom Manager method. """ self.assertQuerysetEqual( Person.objects.get_fun_people(), [ "Bugs Bunny" ], str ) def test_queryset_copied_to_default(self): """ The methods of a custom QuerySet are properly copied onto the default Manager. """ for manager_name in self.custom_manager_names: with self.subTest(manager_name=manager_name): manager = getattr(Person, manager_name) # Public methods are copied manager.public_method() # Private methods are not copied with self.assertRaises(AttributeError): manager._private_method() def test_manager_honors_queryset_only(self): for manager_name in self.custom_manager_names: with self.subTest(manager_name=manager_name): manager = getattr(Person, manager_name) # Methods with queryset_only=False are copied even if they are private. manager._optin_private_method() # Methods with queryset_only=True aren't copied even if they are public. msg = "%r object has no attribute 'optout_public_method'" % manager.__class__.__name__ with self.assertRaisesMessage(AttributeError, msg): manager.optout_public_method() def test_manager_use_queryset_methods(self): """ Custom manager will use the queryset methods """ for manager_name in self.custom_manager_names: with self.subTest(manager_name=manager_name): manager = getattr(Person, manager_name) queryset = manager.filter() self.assertQuerysetEqual(queryset, ["Bugs Bunny"], str) self.assertIs(queryset._filter_CustomQuerySet, True) # Specialized querysets inherit from our custom queryset. queryset = manager.values_list('first_name', flat=True).filter() self.assertEqual(list(queryset), ["Bugs"]) self.assertIs(queryset._filter_CustomQuerySet, True) self.assertIsInstance(queryset.values(), CustomQuerySet) self.assertIsInstance(queryset.values().values(), CustomQuerySet) self.assertIsInstance(queryset.values_list().values(), CustomQuerySet) def test_init_args(self): """ The custom manager __init__() argument has been set. """ self.assertEqual(Person.custom_queryset_custom_manager.init_arg, 'hello') def test_manager_attributes(self): """ Custom manager method is only available on the manager and not on querysets. """ Person.custom_queryset_custom_manager.manager_only() msg = "'CustomQuerySet' object has no attribute 'manager_only'" with self.assertRaisesMessage(AttributeError, msg): Person.custom_queryset_custom_manager.all().manager_only() def test_queryset_and_manager(self): """ Queryset method doesn't override the custom manager method. 
""" queryset = Person.custom_queryset_custom_manager.filter() self.assertQuerysetEqual(queryset, ["Bugs Bunny"], str) self.assertIs(queryset._filter_CustomManager, True) def test_related_manager(self): """ The related managers extend the default manager. """ self.assertIsInstance(self.droopy.books, PublishedBookManager) self.assertIsInstance(self.b2.authors, PersonManager) def test_no_objects(self): """ The default manager, "objects", doesn't exist, because a custom one was provided. """ msg = "type object 'Book' has no attribute 'objects'" with self.assertRaisesMessage(AttributeError, msg): Book.objects def test_filtering(self): """ Custom managers respond to usual filtering methods """ self.assertQuerysetEqual( Book.published_objects.all(), [ "How to program", ], lambda b: b.title ) def test_fk_related_manager(self): Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1) Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1) FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1) FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1) self.assertQuerysetEqual( self.b1.favorite_books.order_by('first_name').all(), [ "Bugs", "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.fun_people_favorite_books.all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_books(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_books(manager='fun_people').all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) def test_fk_related_manager_reused(self): self.assertIs(self.b1.favorite_books, self.b1.favorite_books) self.assertIn('favorite_books', self.b1._state.related_managers_cache) def test_gfk_related_manager(self): Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1) Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1) FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1) FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1) self.assertQuerysetEqual( self.b1.favorite_things.all(), [ "Bugs", "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.fun_people_favorite_things.all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_things(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_things(manager='fun_people').all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) def test_gfk_related_manager_reused(self): self.assertIs( self.b1.fun_people_favorite_things, self.b1.fun_people_favorite_things, ) self.assertIn( 'fun_people_favorite_things', self.b1._state.related_managers_cache, ) def test_gfk_related_manager_not_reused_when_alternate(self): self.assertIsNot( self.b1.favorite_things(manager='fun_people'), self.b1.favorite_things(manager='fun_people'), ) def test_gfk_related_manager_no_overlap_when_not_hidden(self): """ If a GenericRelation defines a related_query_name (and thus the related_name) which shadows another GenericRelation, it should not cause those separate managers to clash. 
""" book = ConfusedBook.objects.create( title='How to program', author='Rodney Dangerfield', ) person = Person.objects.create( first_name='Bugs', last_name='Bunny', fun=True, favorite_thing=book, ) fun_person = FunPerson.objects.create( first_name='Droopy', last_name='Dog', fun=False, favorite_thing=book, ) # The managers don't collide in the internal cache. self.assertIsNot(book.favorite_things, book.less_favorite_things) self.assertIs(book.favorite_things, book.favorite_things) self.assertIs(book.less_favorite_things, book.less_favorite_things) # Both managers are cached separately despite the collision in names. self.assertIn('favorite_things', book._state.related_managers_cache) self.assertIn('less_favorite_things', book._state.related_managers_cache) # "less_favorite_things" isn't available as a reverse related manager, # so never ends up in the cache. self.assertQuerysetEqual(fun_person.favorite_things.all(), [book]) with self.assertRaises(AttributeError): fun_person.less_favorite_things self.assertIn('favorite_things', fun_person._state.related_managers_cache) self.assertNotIn( 'less_favorite_things', fun_person._state.related_managers_cache, ) # The GenericRelation doesn't exist for Person, only FunPerson, so the # exception prevents the cache from being polluted. with self.assertRaises(AttributeError): person.favorite_things self.assertNotIn('favorite_things', person._state.related_managers_cache) def test_m2m_related_manager(self): bugs = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True) self.b1.authors.add(bugs) droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False) self.b1.authors.add(droopy) bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True) self.b1.fun_authors.add(bugs) droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False) self.b1.fun_authors.add(droopy) self.assertQuerysetEqual( self.b1.authors.order_by('first_name').all(), [ "Bugs", "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.fun_authors.order_by('first_name').all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.authors(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.authors(manager='fun_people').all(), [ "Bugs", ], lambda c: c.first_name, ordered=False, ) def test_m2m_related_forward_manager_reused(self): self.assertIs(self.b1.authors, self.b1.authors) self.assertIn('authors', self.b1._state.related_managers_cache) def test_m2m_related_revers_manager_reused(self): bugs = Person.objects.create(first_name='Bugs', last_name='Bunny') self.b1.authors.add(bugs) self.assertIs(bugs.books, bugs.books) self.assertIn('books', bugs._state.related_managers_cache) def test_removal_through_default_fk_related_manager(self, bulk=True): bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1) droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1) self.b1.fun_people_favorite_books.remove(droopy, bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.filter(favorite_book=self.b1), [ "Bugs", "Droopy", ], lambda c: c.first_name, ordered=False, ) self.b1.fun_people_favorite_books.remove(bugs, bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.filter(favorite_book=self.b1), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) bugs.favorite_book = self.b1 bugs.save() 
self.b1.fun_people_favorite_books.clear(bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.filter(favorite_book=self.b1), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) def test_slow_removal_through_default_fk_related_manager(self): self.test_removal_through_default_fk_related_manager(bulk=False) def test_removal_through_specified_fk_related_manager(self, bulk=True): Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_book=self.b1) droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_book=self.b1) # The fun manager DOESN'T remove boring people. self.b1.favorite_books(manager='fun_people').remove(droopy, bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_books(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) # The boring manager DOES remove boring people. self.b1.favorite_books(manager='boring_people').remove(droopy, bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_books(manager='boring_people').all(), [ ], lambda c: c.first_name, ordered=False, ) droopy.favorite_book = self.b1 droopy.save() # The fun manager ONLY clears fun people. self.b1.favorite_books(manager='fun_people').clear(bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_books(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_books(manager='fun_people').all(), [ ], lambda c: c.first_name, ordered=False, ) def test_slow_removal_through_specified_fk_related_manager(self): self.test_removal_through_specified_fk_related_manager(bulk=False) def test_removal_through_default_gfk_related_manager(self, bulk=True): bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1) droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1) self.b1.fun_people_favorite_things.remove(droopy, bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [ "Bugs", "Droopy", ], lambda c: c.first_name, ordered=False, ) self.b1.fun_people_favorite_things.remove(bugs, bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) bugs.favorite_book = self.b1 bugs.save() self.b1.fun_people_favorite_things.clear(bulk=bulk) self.assertQuerysetEqual( FunPerson._base_manager.order_by('first_name').filter(favorite_thing_id=self.b1.pk), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) def test_slow_removal_through_default_gfk_related_manager(self): self.test_removal_through_default_gfk_related_manager(bulk=False) def test_removal_through_specified_gfk_related_manager(self, bulk=True): Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1) droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1) # The fun manager DOESN'T remove boring people. self.b1.favorite_things(manager='fun_people').remove(droopy, bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_things(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) # The boring manager DOES remove boring people. 
self.b1.favorite_things(manager='boring_people').remove(droopy, bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_things(manager='boring_people').all(), [ ], lambda c: c.first_name, ordered=False, ) droopy.favorite_thing = self.b1 droopy.save() # The fun manager ONLY clears fun people. self.b1.favorite_things(manager='fun_people').clear(bulk=bulk) self.assertQuerysetEqual( self.b1.favorite_things(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.favorite_things(manager='fun_people').all(), [ ], lambda c: c.first_name, ordered=False, ) def test_slow_removal_through_specified_gfk_related_manager(self): self.test_removal_through_specified_gfk_related_manager(bulk=False) def test_removal_through_default_m2m_related_manager(self): bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True) self.b1.fun_authors.add(bugs) droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False) self.b1.fun_authors.add(droopy) self.b1.fun_authors.remove(droopy) self.assertQuerysetEqual( self.b1.fun_authors.through._default_manager.all(), [ "Bugs", "Droopy", ], lambda c: c.funperson.first_name, ordered=False, ) self.b1.fun_authors.remove(bugs) self.assertQuerysetEqual( self.b1.fun_authors.through._default_manager.all(), [ "Droopy", ], lambda c: c.funperson.first_name, ordered=False, ) self.b1.fun_authors.add(bugs) self.b1.fun_authors.clear() self.assertQuerysetEqual( self.b1.fun_authors.through._default_manager.all(), [ "Droopy", ], lambda c: c.funperson.first_name, ordered=False, ) def test_removal_through_specified_m2m_related_manager(self): bugs = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True) self.b1.authors.add(bugs) droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False) self.b1.authors.add(droopy) # The fun manager DOESN'T remove boring people. self.b1.authors(manager='fun_people').remove(droopy) self.assertQuerysetEqual( self.b1.authors(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) # The boring manager DOES remove boring people. self.b1.authors(manager='boring_people').remove(droopy) self.assertQuerysetEqual( self.b1.authors(manager='boring_people').all(), [ ], lambda c: c.first_name, ordered=False, ) self.b1.authors.add(droopy) # The fun manager ONLY clears fun people. 
self.b1.authors(manager='fun_people').clear() self.assertQuerysetEqual( self.b1.authors(manager='boring_people').all(), [ "Droopy", ], lambda c: c.first_name, ordered=False, ) self.assertQuerysetEqual( self.b1.authors(manager='fun_people').all(), [ ], lambda c: c.first_name, ordered=False, ) def test_deconstruct_default(self): mgr = models.Manager() as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct() self.assertFalse(as_manager) self.assertEqual(mgr_path, 'django.db.models.manager.Manager') self.assertEqual(args, ()) self.assertEqual(kwargs, {}) def test_deconstruct_as_manager(self): mgr = CustomQuerySet.as_manager() as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct() self.assertTrue(as_manager) self.assertEqual(qs_path, 'custom_managers.models.CustomQuerySet') def test_deconstruct_from_queryset(self): mgr = DeconstructibleCustomManager('a', 'b') as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct() self.assertFalse(as_manager) self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager') self.assertEqual(args, ('a', 'b',)) self.assertEqual(kwargs, {}) mgr = DeconstructibleCustomManager('x', 'y', c=3, d=4) as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct() self.assertFalse(as_manager) self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager') self.assertEqual(args, ('x', 'y',)) self.assertEqual(kwargs, {'c': 3, 'd': 4}) def test_deconstruct_from_queryset_failing(self): mgr = CustomManager('arg') msg = ("Could not find manager BaseCustomManagerFromCustomQuerySet in " "django.db.models.manager.\n" "Please note that you need to inherit from managers you " "dynamically generated with 'from_queryset()'.") with self.assertRaisesMessage(ValueError, msg): mgr.deconstruct() def test_abstract_model_with_custom_manager_name(self): """ A custom manager may be defined on an abstract model. It will be inherited by the abstract model's children. """ PersonFromAbstract.abstract_persons.create(objects='Test') self.assertQuerysetEqual( PersonFromAbstract.abstract_persons.all(), ["Test"], lambda c: c.objects, ) class TestCars(TestCase): def test_managers(self): # Each model class gets a "_default_manager" attribute, which is a # reference to the first manager defined in the class. Car.cars.create(name="Corvette", mileage=21, top_speed=180) Car.cars.create(name="Neon", mileage=31, top_speed=100) self.assertQuerysetEqual( Car._default_manager.order_by("name"), [ "Corvette", "Neon", ], lambda c: c.name ) self.assertQuerysetEqual( Car.cars.order_by("name"), [ "Corvette", "Neon", ], lambda c: c.name ) # alternate manager self.assertQuerysetEqual( Car.fast_cars.all(), [ "Corvette", ], lambda c: c.name ) # explicit default manager self.assertQuerysetEqual( FastCarAsDefault.cars.order_by("name"), [ "Corvette", "Neon", ], lambda c: c.name ) self.assertQuerysetEqual( FastCarAsDefault._default_manager.all(), [ "Corvette", ], lambda c: c.name ) # explicit base manager self.assertQuerysetEqual( FastCarAsBase.cars.order_by("name"), [ "Corvette", "Neon", ], lambda c: c.name ) self.assertQuerysetEqual( FastCarAsBase._base_manager.all(), [ "Corvette", ], lambda c: c.name ) class CustomManagersRegressTestCase(TestCase): def test_filtered_default_manager(self): """Even though the default manager filters out some records, we must still be able to save (particularly, save by updating existing records) those filtered instances. 
This is a regression test for #8990, #9527""" related = RelatedModel.objects.create(name="xyzzy") obj = RestrictedModel.objects.create(name="hidden", related=related) obj.name = "still hidden" obj.save() # If the hidden object wasn't seen during the save process, # there would now be two objects in the database. self.assertEqual(RestrictedModel.plain_manager.count(), 1) def test_refresh_from_db_when_default_manager_filters(self): """ Model.refresh_from_db() works for instances hidden by the default manager. """ book = Book._base_manager.create(is_published=False) Book._base_manager.filter(pk=book.pk).update(title='Hi') book.refresh_from_db() self.assertEqual(book.title, 'Hi') def test_save_clears_annotations_from_base_manager(self): """Model.save() clears annotations from the base manager.""" self.assertEqual(Book._meta.base_manager.name, 'annotated_objects') book = Book.annotated_objects.create(title='Hunting') Person.objects.create( first_name='Bugs', last_name='Bunny', fun=True, favorite_book=book, favorite_thing_id=1, ) book = Book.annotated_objects.first() self.assertEqual(book.favorite_avg, 1) # Annotation from the manager. book.title = 'New Hunting' # save() fails if annotations that involve related fields aren't # cleared before the update query. book.save() self.assertEqual(Book.annotated_objects.first().title, 'New Hunting') def test_delete_related_on_filtered_manager(self): """Deleting related objects should also not be distracted by a restricted manager on the related object. This is a regression test for #2698.""" related = RelatedModel.objects.create(name="xyzzy") for name, public in (('one', True), ('two', False), ('three', False)): RestrictedModel.objects.create(name=name, is_public=public, related=related) obj = RelatedModel.objects.get(name="xyzzy") obj.delete() # All of the RestrictedModel instances should have been # deleted, since they *all* pointed to the RelatedModel. If # the default manager is used, only the public one will be # deleted. self.assertEqual(len(RestrictedModel.plain_manager.all()), 0) def test_delete_one_to_one_manager(self): # The same test case as the last one, but for one-to-one # models, which are implemented slightly different internally, # so it's a different code path. obj = RelatedModel.objects.create(name="xyzzy") OneToOneRestrictedModel.objects.create(name="foo", is_public=False, related=obj) obj = RelatedModel.objects.get(name="xyzzy") obj.delete() self.assertEqual(len(OneToOneRestrictedModel.plain_manager.all()), 0) def test_queryset_with_custom_init(self): """ BaseManager.get_queryset() should use kwargs rather than args to allow custom kwargs (#24911). """ qs_custom = Person.custom_init_queryset_manager.all() qs_default = Person.objects.all() self.assertQuerysetEqual(qs_custom, qs_default)
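# Editorial sketch: several tests above rely on managers produced with
# Manager.from_queryset(), which copies public QuerySet methods onto the
# manager. The minimal pattern looks like this; the model and field names are
# illustrative assumptions, not part of the test suite.
from django.db import models


class PublishedQuerySet(models.QuerySet):
    def published(self):
        return self.filter(is_published=True)


class Article(models.Model):
    title = models.CharField(max_length=100)
    is_published = models.BooleanField(default=False)

    # Chainable on both the manager and its querysets:
    # Article.objects.published() and Article.objects.filter(...).published()
    objects = models.Manager.from_queryset(PublishedQuerySet)()

    class Meta:
        app_label = 'custom_managers'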
cf4e27b352484e40d979b07ea753bcea021d917232f0120b45018af056d89ee0
""" Giving models a custom manager You can use a custom ``Manager`` in a particular model by extending the base ``Manager`` class and instantiating your custom ``Manager`` in your model. There are two reasons you might want to customize a ``Manager``: to add extra ``Manager`` methods, and/or to modify the initial ``QuerySet`` the ``Manager`` returns. """ from django.contrib.contenttypes.fields import ( GenericForeignKey, GenericRelation, ) from django.db import models class PersonManager(models.Manager): def get_fun_people(self): return self.filter(fun=True) class PublishedBookManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(is_published=True) class AnnotatedBookManager(models.Manager): def get_queryset(self): return super().get_queryset().annotate( favorite_avg=models.Avg('favorite_books__favorite_thing_id') ) class CustomQuerySet(models.QuerySet): def filter(self, *args, **kwargs): queryset = super().filter(fun=True) queryset._filter_CustomQuerySet = True return queryset def public_method(self, *args, **kwargs): return self.all() def _private_method(self, *args, **kwargs): return self.all() def optout_public_method(self, *args, **kwargs): return self.all() optout_public_method.queryset_only = True def _optin_private_method(self, *args, **kwargs): return self.all() _optin_private_method.queryset_only = False class BaseCustomManager(models.Manager): def __init__(self, arg): super().__init__() self.init_arg = arg def filter(self, *args, **kwargs): queryset = super().filter(fun=True) queryset._filter_CustomManager = True return queryset def manager_only(self): return self.all() CustomManager = BaseCustomManager.from_queryset(CustomQuerySet) class CustomInitQuerySet(models.QuerySet): # QuerySet with an __init__() method that takes an additional argument. 
def __init__(self, custom_optional_arg=None, model=None, query=None, using=None, hints=None): super().__init__(model=model, query=query, using=using, hints=hints) class DeconstructibleCustomManager(BaseCustomManager.from_queryset(CustomQuerySet)): def __init__(self, a, b, c=1, d=2): super().__init__(a) class FunPeopleManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(fun=True) class BoringPeopleManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(fun=False) class Person(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) fun = models.BooleanField(default=False) favorite_book = models.ForeignKey('Book', models.SET_NULL, null=True, related_name='favorite_books') favorite_thing_type = models.ForeignKey('contenttypes.ContentType', models.SET_NULL, null=True) favorite_thing_id = models.IntegerField(null=True) favorite_thing = GenericForeignKey('favorite_thing_type', 'favorite_thing_id') objects = PersonManager() fun_people = FunPeopleManager() boring_people = BoringPeopleManager() custom_queryset_default_manager = CustomQuerySet.as_manager() custom_queryset_custom_manager = CustomManager('hello') custom_init_queryset_manager = CustomInitQuerySet.as_manager() def __str__(self): return "%s %s" % (self.first_name, self.last_name) class FunPerson(models.Model): first_name = models.CharField(max_length=30) last_name = models.CharField(max_length=30) fun = models.BooleanField(default=True) favorite_book = models.ForeignKey( 'Book', models.SET_NULL, null=True, related_name='fun_people_favorite_books', ) favorite_thing_type = models.ForeignKey('contenttypes.ContentType', models.SET_NULL, null=True) favorite_thing_id = models.IntegerField(null=True) favorite_thing = GenericForeignKey('favorite_thing_type', 'favorite_thing_id') objects = FunPeopleManager() class Book(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=30) is_published = models.BooleanField(default=False) authors = models.ManyToManyField(Person, related_name='books') fun_authors = models.ManyToManyField(FunPerson, related_name='books') favorite_things = GenericRelation( Person, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', ) fun_people_favorite_things = GenericRelation( FunPerson, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', ) published_objects = PublishedBookManager() annotated_objects = AnnotatedBookManager() class Meta: base_manager_name = 'annotated_objects' class ConfusedBook(models.Model): title = models.CharField(max_length=50) author = models.CharField(max_length=30) favorite_things = GenericRelation( Person, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', ) less_favorite_things = GenericRelation( FunPerson, content_type_field='favorite_thing_type', object_id_field='favorite_thing_id', related_query_name='favorite_things', ) class FastCarManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(top_speed__gt=150) class Car(models.Model): name = models.CharField(max_length=10) mileage = models.IntegerField() top_speed = models.IntegerField(help_text="In miles per hour.") cars = models.Manager() fast_cars = FastCarManager() class FastCarAsBase(Car): class Meta: proxy = True base_manager_name = 'fast_cars' class FastCarAsDefault(Car): class Meta: proxy = True default_manager_name = 'fast_cars' class RestrictedManager(models.Manager): def 
get_queryset(self): return super().get_queryset().filter(is_public=True) class RelatedModel(models.Model): name = models.CharField(max_length=50) class RestrictedModel(models.Model): name = models.CharField(max_length=50) is_public = models.BooleanField(default=False) related = models.ForeignKey(RelatedModel, models.CASCADE) objects = RestrictedManager() plain_manager = models.Manager() class OneToOneRestrictedModel(models.Model): name = models.CharField(max_length=50) is_public = models.BooleanField(default=False) related = models.OneToOneField(RelatedModel, models.CASCADE) objects = RestrictedManager() plain_manager = models.Manager() class AbstractPerson(models.Model): abstract_persons = models.Manager() objects = models.CharField(max_length=30) class Meta: abstract = True class PersonFromAbstract(AbstractPerson): pass
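# Editorial sketch of how the managers defined above behave at query time
# (assumes the test database for this app exists; the helper name and the
# created rows are illustrative):
def manager_behaviour_examples():
    Person.objects.create(first_name='Bugs', last_name='Bunny', fun=True)
    Book.published_objects.create(
        title='How to program', author='Rodney Dangerfield', is_published=True,
    )
    # Extra method added by PersonManager on top of the default QuerySet API.
    fun_people = Person.objects.get_fun_people()
    # PublishedBookManager narrows the initial QuerySet, so unpublished books
    # never appear through this manager.
    published = Book.published_objects.all()
    # Car._default_manager is 'cars' (the first manager declared), while the
    # proxy models override it via Meta.default_manager_name/base_manager_name.
    fast = Car.fast_cars.all()
    return fun_people, published, fast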
""" A series of tests to establish that the command-line management tools work as advertised - especially with regards to the handling of the DJANGO_SETTINGS_MODULE and default settings.py files. """ import os import re import shutil import socket import stat import subprocess import sys import tempfile import unittest from io import StringIO from unittest import mock from django import conf, get_version from django.conf import settings from django.core.management import ( BaseCommand, CommandError, call_command, color, execute_from_command_line, ) from django.core.management.commands.loaddata import Command as LoaddataCommand from django.core.management.commands.runserver import ( Command as RunserverCommand, ) from django.core.management.commands.testserver import ( Command as TestserverCommand, ) from django.db import ConnectionHandler, connection from django.db.migrations.recorder import MigrationRecorder from django.test import ( LiveServerTestCase, SimpleTestCase, TestCase, override_settings, ) from django.test.utils import captured_stderr, captured_stdout from django.urls import path from django.utils.version import PY39 from django.views.static import serve from . import urls custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates') SYSTEM_CHECK_MSG = 'System check identified no issues' class AdminScriptTestCase(SimpleTestCase): def setUp(self): tmpdir = tempfile.TemporaryDirectory() self.addCleanup(tmpdir.cleanup) # os.path.realpath() is required for temporary directories on macOS, # where `/var` is a symlink to `/private/var`. self.test_dir = os.path.realpath(os.path.join(tmpdir.name, 'test_project')) os.mkdir(self.test_dir) def write_settings(self, filename, apps=None, is_dir=False, sdict=None, extra=None): if is_dir: settings_dir = os.path.join(self.test_dir, filename) os.mkdir(settings_dir) settings_file_path = os.path.join(settings_dir, '__init__.py') else: settings_file_path = os.path.join(self.test_dir, filename) with open(settings_file_path, 'w') as settings_file: settings_file.write('# Settings file automatically generated by admin_scripts test case\n') if extra: settings_file.write("%s\n" % extra) exports = [ 'DATABASES', 'DEFAULT_AUTO_FIELD', 'ROOT_URLCONF', 'SECRET_KEY', 'USE_TZ', ] for s in exports: if hasattr(settings, s): o = getattr(settings, s) if not isinstance(o, (dict, tuple, list)): o = "'%s'" % o settings_file.write("%s = %s\n" % (s, o)) if apps is None: apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'] settings_file.write("INSTALLED_APPS = %s\n" % apps) if sdict: for k, v in sdict.items(): settings_file.write("%s = %s\n" % (k, v)) def _ext_backend_paths(self): """ Returns the paths for any external backend packages. """ paths = [] for backend in settings.DATABASES.values(): package = backend['ENGINE'].split('.')[0] if package != 'django': backend_pkg = __import__(package) backend_dir = os.path.dirname(backend_pkg.__file__) paths.append(os.path.dirname(backend_dir)) return paths def run_test(self, args, settings_file=None, apps=None, umask=None): base_dir = os.path.dirname(self.test_dir) # The base dir for Django's tests is one level up. tests_dir = os.path.dirname(os.path.dirname(__file__)) # The base dir for Django is one level above the test dir. We don't use # `import django` to figure that out, so we don't pick up a Django # from site-packages or similar. 
django_dir = os.path.dirname(tests_dir) ext_backend_base_dirs = self._ext_backend_paths() # Define a temporary environment for the subprocess test_environ = os.environ.copy() # Set the test environment if settings_file: test_environ['DJANGO_SETTINGS_MODULE'] = settings_file elif 'DJANGO_SETTINGS_MODULE' in test_environ: del test_environ['DJANGO_SETTINGS_MODULE'] python_path = [base_dir, django_dir, tests_dir] python_path.extend(ext_backend_base_dirs) test_environ['PYTHONPATH'] = os.pathsep.join(python_path) test_environ['PYTHONWARNINGS'] = '' p = subprocess.run( [sys.executable, *args], capture_output=True, cwd=self.test_dir, env=test_environ, text=True, # subprocess.run()'s umask was added in Python 3.9. **({'umask': umask} if umask and PY39 else {}), ) return p.stdout, p.stderr def run_django_admin(self, args, settings_file=None, umask=None): return self.run_test(['-m', 'django', *args], settings_file, umask=umask) def run_manage(self, args, settings_file=None, manage_py=None): template_manage_py = ( os.path.join(os.path.dirname(__file__), manage_py) if manage_py else os.path.join(os.path.dirname(conf.__file__), 'project_template', 'manage.py-tpl') ) test_manage_py = os.path.join(self.test_dir, 'manage.py') shutil.copyfile(template_manage_py, test_manage_py) with open(test_manage_py) as fp: manage_py_contents = fp.read() manage_py_contents = manage_py_contents.replace( "{{ project_name }}", "test_project") with open(test_manage_py, 'w') as fp: fp.write(manage_py_contents) return self.run_test(['./manage.py', *args], settings_file) def assertNoOutput(self, stream): "Utility assertion: assert that the given stream is empty" self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream) def assertOutput(self, stream, msg, regex=False): "Utility assertion: assert that the given message exists in the output" if regex: self.assertIsNotNone( re.search(msg, stream), "'%s' does not match actual output text '%s'" % (msg, stream) ) else: self.assertIn(msg, stream, "'%s' does not match actual output text '%s'" % (msg, stream)) def assertNotInOutput(self, stream, msg): "Utility assertion: assert that the given message doesn't exist in the output" self.assertNotIn(msg, stream, "'%s' matches actual output text '%s'" % (msg, stream)) ########################################################################## # DJANGO ADMIN TESTS # This first series of test classes checks the environment processing # of the django-admin. ########################################################################## class DjangoAdminNoSettings(AdminScriptTestCase): "A series of tests for django-admin when there is no settings.py file." 
def test_builtin_command(self): "no settings: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_bad_settings(self): "no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_commands_with_invalid_settings(self): """ Commands that don't require settings succeed if the settings file doesn't exist. """ args = ['startproject'] out, err = self.run_django_admin(args, settings_file='bad_settings') self.assertNoOutput(out) self.assertOutput(err, "You must provide a project name", regex=True) class DjangoAdminDefaultSettings(AdminScriptTestCase): """ A series of tests for django-admin when using a settings.py file that contains the test application. """ def setUp(self): super().setUp() self.write_settings('settings.py') def test_builtin_command(self): "default: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "default: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "default: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "default: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "default: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "default: django-admin can't execute user commands if it isn't provided settings" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "default: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = 
self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "default: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase): """ A series of tests for django-admin when using a settings.py file that contains the test application specified using a full path. """ def setUp(self): super().setUp() self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts', 'admin_scripts.complex_app']) def test_builtin_command(self): "fulldefault: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "fulldefault: django-admin builtin commands succeed if a settings file is provided" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "fulldefault: django-admin builtin commands succeed if the environment contains settings" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "fulldefault: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "fulldefault: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "fulldefault: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminMinimalSettings(AdminScriptTestCase): """ A series of tests for django-admin when using a settings.py file that doesn't contain the test application. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) def test_builtin_command(self): "minimal: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "minimal: django-admin builtin commands fail if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_environment(self): "minimal: django-admin builtin commands fail if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_bad_settings(self): "minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "minimal: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "minimal: django-admin can't execute user commands, even if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_environment(self): "minimal: django-admin can't execute user commands, even if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") class DjangoAdminAlternateSettings(AdminScriptTestCase): """ A series of tests for django-admin when using a settings file with a name other than 'settings.py'. 
""" def setUp(self): super().setUp() self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "alternate: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "alternate: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "alternate: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.alternate_settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "alternate: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminMultipleSettings(AdminScriptTestCase): """ A series of tests for django-admin when multiple settings files (including the default 'settings.py') are available. The default settings file is insufficient for performing the operations described, so the alternate settings must be used by the running script. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "alternate: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "alternate: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "alternate: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.alternate_settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "alternate: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminSettingsDirectory(AdminScriptTestCase): """ A series of tests for django-admin when the settings file is in a directory. (see #9751). 
""" def setUp(self): super().setUp() self.write_settings('settings', is_dir=True) def test_setup_environ(self): "directory: startapp creates the correct directory" args = ['startapp', 'settings_test'] app_path = os.path.join(self.test_dir, 'settings_test') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) with open(os.path.join(app_path, 'apps.py')) as f: content = f.read() self.assertIn("class SettingsTestConfig(AppConfig)", content) self.assertIn("name = 'settings_test'", content) def test_setup_environ_custom_template(self): "directory: startapp creates the correct directory with a custom template" template_path = os.path.join(custom_templates_dir, 'app_template') args = ['startapp', '--template', template_path, 'custom_settings_test'] app_path = os.path.join(self.test_dir, 'custom_settings_test') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py'))) def test_startapp_unicode_name(self): """startapp creates the correct directory with Unicode characters.""" args = ['startapp', 'こんにちは'] app_path = os.path.join(self.test_dir, 'こんにちは') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) with open(os.path.join(app_path, 'apps.py'), encoding='utf8') as f: content = f.read() self.assertIn("class こんにちはConfig(AppConfig)", content) self.assertIn("name = 'こんにちは'", content) def test_builtin_command(self): "directory: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_bad_settings(self): "directory: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "directory: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "directory: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_builtin_with_settings(self): "directory: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "directory: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) ########################################################################## # MANAGE.PY TESTS # This next series of test classes checks 
the environment processing # of the generated manage.py script ########################################################################## class ManageManuallyConfiguredSettings(AdminScriptTestCase): """Customized manage.py calling settings.configure().""" def test_non_existent_command_output(self): out, err = self.run_manage(['invalid_command'], manage_py='configured_settings_manage.py') self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'invalid_command'") self.assertNotInOutput(err, 'No Django settings specified') class ManageNoSettings(AdminScriptTestCase): "A series of tests for manage.py when there is no settings.py file." def test_builtin_command(self): "no settings: manage.py builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True) def test_builtin_with_bad_settings(self): "no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) class ManageDefaultSettings(AdminScriptTestCase): """A series of tests for manage.py when using a settings.py file that contains the test application. """ def setUp(self): super().setUp() self.write_settings('settings.py') def test_builtin_command(self): "default: manage.py builtin commands succeed when default settings are appropriate" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_settings(self): "default: manage.py builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "default: manage.py builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "default: manage.py builtin commands succeed if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "default: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "default: manage.py can execute user commands when default settings are appropriate" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def 
test_custom_command_with_settings(self): "default: manage.py can execute user commands when settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "default: manage.py can execute user commands when settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class ManageFullPathDefaultSettings(AdminScriptTestCase): """A series of tests for manage.py when using a settings.py file that contains the test application specified using a full path. """ def setUp(self): super().setUp() self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts']) def test_builtin_command(self): "fulldefault: manage.py builtin commands succeed when default settings are appropriate" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_settings(self): "fulldefault: manage.py builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "fulldefault: manage.py builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "fulldefault: manage.py builtin commands succeed if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "fulldefault: manage.py can execute user commands when default settings are appropriate" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_settings(self): "fulldefault: manage.py can execute user commands when settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "fulldefault: manage.py can execute user commands when settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class ManageMinimalSettings(AdminScriptTestCase): """A series of tests for manage.py when using a settings.py file that doesn't contain the test application. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) def test_builtin_command(self): "minimal: manage.py builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_settings(self): "minimal: manage.py builtin commands fail if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_environment(self): "minimal: manage.py builtin commands fail if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_bad_settings(self): "minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "minimal: manage.py can't execute user commands without appropriate settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "minimal: manage.py can't execute user commands, even if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_environment(self): "minimal: manage.py can't execute user commands, even if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") class ManageAlternateSettings(AdminScriptTestCase): """A series of tests for manage.py when using a settings file with a name other than 'settings.py'. 
""" def setUp(self): super().setUp() self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: manage.py builtin commands fail with an error when no default settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True) def test_builtin_with_settings(self): "alternate: manage.py builtin commands work with settings provided as argument" args = ['check', '--settings=alternate_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertOutput(out, SYSTEM_CHECK_MSG) self.assertNoOutput(err) def test_builtin_with_environment(self): "alternate: manage.py builtin commands work if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'alternate_settings') self.assertOutput(out, SYSTEM_CHECK_MSG) self.assertNoOutput(err) def test_builtin_with_bad_settings(self): "alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: manage.py can't execute user commands without settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True) def test_custom_command_with_settings(self): "alternate: manage.py can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', False), ('pythonpath', None), ('settings', " "'alternate_settings'), ('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) def test_custom_command_with_environment(self): "alternate: manage.py can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'alternate_settings') self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', False), ('pythonpath', None), ('settings', None), " "('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) def test_custom_command_output_color(self): "alternate: manage.py output syntax color can be deactivated with the `--no-color` option" args = ['noargs_command', '--no-color', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', True), ('pythonpath', None), ('settings', " "'alternate_settings'), ('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) class ManageMultipleSettings(AdminScriptTestCase): """A series of tests for manage.py when multiple settings files (including the default 'settings.py') are available. The default settings file is insufficient for performing the operations described, so the alternate settings must be used by the running script. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_builtin_command(self): "multiple: manage.py builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_settings(self): "multiple: manage.py builtin commands succeed if settings are provided as argument" args = ['check', '--settings=alternate_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "multiple: manage.py can execute builtin commands if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "multiple: manage.py can't execute user commands using default settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "multiple: manage.py can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "multiple: manage.py can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class ManageSettingsWithSettingsErrors(AdminScriptTestCase): """ Tests for manage.py when using the default settings.py file containing runtime errors. """ def write_settings_with_import_error(self, filename): settings_file_path = os.path.join(self.test_dir, filename) with open(settings_file_path, 'w') as settings_file: settings_file.write('# Settings file automatically generated by admin_scripts test case\n') settings_file.write('# The next line will cause an import error:\nimport foo42bar\n') def test_import_error(self): """ import error: manage.py builtin commands shows useful diagnostic info when settings with import errors is provided (#14130). """ self.write_settings_with_import_error('settings.py') args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named") self.assertOutput(err, "foo42bar") def test_attribute_error(self): """ manage.py builtin commands does not swallow attribute error due to bad settings (#18845). 
""" self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'}) args = ['collectstatic', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "AttributeError: 'list' object has no attribute 'crash'") def test_key_error(self): self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'}) args = ['collectstatic', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "KeyError: 'blah'") def test_help(self): """ Test listing available commands output note when only core commands are available. """ self.write_settings( 'settings.py', extra='from django.core.exceptions import ImproperlyConfigured\n' 'raise ImproperlyConfigured()', ) args = ['help'] out, err = self.run_manage(args) self.assertOutput(out, 'only Django core commands are listed') self.assertNoOutput(err) class ManageCheck(AdminScriptTestCase): def test_nonexistent_app(self): """check reports an error on a nonexistent app in INSTALLED_APPS.""" self.write_settings( 'settings.py', apps=['admin_scriptz.broken_app'], sdict={'USE_I18N': False}, ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, 'ModuleNotFoundError') self.assertOutput(err, 'No module named') self.assertOutput(err, 'admin_scriptz') def test_broken_app(self): """ manage.py check reports an ImportError if an app's models.py raises one on import """ self.write_settings('settings.py', apps=['admin_scripts.broken_app']) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, 'ImportError') def test_complex_app(self): """ manage.py check does not raise an ImportError validating a complex app with nested calls to load_app """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.complex_app', 'admin_scripts.simple_app', 'django.contrib.admin.apps.SimpleAdminConfig', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', ], sdict={ 'DEBUG': True, 'MIDDLEWARE': [ 'django.contrib.messages.middleware.MessageMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', ], 'TEMPLATES': [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ], } ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertEqual(out, 'System check identified no issues (0 silenced).\n') def test_app_with_import(self): """ manage.py check does not raise errors when an app imports a base class that itself has an abstract base. """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_with_import', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertEqual(out, 'System check identified no issues (0 silenced).\n') def test_output_format(self): """ All errors/warnings should be sorted by level and by message. 
""" self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_raising_messages', 'django.contrib.auth', 'django.contrib.contenttypes', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) expected_err = ( "SystemCheckError: System check identified some issues:\n" "\n" "ERRORS:\n" "?: An error\n" "\tHINT: Error hint\n" "\n" "WARNINGS:\n" "a: Second warning\n" "obj: First warning\n" "\tHINT: Hint\n" "\n" "System check identified 3 issues (0 silenced).\n" ) self.assertEqual(err, expected_err) self.assertNoOutput(out) def test_warning_does_not_halt(self): """ When there are only warnings or less serious messages, then Django should not prevent user from launching their project, so `check` command should not raise `CommandError` exception. In this test we also test output format. """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_raising_warning', 'django.contrib.auth', 'django.contrib.contenttypes', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) expected_err = ( "System check identified some issues:\n" # No "CommandError: " part "\n" "WARNINGS:\n" "?: A warning\n" "\n" "System check identified 1 issue (0 silenced).\n" ) self.assertEqual(err, expected_err) self.assertNoOutput(out) class ManageRunserver(SimpleTestCase): def setUp(self): def monkey_run(*args, **options): return self.output = StringIO() self.cmd = RunserverCommand(stdout=self.output) self.cmd.run = monkey_run def assertServerSettings(self, addr, port, ipv6=False, raw_ipv6=False): self.assertEqual(self.cmd.addr, addr) self.assertEqual(self.cmd.port, port) self.assertEqual(self.cmd.use_ipv6, ipv6) self.assertEqual(self.cmd._raw_ipv6, raw_ipv6) def test_runserver_addrport(self): call_command(self.cmd) self.assertServerSettings('127.0.0.1', '8000') call_command(self.cmd, addrport="1.2.3.4:8000") self.assertServerSettings('1.2.3.4', '8000') call_command(self.cmd, addrport="7000") self.assertServerSettings('127.0.0.1', '7000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_addrport_ipv6(self): call_command(self.cmd, addrport="", use_ipv6=True) self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True) call_command(self.cmd, addrport="7000", use_ipv6=True) self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True) call_command(self.cmd, addrport="[2001:0db8:1234:5678::9]:7000") self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True) def test_runner_hostname(self): call_command(self.cmd, addrport="localhost:8000") self.assertServerSettings('localhost', '8000') call_command(self.cmd, addrport="test.domain.local:7000") self.assertServerSettings('test.domain.local', '7000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_hostname_ipv6(self): call_command(self.cmd, addrport="test.domain.local:7000", use_ipv6=True) self.assertServerSettings('test.domain.local', '7000', ipv6=True) def test_runner_custom_defaults(self): self.cmd.default_addr = '0.0.0.0' self.cmd.default_port = '5000' call_command(self.cmd) self.assertServerSettings('0.0.0.0', '5000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_custom_defaults_ipv6(self): self.cmd.default_addr_ipv6 = '::' call_command(self.cmd, use_ipv6=True) self.assertServerSettings('::', '8000', ipv6=True, raw_ipv6=True) def test_runner_ambiguous(self): # Only 4 characters, all of which could be in an ipv6 address call_command(self.cmd, addrport="beef:7654") 
self.assertServerSettings('beef', '7654') # Uses only characters that could be in an ipv6 address call_command(self.cmd, addrport="deadbeef:7654") self.assertServerSettings('deadbeef', '7654') def test_no_database(self): """ Ensure runserver.check_migrations doesn't choke on empty DATABASES. """ tested_connections = ConnectionHandler({}) with mock.patch('django.core.management.base.connections', new=tested_connections): self.cmd.check_migrations() def test_readonly_database(self): """ runserver.check_migrations() doesn't choke when a database is read-only. """ with mock.patch.object(MigrationRecorder, 'has_table', return_value=False): self.cmd.check_migrations() # You have # ... self.assertIn('unapplied migration(s)', self.output.getvalue()) @mock.patch('django.core.management.commands.runserver.run') @mock.patch('django.core.management.base.BaseCommand.check_migrations') @mock.patch('django.core.management.base.BaseCommand.check') def test_skip_checks(self, mocked_check, *mocked_objects): call_command( 'runserver', use_reloader=False, skip_checks=True, stdout=self.output, ) self.assertNotIn('Performing system checks...', self.output.getvalue()) mocked_check.assert_not_called() self.output.truncate(0) call_command( 'runserver', use_reloader=False, skip_checks=False, stdout=self.output, ) self.assertIn('Performing system checks...', self.output.getvalue()) mocked_check.assert_called() class ManageRunserverMigrationWarning(TestCase): def setUp(self): self.stdout = StringIO() self.runserver_command = RunserverCommand(stdout=self.stdout) @override_settings(INSTALLED_APPS=["admin_scripts.app_waiting_migration"]) def test_migration_warning_one_app(self): self.runserver_command.check_migrations() output = self.stdout.getvalue() self.assertIn('You have 1 unapplied migration(s)', output) self.assertIn('apply the migrations for app(s): app_waiting_migration.', output) @override_settings( INSTALLED_APPS=[ "admin_scripts.app_waiting_migration", "admin_scripts.another_app_waiting_migration", ], ) def test_migration_warning_multiple_apps(self): self.runserver_command.check_migrations() output = self.stdout.getvalue() self.assertIn('You have 2 unapplied migration(s)', output) self.assertIn( 'apply the migrations for app(s): another_app_waiting_migration, ' 'app_waiting_migration.', output ) class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase): def setUp(self): super().setUp() self.write_settings('settings.py', sdict={ 'ALLOWED_HOSTS': [], 'DEBUG': False, }) def test_empty_allowed_hosts_error(self): out, err = self.run_manage(['runserver']) self.assertNoOutput(out) self.assertOutput(err, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.') class ManageRunserverHelpOutput(AdminScriptTestCase): def test_suppressed_options(self): """runserver doesn't support --verbosity and --trackback options.""" out, err = self.run_manage(['runserver', '--help']) self.assertNotInOutput(out, '--verbosity') self.assertNotInOutput(out, '--trackback') self.assertOutput(out, '--settings') class ManageTestserver(SimpleTestCase): @mock.patch.object(TestserverCommand, 'handle', return_value='') def test_testserver_handle_params(self, mock_handle): out = StringIO() call_command('testserver', 'blah.json', stdout=out) mock_handle.assert_called_with( 'blah.json', stdout=out, settings=None, pythonpath=None, verbosity=1, traceback=False, addrport='', no_color=False, use_ipv6=False, skip_checks=True, interactive=True, force_color=False, ) @mock.patch('django.db.connection.creation.create_test_db', 
return_value='test_db') @mock.patch.object(LoaddataCommand, 'handle', return_value='') @mock.patch.object(RunserverCommand, 'handle', return_value='') def test_params_to_runserver(self, mock_runserver_handle, mock_loaddata_handle, mock_create_test_db): call_command('testserver', 'blah.json') mock_runserver_handle.assert_called_with( addrport='', force_color=False, insecure_serving=False, no_color=False, pythonpath=None, settings=None, shutdown_message=( "\nServer stopped.\nNote that the test database, 'test_db', " "has not been deleted. You can explore it on your own." ), skip_checks=True, traceback=False, use_ipv6=False, use_reloader=False, use_static_handler=True, use_threading=connection.features.test_db_allows_multiple_connections, verbosity=1, ) ########################################################################## # COMMAND PROCESSING TESTS # user-space commands are correctly handled - in particular, arguments to # the commands are correctly parsed and processed. ########################################################################## class ColorCommand(BaseCommand): requires_system_checks = [] def handle(self, *args, **options): self.stdout.write('Hello, world!', self.style.ERROR) self.stderr.write('Hello, world!', self.style.ERROR) class CommandTypes(AdminScriptTestCase): "Tests for the various types of base command types that can be defined." def setUp(self): super().setUp() self.write_settings('settings.py') def test_version(self): "version is handled as a special case" args = ['version'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, get_version()) def test_version_alternative(self): "--version is equivalent to version" args1, args2 = ['version'], ['--version'] # It's possible one outputs on stderr and the other on stdout, hence the set self.assertEqual(set(self.run_manage(args1)), set(self.run_manage(args2))) def test_help(self): "help is handled as a special case" args = ['help'] out, err = self.run_manage(args) self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.") self.assertOutput(out, '[django]') self.assertOutput(out, 'startapp') self.assertOutput(out, 'startproject') def test_help_commands(self): "help --commands shows the list of all available commands" args = ['help', '--commands'] out, err = self.run_manage(args) self.assertNotInOutput(out, 'usage:') self.assertNotInOutput(out, 'Options:') self.assertNotInOutput(out, '[django]') self.assertOutput(out, 'startapp') self.assertOutput(out, 'startproject') self.assertNotInOutput(out, '\n\n') def test_help_alternative(self): "--help is equivalent to help" args1, args2 = ['help'], ['--help'] self.assertEqual(self.run_manage(args1), self.run_manage(args2)) def test_help_short_altert(self): "-h is handled as a short form of --help" args1, args2 = ['--help'], ['-h'] self.assertEqual(self.run_manage(args1), self.run_manage(args2)) def test_specific_help(self): "--help can be used on a specific command" args = ['check', '--help'] out, err = self.run_manage(args) self.assertNoOutput(err) # Command-specific options like --tag appear before options common to # all commands like --version. 
tag_location = out.find('--tag') version_location = out.find('--version') self.assertNotEqual(tag_location, -1) self.assertNotEqual(version_location, -1) self.assertLess(tag_location, version_location) self.assertOutput(out, "Checks the entire Django project for potential problems.") def test_help_default_options_with_custom_arguments(self): args = ['base_command', '--help'] out, err = self.run_manage(args) self.assertNoOutput(err) expected_options = [ '-h', '--option_a OPTION_A', '--option_b OPTION_B', '--option_c OPTION_C', '--version', '-v {0,1,2,3}', '--settings SETTINGS', '--pythonpath PYTHONPATH', '--traceback', '--no-color', '--force-color', 'args ...', ] for option in expected_options: self.assertOutput(out, f'[{option}]') self.assertOutput(out, '--option_a OPTION_A, -a OPTION_A') self.assertOutput(out, '--option_b OPTION_B, -b OPTION_B') self.assertOutput(out, '--option_c OPTION_C, -c OPTION_C') self.assertOutput(out, '-v {0,1,2,3}, --verbosity {0,1,2,3}') def test_color_style(self): style = color.no_style() self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!') style = color.make_style('nocolor') self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!') style = color.make_style('dark') self.assertIn('Hello, world!', style.ERROR('Hello, world!')) self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!') # Default palette has color. style = color.make_style('') self.assertIn('Hello, world!', style.ERROR('Hello, world!')) self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!') def test_command_color(self): out = StringIO() err = StringIO() command = ColorCommand(stdout=out, stderr=err) call_command(command) if color.supports_color(): self.assertIn('Hello, world!\n', out.getvalue()) self.assertIn('Hello, world!\n', err.getvalue()) self.assertNotEqual(out.getvalue(), 'Hello, world!\n') self.assertNotEqual(err.getvalue(), 'Hello, world!\n') else: self.assertEqual(out.getvalue(), 'Hello, world!\n') self.assertEqual(err.getvalue(), 'Hello, world!\n') def test_command_no_color(self): "--no-color prevent colorization of the output" out = StringIO() err = StringIO() command = ColorCommand(stdout=out, stderr=err, no_color=True) call_command(command) self.assertEqual(out.getvalue(), 'Hello, world!\n') self.assertEqual(err.getvalue(), 'Hello, world!\n') out = StringIO() err = StringIO() command = ColorCommand(stdout=out, stderr=err) call_command(command, no_color=True) self.assertEqual(out.getvalue(), 'Hello, world!\n') self.assertEqual(err.getvalue(), 'Hello, world!\n') def test_force_color_execute(self): out = StringIO() err = StringIO() with mock.patch.object(sys.stdout, 'isatty', lambda: False): command = ColorCommand(stdout=out, stderr=err) call_command(command, force_color=True) self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m') self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m') def test_force_color_command_init(self): out = StringIO() err = StringIO() with mock.patch.object(sys.stdout, 'isatty', lambda: False): command = ColorCommand(stdout=out, stderr=err, force_color=True) call_command(command) self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m') self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m') def test_no_color_force_color_mutually_exclusive_execute(self): msg = "The --no-color and --force-color options can't be used together." 
        with self.assertRaisesMessage(CommandError, msg):
            call_command(BaseCommand(), no_color=True, force_color=True)

    def test_no_color_force_color_mutually_exclusive_command_init(self):
        msg = "'no_color' and 'force_color' can't be used together."
        with self.assertRaisesMessage(CommandError, msg):
            call_command(BaseCommand(no_color=True, force_color=True))

    def test_custom_stdout(self):
        class Command(BaseCommand):
            requires_system_checks = []

            def handle(self, *args, **options):
                self.stdout.write("Hello, World!")

        out = StringIO()
        command = Command(stdout=out)
        call_command(command)
        self.assertEqual(out.getvalue(), "Hello, World!\n")
        out.truncate(0)
        new_out = StringIO()
        call_command(command, stdout=new_out)
        self.assertEqual(out.getvalue(), "")
        self.assertEqual(new_out.getvalue(), "Hello, World!\n")

    def test_custom_stderr(self):
        class Command(BaseCommand):
            requires_system_checks = []

            def handle(self, *args, **options):
                self.stderr.write("Hello, World!")

        err = StringIO()
        command = Command(stderr=err)
        call_command(command)
        self.assertEqual(err.getvalue(), "Hello, World!\n")
        err.truncate(0)
        new_err = StringIO()
        call_command(command, stderr=new_err)
        self.assertEqual(err.getvalue(), "")
        self.assertEqual(new_err.getvalue(), "Hello, World!\n")

    def test_base_command(self):
        "User BaseCommands can execute when a label is provided"
        args = ['base_command', 'testlabel']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels)

    def test_base_command_no_label(self):
        "User BaseCommands can execute when no labels are provided"
        args = ['base_command']
        expected_labels = "()"
        self._test_base_command(args, expected_labels)

    def test_base_command_multiple_label(self):
        "User BaseCommands can execute when multiple labels are provided"
        args = ['base_command', 'testlabel', 'anotherlabel']
        expected_labels = "('testlabel', 'anotherlabel')"
        self._test_base_command(args, expected_labels)

    def test_base_command_with_option(self):
        "User BaseCommands can execute with options when a label is provided"
        args = ['base_command', 'testlabel', '--option_a=x']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels, option_a="'x'")

    def test_base_command_with_options(self):
        "User BaseCommands can execute with multiple options when a label is provided"
        args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels, option_a="'x'", option_b="'y'")

    def test_base_command_with_wrong_option(self):
        "User BaseCommands output command usage when a wrong option is specified"
        args = ['base_command', '--invalid']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "usage: manage.py base_command")
        self.assertOutput(err, "error: unrecognized arguments: --invalid")

    def _test_base_command(self, args, labels, option_a="'1'", option_b="'2'"):
        out, err = self.run_manage(args)
        expected_out = (
            "EXECUTE:BaseCommand labels=%s, "
            "options=[('force_color', False), ('no_color', False), "
            "('option_a', %s), ('option_b', %s), ('option_c', '3'), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]") % (labels, option_a, option_b)
        self.assertNoOutput(err)
        self.assertOutput(out, expected_out)

    def test_base_run_from_argv(self):
        """
        Test run_from_argv properly terminates even with custom execute() (#19665).
        Also test proper traceback display.
""" err = StringIO() command = BaseCommand(stderr=err) def raise_command_error(*args, **kwargs): raise CommandError("Custom error") command.execute = lambda args: args # This will trigger TypeError # If the Exception is not CommandError it should always # raise the original exception. with self.assertRaises(TypeError): command.run_from_argv(['', '']) # If the Exception is CommandError and --traceback is not present # this command should raise a SystemExit and don't print any # traceback to the stderr. command.execute = raise_command_error err.truncate(0) with self.assertRaises(SystemExit): command.run_from_argv(['', '']) err_message = err.getvalue() self.assertNotIn("Traceback", err_message) self.assertIn("CommandError", err_message) # If the Exception is CommandError and --traceback is present # this command should raise the original CommandError as if it # were not a CommandError. err.truncate(0) with self.assertRaises(CommandError): command.run_from_argv(['', '', '--traceback']) def test_run_from_argv_non_ascii_error(self): """ Non-ASCII message of CommandError does not raise any UnicodeDecodeError in run_from_argv. """ def raise_command_error(*args, **kwargs): raise CommandError("Erreur personnalisée") command = BaseCommand(stderr=StringIO()) command.execute = raise_command_error with self.assertRaises(SystemExit): command.run_from_argv(['', '']) def test_run_from_argv_closes_connections(self): """ A command called from the command line should close connections after being executed (#21255). """ command = BaseCommand() command.check = lambda: [] command.handle = lambda *args, **kwargs: args with mock.patch('django.core.management.base.connections') as mock_connections: command.run_from_argv(['', '']) # Test connections have been closed self.assertTrue(mock_connections.close_all.called) def test_noargs(self): "NoArg Commands can be executed" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', False), ('pythonpath', None), ('settings', None), " "('traceback', False), ('verbosity', 1)]" ) def test_noargs_with_args(self): "NoArg Commands raise an error if an argument is provided" args = ['noargs_command', 'argument'] out, err = self.run_manage(args) self.assertOutput(err, "error: unrecognized arguments: argument") def test_app_command(self): "User AppCommands can execute when a single app name is provided" args = ['app_command', 'auth'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=") self.assertOutput( out, ", options=[('force_color', False), ('no_color', False), " "('pythonpath', None), ('settings', None), ('traceback', False), " "('verbosity', 1)]" ) def test_app_command_no_apps(self): "User AppCommands raise an error when no app name is provided" args = ['app_command'] out, err = self.run_manage(args) self.assertOutput(err, 'error: Enter at least one application label.') def test_app_command_multiple_apps(self): "User AppCommands raise an error when multiple app names are provided" args = ['app_command', 'auth', 'contenttypes'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=") self.assertOutput( out, ", options=[('force_color', False), ('no_color', False), " "('pythonpath', None), ('settings', None), ('traceback', False), " "('verbosity', 1)]" ) self.assertOutput(out, 
"EXECUTE:AppCommand name=django.contrib.contenttypes, options=") self.assertOutput( out, ", options=[('force_color', False), ('no_color', False), " "('pythonpath', None), ('settings', None), ('traceback', False), " "('verbosity', 1)]" ) def test_app_command_invalid_app_label(self): "User AppCommands can execute when a single app name is provided" args = ['app_command', 'NOT_AN_APP'] out, err = self.run_manage(args) self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.") def test_app_command_some_invalid_app_labels(self): "User AppCommands can execute when some of the provided app names are invalid" args = ['app_command', 'auth', 'NOT_AN_APP'] out, err = self.run_manage(args) self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.") def test_label_command(self): "User LabelCommands can execute when a label is provided" args = ['label_command', 'testlabel'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:LabelCommand label=testlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), ('settings', " "None), ('traceback', False), ('verbosity', 1)]" ) def test_label_command_no_label(self): "User LabelCommands raise an error if no label is provided" args = ['label_command'] out, err = self.run_manage(args) self.assertOutput(err, 'Enter at least one label') def test_label_command_multiple_label(self): "User LabelCommands are executed multiple times if multiple labels are provided" args = ['label_command', 'testlabel', 'anotherlabel'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:LabelCommand label=testlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), " "('settings', None), ('traceback', False), ('verbosity', 1)]" ) self.assertOutput( out, "EXECUTE:LabelCommand label=anotherlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), " "('settings', None), ('traceback', False), ('verbosity', 1)]" ) def test_suppress_base_options_command_help(self): args = ['suppress_base_options_command', '--help'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, 'Test suppress base options command.') self.assertNotInOutput(out, 'input file') self.assertOutput(out, '-h, --help') self.assertNotInOutput(out, '--version') self.assertNotInOutput(out, '--verbosity') self.assertNotInOutput(out, '-v {0,1,2,3}') self.assertNotInOutput(out, '--settings') self.assertNotInOutput(out, '--pythonpath') self.assertNotInOutput(out, '--traceback') self.assertNotInOutput(out, '--no-color') self.assertNotInOutput(out, '--force-color') def test_suppress_base_options_command_defaults(self): args = ['suppress_base_options_command'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:SuppressBaseOptionsCommand options=[('file', None), " "('force_color', False), ('no_color', False), " "('pythonpath', None), ('settings', None), " "('traceback', False), ('verbosity', 1)]" ) class Discovery(SimpleTestCase): def test_precedence(self): """ Apps listed first in INSTALLED_APPS have precedence. 
""" with self.settings(INSTALLED_APPS=['admin_scripts.complex_app', 'admin_scripts.simple_app', 'django.contrib.auth', 'django.contrib.contenttypes']): out = StringIO() call_command('duplicate', stdout=out) self.assertEqual(out.getvalue().strip(), 'complex_app') with self.settings(INSTALLED_APPS=['admin_scripts.simple_app', 'admin_scripts.complex_app', 'django.contrib.auth', 'django.contrib.contenttypes']): out = StringIO() call_command('duplicate', stdout=out) self.assertEqual(out.getvalue().strip(), 'simple_app') class ArgumentOrder(AdminScriptTestCase): """Tests for 2-stage argument parsing scheme. django-admin command arguments are parsed in 2 parts; the core arguments (--settings, --traceback and --pythonpath) are parsed using a basic parser, ignoring any unknown options. Then the full settings are passed to the command parser, which extracts commands of interest to the individual command. """ def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_setting_then_option(self): """ Options passed after settings are correctly handled. """ args = ['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x'] self._test(args) def test_setting_then_short_option(self): """ Short options passed after settings are correctly handled. """ args = ['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x'] self._test(args) def test_option_then_setting(self): """ Options passed before settings are correctly handled. """ args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings'] self._test(args) def test_short_option_then_setting(self): """ Short options passed before settings are correctly handled. """ args = ['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings'] self._test(args) def test_option_then_setting_then_option(self): """ Options are correctly handled when they are passed before and after a setting. """ args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y'] self._test(args, option_b="'y'") def _test(self, args, option_b="'2'"): out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:BaseCommand labels=('testlabel',), options=[" "('force_color', False), ('no_color', False), ('option_a', 'x'), " "('option_b', %s), ('option_c', '3'), ('pythonpath', None), " "('settings', 'alternate_settings'), ('traceback', False), " "('verbosity', 1)]" % option_b ) class ExecuteFromCommandLine(SimpleTestCase): def test_program_name_from_argv(self): """ Program name is computed from the execute_from_command_line()'s argv argument, not sys.argv. 
""" args = ['help', 'shell'] with captured_stdout() as out, captured_stderr() as err: with mock.patch('sys.argv', [None] + args): execute_from_command_line(['django-admin'] + args) self.assertIn('usage: django-admin shell', out.getvalue()) self.assertEqual(err.getvalue(), '') @override_settings(ROOT_URLCONF='admin_scripts.urls') class StartProject(LiveServerTestCase, AdminScriptTestCase): available_apps = [ 'admin_scripts', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] def test_wrong_args(self): "Make sure passing the wrong kinds of arguments outputs an error and prints usage" out, err = self.run_django_admin(['startproject']) self.assertNoOutput(out) self.assertOutput(err, "usage:") self.assertOutput(err, "You must provide a project name.") def test_simple_project(self): "Make sure the startproject management command creates a project" args = ['startproject', 'testproject'] testproject_dir = os.path.join(self.test_dir, 'testproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) # running again.. out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput( err, "CommandError: 'testproject' conflicts with the name of an " "existing Python module and cannot be used as a project name. " "Please try another name.", ) def test_invalid_project_name(self): "Make sure the startproject management command validates a project name" for bad_name in ('7testproject', '../testproject'): with self.subTest(project_name=bad_name): args = ['startproject', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "Error: '%s' is not a valid project name. Please make " "sure the name is a valid identifier." % bad_name ) self.assertFalse(os.path.exists(testproject_dir)) def test_importable_project_name(self): """ startproject validates that project name doesn't clash with existing Python modules. """ bad_name = 'os' args = ['startproject', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing " "Python module and cannot be used as a project name. Please try " "another name." ) self.assertFalse(os.path.exists(testproject_dir)) def test_simple_project_different_directory(self): "Make sure the startproject management command creates a project in a specific directory" args = ['startproject', 'testproject', 'othertestproject'] testproject_dir = os.path.join(self.test_dir, 'othertestproject') os.mkdir(testproject_dir) out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py'))) # running again.. out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput( err, "already exists. Overlaying a project into an existing directory " "won't replace conflicting files." 
) def test_custom_project_template(self): "Make sure the startproject management command is able to use a different project template" template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'customtestproject'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir'))) def test_template_dir_with_trailing_slash(self): "Ticket 17475: Template dir passed has a trailing path separator" template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep) args = ['startproject', '--template', template_path, 'customtestproject'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir'))) def test_custom_project_template_from_tarball_by_path(self): "Make sure the startproject management command is able to use a different project template from a tarball" template_path = os.path.join(custom_templates_dir, 'project_template.tgz') args = ['startproject', '--template', template_path, 'tarballtestproject'] testproject_dir = os.path.join(self.test_dir, 'tarballtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_custom_project_template_from_tarball_to_alternative_location(self): "Startproject can use a project template from a tarball and create it in a specified location" template_path = os.path.join(custom_templates_dir, 'project_template.tgz') args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation'] testproject_dir = os.path.join(self.test_dir, 'altlocation') os.mkdir(testproject_dir) out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_custom_project_template_from_tarball_by_url(self): """ The startproject management command is able to use a different project template from a tarball via a URL. 
""" template_url = '%s/custom_templates/project_template.tgz' % self.live_server_url args = ['startproject', '--template', template_url, 'urltestproject'] testproject_dir = os.path.join(self.test_dir, 'urltestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_custom_project_template_from_tarball_by_url_django_user_agent(self): user_agent = None def serve_template(request, *args, **kwargs): nonlocal user_agent user_agent = request.headers['User-Agent'] return serve(request, *args, **kwargs) old_urlpatterns = urls.urlpatterns[:] try: urls.urlpatterns += [ path( 'user_agent_check/<path:path>', serve_template, {'document_root': os.path.join(urls.here, 'custom_templates')}, ), ] template_url = f'{self.live_server_url}/user_agent_check/project_template.tgz' args = ['startproject', '--template', template_url, 'urltestproject'] _, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertIn('Django/%s' % get_version(), user_agent) finally: urls.urlpatterns = old_urlpatterns def test_project_template_tarball_url(self): "Startproject management command handles project template tar/zip balls from non-canonical urls" template_url = '%s/custom_templates/project_template.tgz/' % self.live_server_url args = ['startproject', '--template', template_url, 'urltestproject'] testproject_dir = os.path.join(self.test_dir, 'urltestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_file_without_extension(self): "Make sure the startproject management command is able to render custom files" template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir'))) base_path = os.path.join(testproject_dir, 'additional_dir') for f in ('Procfile', 'additional_file.py', 'requirements.txt'): self.assertTrue(os.path.exists(os.path.join(base_path, f))) with open(os.path.join(base_path, f)) as fh: self.assertEqual(fh.read().strip(), '# some file for customtestproject test project') def test_custom_project_template_context_variables(self): "Make sure template context variables are rendered with proper values" template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'another_project', 'project_dir'] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) out, err = self.run_django_admin(args) self.assertNoOutput(err) test_manage_py = os.path.join(testproject_dir, 'manage.py') with open(test_manage_py) as fp: content = fp.read() self.assertIn("project_name = 'another_project'", content) self.assertIn("project_directory = '%s'" % testproject_dir, content) def test_no_escaping_of_project_variables(self): "Make sure template context variables are not html escaped" # We're using a custom command so we need the alternate settings self.write_settings('alternate_settings.py') template_path = os.path.join(custom_templates_dir, 'project_template') args = [ 
'custom_startproject', '--template', template_path, 'another_project', 'project_dir', '--extra', '<&>', '--settings=alternate_settings', ] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) out, err = self.run_manage(args) self.assertNoOutput(err) test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py') with open(test_manage_py) as fp: content = fp.read() self.assertIn("<&>", content) def test_custom_project_destination_missing(self): """ Make sure an exception is raised when the provided destination directory doesn't exist """ template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2'] testproject_dir = os.path.join(self.test_dir, 'project_dir2') out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir) self.assertFalse(os.path.exists(testproject_dir)) def test_custom_project_template_with_non_ascii_templates(self): """ The startproject management command is able to render templates with non-ASCII content. """ template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt') with open(path, encoding='utf-8') as f: self.assertEqual(f.read().splitlines(False), [ 'Some non-ASCII text for testing ticket #18091:', 'üäö €']) def test_custom_project_template_hidden_directory_default_excluded(self): """Hidden directories are excluded by default.""" template_path = os.path.join(custom_templates_dir, 'project_template') args = [ 'startproject', '--template', template_path, 'custom_project_template_hidden_directories', 'project_dir', ] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) _, err = self.run_django_admin(args) self.assertNoOutput(err) hidden_dir = os.path.join(testproject_dir, '.hidden') self.assertIs(os.path.exists(hidden_dir), False) def test_custom_project_template_hidden_directory_included(self): """ Template context variables in hidden directories are rendered, if not excluded. """ template_path = os.path.join(custom_templates_dir, 'project_template') project_name = 'custom_project_template_hidden_directories_included' args = [ 'startproject', '--template', template_path, project_name, 'project_dir', '--exclude', ] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) _, err = self.run_django_admin(args) self.assertNoOutput(err) render_py_path = os.path.join(testproject_dir, '.hidden', 'render.py') with open(render_py_path) as fp: self.assertIn( f'# The {project_name} should be rendered.', fp.read(), ) def test_custom_project_template_exclude_directory(self): """ Excluded directories (in addition to .git and __pycache__) are not included in the project. 
""" template_path = os.path.join(custom_templates_dir, 'project_template') project_name = 'custom_project_with_excluded_directories' args = [ 'startproject', '--template', template_path, project_name, 'project_dir', '--exclude', 'additional_dir', '-x', '.hidden', ] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) _, err = self.run_django_admin(args) self.assertNoOutput(err) excluded_directories = [ '.hidden', 'additional_dir', '.git', '__pycache__', ] for directory in excluded_directories: self.assertIs( os.path.exists(os.path.join(testproject_dir, directory)), False, ) not_excluded = os.path.join(testproject_dir, project_name) self.assertIs(os.path.exists(not_excluded), True) @unittest.skipIf( sys.platform == 'win32', 'Windows only partially supports umasks and chmod.', ) @unittest.skipUnless(PY39, "subprocess.run()'s umask was added in Python 3.9.") def test_honor_umask(self): _, err = self.run_django_admin(['startproject', 'testproject'], umask=0o077) self.assertNoOutput(err) testproject_dir = os.path.join(self.test_dir, 'testproject') self.assertIs(os.path.isdir(testproject_dir), True) tests = [ (['manage.py'], 0o700), (['testproject'], 0o700), (['testproject', 'settings.py'], 0o600), ] for paths, expected_mode in tests: file_path = os.path.join(testproject_dir, *paths) with self.subTest(paths[-1]): self.assertEqual( stat.S_IMODE(os.stat(file_path).st_mode), expected_mode, ) class StartApp(AdminScriptTestCase): def test_invalid_name(self): """startapp validates that app name is a valid Python identifier.""" for bad_name in ('7testproject', '../testproject'): with self.subTest(app_name=bad_name): args = ['startapp', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: '{}' is not a valid app name. Please make " "sure the name is a valid identifier.".format(bad_name) ) self.assertFalse(os.path.exists(testproject_dir)) def test_importable_name(self): """ startapp validates that app name doesn't clash with existing Python modules. """ bad_name = 'os' args = ['startapp', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing " "Python module and cannot be used as an app name. Please try " "another name." ) self.assertFalse(os.path.exists(testproject_dir)) def test_invalid_target_name(self): for bad_target in ('invalid.dir_name', '7invalid_dir_name', '.invalid_dir_name'): with self.subTest(bad_target): _, err = self.run_django_admin(['startapp', 'app', bad_target]) self.assertOutput( err, "CommandError: '%s' is not a valid app directory. Please " "make sure the directory is a valid identifier." % bad_target ) def test_importable_target_name(self): _, err = self.run_django_admin(['startapp', 'app', 'os']) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing Python " "module and cannot be used as an app directory. Please try " "another directory." ) def test_trailing_slash_in_target_app_directory_name(self): app_dir = os.path.join(self.test_dir, 'apps', 'app1') os.makedirs(app_dir) _, err = self.run_django_admin(['startapp', 'app', os.path.join('apps', 'app1', '')]) self.assertNoOutput(err) self.assertIs(os.path.exists(os.path.join(app_dir, 'apps.py')), True) def test_overlaying_app(self): # Use a subdirectory so it is outside the PYTHONPATH. 
os.makedirs(os.path.join(self.test_dir, 'apps/app1')) self.run_django_admin(['startapp', 'app1', 'apps/app1']) out, err = self.run_django_admin(['startapp', 'app2', 'apps/app1']) self.assertOutput( err, "already exists. Overlaying an app into an existing directory " "won't replace conflicting files." ) def test_template(self): out, err = self.run_django_admin(['startapp', 'new_app']) self.assertNoOutput(err) app_path = os.path.join(self.test_dir, 'new_app') self.assertIs(os.path.exists(app_path), True) with open(os.path.join(app_path, 'apps.py')) as f: content = f.read() self.assertIn('class NewAppConfig(AppConfig)', content) self.assertIn( "default_auto_field = 'django.db.models.BigAutoField'", content, ) self.assertIn("name = 'new_app'", content) class DiffSettings(AdminScriptTestCase): """Tests for diffsettings management command.""" def test_basic(self): """Runs without error and emits settings diff.""" self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "FOO = 'bar' ###") # Attributes from django.conf.Settings don't appear. self.assertNotInOutput(out, 'is_overridden = ') def test_settings_configured(self): out, err = self.run_manage(['diffsettings'], manage_py='configured_settings_manage.py') self.assertNoOutput(err) self.assertOutput(out, 'CUSTOM = 1 ###\nDEBUG = True') # Attributes from django.conf.UserSettingsHolder don't appear. self.assertNotInOutput(out, 'default_settings = ') def test_dynamic_settings_configured(self): # Custom default settings appear. out, err = self.run_manage(['diffsettings'], manage_py='configured_dynamic_settings_manage.py') self.assertNoOutput(err) self.assertOutput(out, "FOO = 'bar' ###") def test_all(self): """The all option also shows settings with the default value.""" self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'}) args = ['diffsettings', '--settings=settings_to_diff', '--all'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "### STATIC_URL = None") def test_custom_default(self): """ The --default option specifies an alternate settings module for comparison. """ self.write_settings('settings_default.py', sdict={'FOO': '"foo"', 'BAR': '"bar1"'}) self.write_settings('settings_to_diff.py', sdict={'FOO': '"foo"', 'BAR': '"bar2"'}) out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--default=settings_default']) self.assertNoOutput(err) self.assertNotInOutput(out, "FOO") self.assertOutput(out, "BAR = 'bar2'") def test_unified(self): """--output=unified emits settings diff in unified mode.""" self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff', '--output=unified'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "+ FOO = 'bar'") self.assertOutput(out, "- SECRET_KEY = ''") self.assertOutput(out, "+ SECRET_KEY = 'django_tests_secret_key'") self.assertNotInOutput(out, " APPEND_SLASH = True") def test_unified_all(self): """ --output=unified --all emits settings diff in unified mode and includes settings with the default value. 
""" self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff', '--output=unified', '--all'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, " APPEND_SLASH = True") self.assertOutput(out, "+ FOO = 'bar'") self.assertOutput(out, "- SECRET_KEY = ''") class Dumpdata(AdminScriptTestCase): """Tests for dumpdata management command.""" def setUp(self): super().setUp() self.write_settings('settings.py') def test_pks_parsing(self): """Regression for #20509 Test would raise an exception rather than printing an error message. """ args = ['dumpdata', '--pks=1'] out, err = self.run_manage(args) self.assertOutput(err, "You can only use --pks option with one model") self.assertNoOutput(out) class MainModule(AdminScriptTestCase): """python -m django works like django-admin.""" def test_program_name_in_help(self): out, err = self.run_test(['-m', 'django', 'help']) self.assertOutput(out, "Type 'python -m django help <subcommand>' for help on a specific subcommand.") class DjangoAdminSuggestions(AdminScriptTestCase): def setUp(self): super().setUp() self.write_settings('settings.py') def test_suggestions(self): args = ['rnserver', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'rnserver'. Did you mean runserver?") def test_no_suggestions(self): args = ['abcdef', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertNotInOutput(err, 'Did you mean')
216e08725e2afa3dc1ad286d4d404b710122ed0e9b8ada05031c8df6097558f1
from math import ceil from operator import attrgetter from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, NotSupportedError, OperationalError, ProgrammingError, connection, ) from django.db.models import FileField, Value from django.db.models.functions import Lower from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from .models import ( BigAutoFieldModel, Country, NoFields, NullableFields, Pizzeria, ProxyCountry, ProxyMultiCountry, ProxyMultiProxyCountry, ProxyProxyCountry, RelatedModel, Restaurant, SmallAutoFieldModel, State, TwoFields, UpsertConflict, ) class BulkCreateTests(TestCase): def setUp(self): self.data = [ Country(name="United States of America", iso_two_letter="US"), Country(name="The Netherlands", iso_two_letter="NL"), Country(name="Germany", iso_two_letter="DE"), Country(name="Czech Republic", iso_two_letter="CZ") ] def test_simple(self): created = Country.objects.bulk_create(self.data) self.assertEqual(created, self.data) self.assertQuerysetEqual(Country.objects.order_by("-name"), [ "United States of America", "The Netherlands", "Germany", "Czech Republic" ], attrgetter("name")) created = Country.objects.bulk_create([]) self.assertEqual(created, []) self.assertEqual(Country.objects.count(), 4) @skipUnlessDBFeature('has_bulk_insert') def test_efficiency(self): with self.assertNumQueries(1): Country.objects.bulk_create(self.data) @skipUnlessDBFeature('has_bulk_insert') def test_long_non_ascii_text(self): """ Inserting non-ASCII values with a length in the range 2001 to 4000 characters, i.e. 4002 to 8000 bytes, must be set as a CLOB on Oracle (#22144). """ Country.objects.bulk_create([Country(description='Ж' * 3000)]) self.assertEqual(Country.objects.count(), 1) @skipUnlessDBFeature('has_bulk_insert') def test_long_and_short_text(self): Country.objects.bulk_create([ Country(description='a' * 4001, iso_two_letter='A'), Country(description='a', iso_two_letter='B'), Country(description='Ж' * 2001, iso_two_letter='C'), Country(description='Ж', iso_two_letter='D'), ]) self.assertEqual(Country.objects.count(), 4) def test_multi_table_inheritance_unsupported(self): expected_message = "Can't bulk create a multi-table inherited model" with self.assertRaisesMessage(ValueError, expected_message): Pizzeria.objects.bulk_create([ Pizzeria(name="The Art of Pizza"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiCountry.objects.bulk_create([ ProxyMultiCountry(name="Fillory", iso_two_letter="FL"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiProxyCountry.objects.bulk_create([ ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"), ]) def test_proxy_inheritance_supported(self): ProxyCountry.objects.bulk_create([ ProxyCountry(name="Qwghlm", iso_two_letter="QW"), Country(name="Tortall", iso_two_letter="TA"), ]) self.assertQuerysetEqual(ProxyCountry.objects.all(), { "Qwghlm", "Tortall" }, attrgetter("name"), ordered=False) ProxyProxyCountry.objects.bulk_create([ ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"), ]) self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), { "Qwghlm", "Tortall", "Netherlands", }, attrgetter("name"), ordered=False) def test_non_auto_increment_pk(self): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", "ME", "NY", ], attrgetter("two_letter_code")) @skipUnlessDBFeature('has_bulk_insert') def 
test_non_auto_increment_pk_efficiency(self): with self.assertNumQueries(1): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", "ME", "NY", ], attrgetter("two_letter_code")) @skipIfDBFeature('allows_auto_pk_0') def test_zero_as_autoval(self): """ Zero as id for AutoField should raise exception in MySQL, because MySQL does not allow zero for automatic primary key if the NO_AUTO_VALUE_ON_ZERO SQL mode is not enabled. """ valid_country = Country(name='Germany', iso_two_letter='DE') invalid_country = Country(id=0, name='Poland', iso_two_letter='PL') msg = 'The database backend does not accept 0 as a value for AutoField.' with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create([valid_country, invalid_country]) def test_batch_same_vals(self): # SQLite had a problem where all the same-valued models were # collapsed to one insert. Restaurant.objects.bulk_create([ Restaurant(name='foo') for i in range(0, 2) ]) self.assertEqual(Restaurant.objects.count(), 2) def test_large_batch(self): TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertEqual(TwoFields.objects.count(), 1001) self.assertEqual( TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(), 101) self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101) @skipUnlessDBFeature('has_bulk_insert') def test_large_single_field_batch(self): # SQLite had a problem with more than 500 UNIONed selects in single # query. Restaurant.objects.bulk_create([ Restaurant() for i in range(0, 501) ]) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_efficiency(self): with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertLess(len(connection.queries), 10) def test_large_batch_mixed(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. """ TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000) ]) self.assertEqual(TwoFields.objects.count(), 1000) # We can't assume much about the ID's created, except that the above # created IDs must exist. id_range = range(100000, 101000, 2) self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500) self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_mixed_efficiency(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. 
""" with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000)]) self.assertLess(len(connection.queries), 10) def test_explicit_batch_size(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)] num_objs = len(objs) TwoFields.objects.bulk_create(objs, batch_size=1) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=2) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=3) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=num_objs) self.assertEqual(TwoFields.objects.count(), num_objs) def test_empty_model(self): NoFields.objects.bulk_create([NoFields() for i in range(2)]) self.assertEqual(NoFields.objects.count(), 2) @skipUnlessDBFeature('has_bulk_insert') def test_explicit_batch_size_efficiency(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)] with self.assertNumQueries(2): TwoFields.objects.bulk_create(objs, 50) TwoFields.objects.all().delete() with self.assertNumQueries(1): TwoFields.objects.bulk_create(objs, len(objs)) @skipUnlessDBFeature('has_bulk_insert') def test_explicit_batch_size_respects_max_batch_size(self): objs = [Country(name=f'Country {i}') for i in range(1000)] fields = ['name', 'iso_two_letter', 'description'] max_batch_size = max(connection.ops.bulk_batch_size(fields, objs), 1) with self.assertNumQueries(ceil(len(objs) / max_batch_size)): Country.objects.bulk_create(objs, batch_size=max_batch_size + 1) @skipUnlessDBFeature('has_bulk_insert') def test_bulk_insert_expressions(self): Restaurant.objects.bulk_create([ Restaurant(name="Sam's Shake Shack"), Restaurant(name=Lower(Value("Betty's Beetroot Bar"))) ]) bbb = Restaurant.objects.filter(name="betty's beetroot bar") self.assertEqual(bbb.count(), 1) @skipUnlessDBFeature('has_bulk_insert') def test_bulk_insert_nullable_fields(self): fk_to_auto_fields = { 'auto_field': NoFields.objects.create(), 'small_auto_field': SmallAutoFieldModel.objects.create(), 'big_auto_field': BigAutoFieldModel.objects.create(), } # NULL can be mixed with other values in nullable fields nullable_fields = [field for field in NullableFields._meta.get_fields() if field.name != 'id'] NullableFields.objects.bulk_create([ NullableFields(**{**fk_to_auto_fields, field.name: None}) for field in nullable_fields ]) self.assertEqual(NullableFields.objects.count(), len(nullable_fields)) for field in nullable_fields: with self.subTest(field=field): field_value = '' if isinstance(field, FileField) else None self.assertEqual(NullableFields.objects.filter(**{field.name: field_value}).count(), 1) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_pk_and_insert_single_item(self): with self.assertNumQueries(1): countries = Country.objects.bulk_create([self.data[0]]) self.assertEqual(len(countries), 1) self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0]) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_pk_and_query_efficiency(self): with self.assertNumQueries(1): countries = Country.objects.bulk_create(self.data) self.assertEqual(len(countries), 4) self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0]) self.assertEqual(Country.objects.get(pk=countries[1].pk), countries[1]) 
self.assertEqual(Country.objects.get(pk=countries[2].pk), countries[2]) self.assertEqual(Country.objects.get(pk=countries[3].pk), countries[3]) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_state(self): country_nl = Country(name='Netherlands', iso_two_letter='NL') country_be = Country(name='Belgium', iso_two_letter='BE') Country.objects.bulk_create([country_nl]) country_be.save() # Objects save via bulk_create() and save() should have equal state. self.assertEqual(country_nl._state.adding, country_be._state.adding) self.assertEqual(country_nl._state.db, country_be._state.db) def test_set_state_with_pk_specified(self): state_ca = State(two_letter_code='CA') state_ny = State(two_letter_code='NY') State.objects.bulk_create([state_ca]) state_ny.save() # Objects save via bulk_create() and save() should have equal state. self.assertEqual(state_ca._state.adding, state_ny._state.adding) self.assertEqual(state_ca._state.db, state_ny._state.db) @skipIfDBFeature('supports_ignore_conflicts') def test_ignore_conflicts_value_error(self): message = 'This database backend does not support ignoring conflicts.' with self.assertRaisesMessage(NotSupportedError, message): TwoFields.objects.bulk_create(self.data, ignore_conflicts=True) @skipUnlessDBFeature('supports_ignore_conflicts') def test_ignore_conflicts_ignore(self): data = [ TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2), TwoFields(f1=3, f2=3), ] TwoFields.objects.bulk_create(data) self.assertEqual(TwoFields.objects.count(), 3) # With ignore_conflicts=True, conflicts are ignored. conflicting_objects = [ TwoFields(f1=2, f2=2), TwoFields(f1=3, f2=3), ] TwoFields.objects.bulk_create([conflicting_objects[0]], ignore_conflicts=True) TwoFields.objects.bulk_create(conflicting_objects, ignore_conflicts=True) self.assertEqual(TwoFields.objects.count(), 3) self.assertIsNone(conflicting_objects[0].pk) self.assertIsNone(conflicting_objects[1].pk) # New objects are created and conflicts are ignored. new_object = TwoFields(f1=4, f2=4) TwoFields.objects.bulk_create(conflicting_objects + [new_object], ignore_conflicts=True) self.assertEqual(TwoFields.objects.count(), 4) self.assertIsNone(new_object.pk) # Without ignore_conflicts=True, there's a problem. with self.assertRaises(IntegrityError): TwoFields.objects.bulk_create(conflicting_objects) def test_nullable_fk_after_parent(self): parent = NoFields() child = NullableFields(auto_field=parent, integer_field=88) parent.save() NullableFields.objects.bulk_create([child]) child = NullableFields.objects.get(integer_field=88) self.assertEqual(child.auto_field, parent) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_nullable_fk_after_parent_bulk_create(self): parent = NoFields() child = NullableFields(auto_field=parent, integer_field=88) NoFields.objects.bulk_create([parent]) NullableFields.objects.bulk_create([child]) child = NullableFields.objects.get(integer_field=88) self.assertEqual(child.auto_field, parent) def test_unsaved_parent(self): parent = NoFields() msg = ( "bulk_create() prohibited to prevent data loss due to unsaved " "related object 'auto_field'." ) with self.assertRaisesMessage(ValueError, msg): NullableFields.objects.bulk_create([NullableFields(auto_field=parent)]) def test_invalid_batch_size_exception(self): msg = 'Batch size must be a positive integer.' 
with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create([], batch_size=-1) @skipIfDBFeature('supports_update_conflicts') def test_update_conflicts_unsupported(self): msg = 'This database backend does not support updating conflicts.' with self.assertRaisesMessage(NotSupportedError, msg): Country.objects.bulk_create(self.data, update_conflicts=True) @skipUnlessDBFeature('supports_ignore_conflicts', 'supports_update_conflicts') def test_ignore_update_conflicts_exclusive(self): msg = 'ignore_conflicts and update_conflicts are mutually exclusive' with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create( self.data, ignore_conflicts=True, update_conflicts=True, ) @skipUnlessDBFeature('supports_update_conflicts') def test_update_conflicts_no_update_fields(self): msg = ( 'Fields that will be updated when a row insertion fails on ' 'conflicts must be provided.' ) with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create(self.data, update_conflicts=True) @skipUnlessDBFeature('supports_update_conflicts') @skipIfDBFeature('supports_update_conflicts_with_target') def test_update_conflicts_unique_field_unsupported(self): msg = ( 'This database backend does not support updating conflicts with ' 'specifying unique fields that can trigger the upsert.' ) with self.assertRaisesMessage(NotSupportedError, msg): TwoFields.objects.bulk_create( [TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)], update_conflicts=True, update_fields=['f2'], unique_fields=['f1'], ) @skipUnlessDBFeature('supports_update_conflicts') def test_update_conflicts_nonexistent_update_fields(self): unique_fields = None if connection.features.supports_update_conflicts_with_target: unique_fields = ['f1'] msg = "TwoFields has no field named 'nonexistent'" with self.assertRaisesMessage(FieldDoesNotExist, msg): TwoFields.objects.bulk_create( [TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)], update_conflicts=True, update_fields=['nonexistent'], unique_fields=unique_fields, ) @skipUnlessDBFeature( 'supports_update_conflicts', 'supports_update_conflicts_with_target', ) def test_update_conflicts_unique_fields_required(self): msg = 'Unique fields that can trigger the upsert must be provided.' with self.assertRaisesMessage(ValueError, msg): TwoFields.objects.bulk_create( [TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)], update_conflicts=True, update_fields=['f1'], ) @skipUnlessDBFeature( 'supports_update_conflicts', 'supports_update_conflicts_with_target', ) def test_update_conflicts_invalid_update_fields(self): msg = ( 'bulk_create() can only be used with concrete fields in ' 'update_fields.' ) # Reverse one-to-one relationship. with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create( self.data, update_conflicts=True, update_fields=['relatedmodel'], unique_fields=['pk'], ) # Many-to-many relationship. with self.assertRaisesMessage(ValueError, msg): RelatedModel.objects.bulk_create( [RelatedModel(country=self.data[0])], update_conflicts=True, update_fields=['big_auto_fields'], unique_fields=['country'], ) @skipUnlessDBFeature( 'supports_update_conflicts', 'supports_update_conflicts_with_target', ) def test_update_conflicts_pk_in_update_fields(self): msg = 'bulk_create() cannot be used with primary keys in update_fields.' 
with self.assertRaisesMessage(ValueError, msg): BigAutoFieldModel.objects.bulk_create( [BigAutoFieldModel()], update_conflicts=True, update_fields=['id'], unique_fields=['id'], ) @skipUnlessDBFeature( 'supports_update_conflicts', 'supports_update_conflicts_with_target', ) def test_update_conflicts_invalid_unique_fields(self): msg = ( 'bulk_create() can only be used with concrete fields in ' 'unique_fields.' ) # Reverse one-to-one relationship. with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create( self.data, update_conflicts=True, update_fields=['name'], unique_fields=['relatedmodel'], ) # Many-to-many relationship. with self.assertRaisesMessage(ValueError, msg): RelatedModel.objects.bulk_create( [RelatedModel(country=self.data[0])], update_conflicts=True, update_fields=['name'], unique_fields=['big_auto_fields'], ) def _test_update_conflicts_two_fields(self, unique_fields): TwoFields.objects.bulk_create([ TwoFields(f1=1, f2=1, name='a'), TwoFields(f1=2, f2=2, name='b'), ]) self.assertEqual(TwoFields.objects.count(), 2) conflicting_objects = [ TwoFields(f1=1, f2=1, name='c'), TwoFields(f1=2, f2=2, name='d'), ] TwoFields.objects.bulk_create( conflicting_objects, update_conflicts=True, unique_fields=unique_fields, update_fields=['name'], ) self.assertEqual(TwoFields.objects.count(), 2) self.assertCountEqual(TwoFields.objects.values('f1', 'f2', 'name'), [ {'f1': 1, 'f2': 1, 'name': 'c'}, {'f1': 2, 'f2': 2, 'name': 'd'}, ]) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_two_fields_unique_fields_first(self): self._test_update_conflicts_two_fields(['f1']) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_two_fields_unique_fields_second(self): self._test_update_conflicts_two_fields(['f2']) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_two_fields_unique_fields_both(self): with self.assertRaises((OperationalError, ProgrammingError)): self._test_update_conflicts_two_fields(['f1', 'f2']) @skipUnlessDBFeature('supports_update_conflicts') @skipIfDBFeature('supports_update_conflicts_with_target') def test_update_conflicts_two_fields_no_unique_fields(self): self._test_update_conflicts_two_fields([]) def _test_update_conflicts_unique_two_fields(self, unique_fields): Country.objects.bulk_create(self.data) self.assertEqual(Country.objects.count(), 4) new_data = [ # Conflicting countries. Country(name='Germany', iso_two_letter='DE', description=( 'Germany is a country in Central Europe.' )), Country(name='Czech Republic', iso_two_letter='CZ', description=( 'The Czech Republic is a landlocked country in Central Europe.' )), # New countries. Country(name='Australia', iso_two_letter='AU'), Country(name='Japan', iso_two_letter='JP', description=( 'Japan is an island country in East Asia.' )), ] Country.objects.bulk_create( new_data, update_conflicts=True, update_fields=['description'], unique_fields=unique_fields, ) self.assertEqual(Country.objects.count(), 6) self.assertCountEqual(Country.objects.values('iso_two_letter', 'description'), [ {'iso_two_letter': 'US', 'description': ''}, {'iso_two_letter': 'NL', 'description': ''}, {'iso_two_letter': 'DE', 'description': ( 'Germany is a country in Central Europe.' )}, {'iso_two_letter': 'CZ', 'description': ( 'The Czech Republic is a landlocked country in Central Europe.' 
)}, {'iso_two_letter': 'AU', 'description': ''}, {'iso_two_letter': 'JP', 'description': ( 'Japan is an island country in East Asia.' )}, ]) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_unique_two_fields_unique_fields_both(self): self._test_update_conflicts_unique_two_fields(['iso_two_letter', 'name']) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_unique_two_fields_unique_fields_one(self): with self.assertRaises((OperationalError, ProgrammingError)): self._test_update_conflicts_unique_two_fields(['iso_two_letter']) @skipUnlessDBFeature('supports_update_conflicts') @skipIfDBFeature('supports_update_conflicts_with_target') def test_update_conflicts_unique_two_fields_unique_no_unique_fields(self): self._test_update_conflicts_unique_two_fields([]) def _test_update_conflicts(self, unique_fields): UpsertConflict.objects.bulk_create([ UpsertConflict(number=1, rank=1, name='John'), UpsertConflict(number=2, rank=2, name='Mary'), UpsertConflict(number=3, rank=3, name='Hannah'), ]) self.assertEqual(UpsertConflict.objects.count(), 3) conflicting_objects = [ UpsertConflict(number=1, rank=4, name='Steve'), UpsertConflict(number=2, rank=2, name='Olivia'), UpsertConflict(number=3, rank=1, name='Hannah'), ] UpsertConflict.objects.bulk_create( conflicting_objects, update_conflicts=True, update_fields=['name', 'rank'], unique_fields=unique_fields, ) self.assertEqual(UpsertConflict.objects.count(), 3) self.assertCountEqual(UpsertConflict.objects.values('number', 'rank', 'name'), [ {'number': 1, 'rank': 4, 'name': 'Steve'}, {'number': 2, 'rank': 2, 'name': 'Olivia'}, {'number': 3, 'rank': 1, 'name': 'Hannah'}, ]) UpsertConflict.objects.bulk_create( conflicting_objects + [UpsertConflict(number=4, rank=4, name='Mark')], update_conflicts=True, update_fields=['name', 'rank'], unique_fields=unique_fields, ) self.assertEqual(UpsertConflict.objects.count(), 4) self.assertCountEqual(UpsertConflict.objects.values('number', 'rank', 'name'), [ {'number': 1, 'rank': 4, 'name': 'Steve'}, {'number': 2, 'rank': 2, 'name': 'Olivia'}, {'number': 3, 'rank': 1, 'name': 'Hannah'}, {'number': 4, 'rank': 4, 'name': 'Mark'}, ]) @skipUnlessDBFeature('supports_update_conflicts', 'supports_update_conflicts_with_target') def test_update_conflicts_unique_fields(self): self._test_update_conflicts(unique_fields=['number']) @skipUnlessDBFeature('supports_update_conflicts') @skipIfDBFeature('supports_update_conflicts_with_target') def test_update_conflicts_no_unique_fields(self): self._test_update_conflicts([])
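# A hedged, illustrative sketch (not a test) of the upsert path exercised by
# the _test_update_conflicts* helpers above: on backends where
# supports_update_conflicts_with_target is True, unique_fields names the
# conflict target and update_fields lists the columns rewritten when an
# insert collides with an existing row. The helper name is made up for the
# example.
def upsert_country_descriptions(countries):
    """Insert countries, updating description for existing (iso, name) pairs."""
    return Country.objects.bulk_create(
        countries,
        update_conflicts=True,
        update_fields=['description'],
        unique_fields=['iso_two_letter', 'name'],
    )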
8a24a22f97ae41ec48cf59fba58b7b46a5331bb35bcb4dbb292eed97b30fcf24
import datetime
import uuid
from decimal import Decimal

from django.db import models
from django.utils import timezone

try:
    from PIL import Image
except ImportError:
    Image = None


class Country(models.Model):
    name = models.CharField(max_length=255)
    iso_two_letter = models.CharField(max_length=2)
    description = models.TextField()

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=['iso_two_letter', 'name'],
                name='country_name_iso_unique',
            ),
        ]


class ProxyCountry(Country):
    class Meta:
        proxy = True


class ProxyProxyCountry(ProxyCountry):
    class Meta:
        proxy = True


class ProxyMultiCountry(ProxyCountry):
    pass


class ProxyMultiProxyCountry(ProxyMultiCountry):
    class Meta:
        proxy = True


class Place(models.Model):
    name = models.CharField(max_length=100)

    class Meta:
        abstract = True


class Restaurant(Place):
    pass


class Pizzeria(Restaurant):
    pass


class State(models.Model):
    two_letter_code = models.CharField(max_length=2, primary_key=True)


class TwoFields(models.Model):
    f1 = models.IntegerField(unique=True)
    f2 = models.IntegerField(unique=True)
    name = models.CharField(max_length=15, null=True)


class UpsertConflict(models.Model):
    number = models.IntegerField(unique=True)
    rank = models.IntegerField()
    name = models.CharField(max_length=15)


class NoFields(models.Model):
    pass


class SmallAutoFieldModel(models.Model):
    id = models.SmallAutoField(primary_key=True)


class BigAutoFieldModel(models.Model):
    id = models.BigAutoField(primary_key=True)


class NullableFields(models.Model):
    # Fields in db.backends.oracle.BulkInsertMapper
    big_int_field = models.BigIntegerField(null=True, default=1)
    binary_field = models.BinaryField(null=True, default=b'data')
    date_field = models.DateField(null=True, default=timezone.now)
    datetime_field = models.DateTimeField(null=True, default=timezone.now)
    decimal_field = models.DecimalField(null=True, max_digits=2, decimal_places=1, default=Decimal('1.1'))
    duration_field = models.DurationField(null=True, default=datetime.timedelta(1))
    float_field = models.FloatField(null=True, default=3.2)
    integer_field = models.IntegerField(null=True, default=2)
    null_boolean_field = models.BooleanField(null=True, default=False)
    positive_big_integer_field = models.PositiveBigIntegerField(null=True, default=2 ** 63 - 1)
    positive_integer_field = models.PositiveIntegerField(null=True, default=3)
    positive_small_integer_field = models.PositiveSmallIntegerField(null=True, default=4)
    small_integer_field = models.SmallIntegerField(null=True, default=5)
    time_field = models.TimeField(null=True, default=timezone.now)
    auto_field = models.ForeignKey(NoFields, on_delete=models.CASCADE, null=True)
    small_auto_field = models.ForeignKey(SmallAutoFieldModel, on_delete=models.CASCADE, null=True)
    big_auto_field = models.ForeignKey(BigAutoFieldModel, on_delete=models.CASCADE, null=True)
    # Fields not required in BulkInsertMapper
    char_field = models.CharField(null=True, max_length=4, default='char')
    email_field = models.EmailField(null=True, default='[email protected]')
    file_field = models.FileField(null=True, default='file.txt')
    file_path_field = models.FilePathField(path='/tmp', null=True, default='file.txt')
    generic_ip_address_field = models.GenericIPAddressField(null=True, default='127.0.0.1')
    if Image:
        image_field = models.ImageField(null=True, default='image.jpg')
    slug_field = models.SlugField(null=True, default='slug')
    text_field = models.TextField(null=True, default='text')
    url_field = models.URLField(null=True, default='/')
    uuid_field = models.UUIDField(null=True, default=uuid.uuid4)


class RelatedModel(models.Model):
name = models.CharField(max_length=15, null=True) country = models.OneToOneField(Country, models.CASCADE, primary_key=True) big_auto_fields = models.ManyToManyField(BigAutoFieldModel)
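# --- Illustrative usage sketch (not part of the original test app) ---
# A minimal example of how models such as Country above are typically driven
# through QuerySet.bulk_create(); it assumes a configured Django project in
# which this app is installed and migrated. Wrapped in a function so nothing
# runs at import time; the function name and sample data are hypothetical.
def bulk_create_countries_sketch():
    countries = [
        Country(name='Netherlands', iso_two_letter='NL', description='Example row'),
        Country(name='Germany', iso_two_letter='DE', description='Example row'),
    ]
    created = Country.objects.bulk_create(countries)
    # Re-inserting the same rows would violate the country_name_iso_unique
    # constraint; ignore_conflicts=True asks the backend to skip such rows
    # instead of raising IntegrityError.
    Country.objects.bulk_create(
        [Country(name='Germany', iso_two_letter='DE', description='Duplicate')],
        ignore_conflicts=True,
    )
    return created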
1a9b26b89a8318a5017b7155300d6bb76c6b10f4c1082d32fadce900b92a893a
""" Regression tests for Model inheritance behavior. """ import datetime from operator import attrgetter from unittest import expectedFailure from django import forms from django.test import TestCase from .models import ( ArticleWithAuthor, BachelorParty, BirthdayParty, BusStation, Child, Congressman, DerivedM, InternalCertificationAudit, ItalianRestaurant, M2MChild, MessyBachelorParty, ParkingLot, ParkingLot3, ParkingLot4A, ParkingLot4B, Person, Place, Politician, Profile, QualityControl, Restaurant, SelfRefChild, SelfRefParent, Senator, Supplier, TrainStation, User, Wholesaler, ) class ModelInheritanceTest(TestCase): def test_model_inheritance(self): # Regression for #7350, #7202 # When you create a Parent object with a specific reference to an # existent child instance, saving the Parent doesn't duplicate the # child. This behavior is only activated during a raw save - it is # mostly relevant to deserialization, but any sort of CORBA style # 'narrow()' API would require a similar approach. # Create a child-parent-grandparent chain place1 = Place(name="Guido's House of Pasta", address='944 W. Fullerton') place1.save_base(raw=True) restaurant = Restaurant( place_ptr=place1, serves_hot_dogs=True, serves_pizza=False, ) restaurant.save_base(raw=True) italian_restaurant = ItalianRestaurant(restaurant_ptr=restaurant, serves_gnocchi=True) italian_restaurant.save_base(raw=True) # Create a child-parent chain with an explicit parent link place2 = Place(name='Main St', address='111 Main St') place2.save_base(raw=True) park = ParkingLot(parent=place2, capacity=100) park.save_base(raw=True) # No extra parent objects have been created. places = list(Place.objects.all()) self.assertEqual(places, [place1, place2]) dicts = list(Restaurant.objects.values('name', 'serves_hot_dogs')) self.assertEqual(dicts, [{ 'name': "Guido's House of Pasta", 'serves_hot_dogs': True }]) dicts = list(ItalianRestaurant.objects.values( 'name', 'serves_hot_dogs', 'serves_gnocchi')) self.assertEqual(dicts, [{ 'name': "Guido's House of Pasta", 'serves_gnocchi': True, 'serves_hot_dogs': True, }]) dicts = list(ParkingLot.objects.values('name', 'capacity')) self.assertEqual(dicts, [{ 'capacity': 100, 'name': 'Main St', }]) # You can also update objects when using a raw save. place1.name = "Guido's All New House of Pasta" place1.save_base(raw=True) restaurant.serves_hot_dogs = False restaurant.save_base(raw=True) italian_restaurant.serves_gnocchi = False italian_restaurant.save_base(raw=True) place2.name = 'Derelict lot' place2.save_base(raw=True) park.capacity = 50 park.save_base(raw=True) # No extra parent objects after an update, either. places = list(Place.objects.all()) self.assertEqual(places, [place2, place1]) self.assertEqual(places[0].name, 'Derelict lot') self.assertEqual(places[1].name, "Guido's All New House of Pasta") dicts = list(Restaurant.objects.values('name', 'serves_hot_dogs')) self.assertEqual(dicts, [{ 'name': "Guido's All New House of Pasta", 'serves_hot_dogs': False, }]) dicts = list(ItalianRestaurant.objects.values( 'name', 'serves_hot_dogs', 'serves_gnocchi')) self.assertEqual(dicts, [{ 'name': "Guido's All New House of Pasta", 'serves_gnocchi': False, 'serves_hot_dogs': False, }]) dicts = list(ParkingLot.objects.values('name', 'capacity')) self.assertEqual(dicts, [{ 'capacity': 50, 'name': 'Derelict lot', }]) # If you try to raw_save a parent attribute onto a child object, # the attribute will be ignored. 
italian_restaurant.name = "Lorenzo's Pasta Hut" italian_restaurant.save_base(raw=True) # Note that the name has not changed # - name is an attribute of Place, not ItalianRestaurant dicts = list(ItalianRestaurant.objects.values( 'name', 'serves_hot_dogs', 'serves_gnocchi')) self.assertEqual(dicts, [{ 'name': "Guido's All New House of Pasta", 'serves_gnocchi': False, 'serves_hot_dogs': False, }]) def test_issue_7105(self): # Regressions tests for #7105: dates() queries should be able to use # fields from the parent model as easily as the child. Child.objects.create( name='child', created=datetime.datetime(2008, 6, 26, 17, 0, 0)) datetimes = list(Child.objects.datetimes('created', 'month')) self.assertEqual(datetimes, [datetime.datetime(2008, 6, 1, 0, 0)]) def test_issue_7276(self): # Regression test for #7276: calling delete() on a model with # multi-table inheritance should delete the associated rows from any # ancestor tables, as well as any descendent objects. place1 = Place(name="Guido's House of Pasta", address='944 W. Fullerton') place1.save_base(raw=True) restaurant = Restaurant( place_ptr=place1, serves_hot_dogs=True, serves_pizza=False, ) restaurant.save_base(raw=True) italian_restaurant = ItalianRestaurant(restaurant_ptr=restaurant, serves_gnocchi=True) italian_restaurant.save_base(raw=True) ident = ItalianRestaurant.objects.all()[0].id self.assertEqual(Place.objects.get(pk=ident), place1) Restaurant.objects.create( name='a', address='xx', serves_hot_dogs=True, serves_pizza=False, ) # This should delete both Restaurants, plus the related places, plus # the ItalianRestaurant. Restaurant.objects.all().delete() with self.assertRaises(Place.DoesNotExist): Place.objects.get(pk=ident) with self.assertRaises(ItalianRestaurant.DoesNotExist): ItalianRestaurant.objects.get(pk=ident) def test_issue_6755(self): """ Regression test for #6755 """ r = Restaurant(serves_pizza=False, serves_hot_dogs=False) r.save() self.assertEqual(r.id, r.place_ptr_id) orig_id = r.id r = Restaurant(place_ptr_id=orig_id, serves_pizza=True, serves_hot_dogs=False) r.save() self.assertEqual(r.id, orig_id) self.assertEqual(r.id, r.place_ptr_id) def test_issue_7488(self): # Regression test for #7488. This looks a little crazy, but it's the # equivalent of what the admin interface has to do for the edit-inline # case. suppliers = Supplier.objects.filter( restaurant=Restaurant(name='xx', address='yy')) suppliers = list(suppliers) self.assertEqual(suppliers, []) def test_issue_11764(self): """ Regression test for #11764 """ wholesalers = list(Wholesaler.objects.all().select_related()) self.assertEqual(wholesalers, []) def test_issue_7853(self): """ Regression test for #7853 If the parent class has a self-referential link, make sure that any updates to that link via the child update the right table. 
""" obj = SelfRefChild.objects.create(child_data=37, parent_data=42) obj.delete() def test_get_next_previous_by_date(self): """ Regression tests for #8076 get_(next/previous)_by_date should work """ c1 = ArticleWithAuthor( headline='ArticleWithAuthor 1', author="Person 1", pub_date=datetime.datetime(2005, 8, 1, 3, 0)) c1.save() c2 = ArticleWithAuthor( headline='ArticleWithAuthor 2', author="Person 2", pub_date=datetime.datetime(2005, 8, 1, 10, 0)) c2.save() c3 = ArticleWithAuthor( headline='ArticleWithAuthor 3', author="Person 3", pub_date=datetime.datetime(2005, 8, 2)) c3.save() self.assertEqual(c1.get_next_by_pub_date(), c2) self.assertEqual(c2.get_next_by_pub_date(), c3) with self.assertRaises(ArticleWithAuthor.DoesNotExist): c3.get_next_by_pub_date() self.assertEqual(c3.get_previous_by_pub_date(), c2) self.assertEqual(c2.get_previous_by_pub_date(), c1) with self.assertRaises(ArticleWithAuthor.DoesNotExist): c1.get_previous_by_pub_date() def test_inherited_fields(self): """ Regression test for #8825 and #9390 Make sure all inherited fields (esp. m2m fields, in this case) appear on the child class. """ m2mchildren = list(M2MChild.objects.filter(articles__isnull=False)) self.assertEqual(m2mchildren, []) # Ordering should not include any database column more than once (this # is most likely to occur naturally with model inheritance, so we # check it here). Regression test for #9390. This necessarily pokes at # the SQL string for the query, since the duplicate problems are only # apparent at that late stage. qs = ArticleWithAuthor.objects.order_by('pub_date', 'pk') sql = qs.query.get_compiler(qs.db).as_sql()[0] fragment = sql[sql.find('ORDER BY'):] pos = fragment.find('pub_date') self.assertEqual(fragment.find('pub_date', pos + 1), -1) def test_queryset_update_on_parent_model(self): """ Regression test for #10362 It is possible to call update() and only change a field in an ancestor model. """ article = ArticleWithAuthor.objects.create( author="fred", headline="Hey there!", pub_date=datetime.datetime(2009, 3, 1, 8, 0, 0), ) update = ArticleWithAuthor.objects.filter(author='fred').update(headline='Oh, no!') self.assertEqual(update, 1) update = ArticleWithAuthor.objects.filter(pk=article.pk).update(headline='Oh, no!') self.assertEqual(update, 1) derivedm1 = DerivedM.objects.create( customPK=44, base_name="b1", derived_name='d1', ) self.assertEqual(derivedm1.customPK, 44) self.assertEqual(derivedm1.base_name, 'b1') self.assertEqual(derivedm1.derived_name, 'd1') derivedms = list(DerivedM.objects.all()) self.assertEqual(derivedms, [derivedm1]) def test_use_explicit_o2o_to_parent_as_pk(self): """ The connector from child to parent need not be the pk on the child. """ self.assertEqual(ParkingLot3._meta.pk.name, "primary_key") # the child->parent link self.assertEqual(ParkingLot3._meta.get_ancestor_link(Place).name, "parent") def test_use_explicit_o2o_to_parent_from_abstract_model(self): self.assertEqual(ParkingLot4A._meta.pk.name, "parent") ParkingLot4A.objects.create( name="Parking4A", address='21 Jump Street', ) self.assertEqual(ParkingLot4B._meta.pk.name, "parent") ParkingLot4A.objects.create( name="Parking4B", address='21 Jump Street', ) def test_all_fields_from_abstract_base_class(self): """ Regression tests for #7588 """ # All fields from an ABC, including those inherited non-abstractly # should be available on child classes (#7588). Creating this instance # should work without error. 
QualityControl.objects.create( headline="Problems in Django", pub_date=datetime.datetime.now(), quality=10, assignee='adrian', ) def test_abstract_base_class_m2m_relation_inheritance(self): # many-to-many relations defined on an abstract base class are # correctly inherited (and created) on the child class. p1 = Person.objects.create(name='Alice') p2 = Person.objects.create(name='Bob') p3 = Person.objects.create(name='Carol') p4 = Person.objects.create(name='Dave') birthday = BirthdayParty.objects.create(name='Birthday party for Alice') birthday.attendees.set([p1, p3]) bachelor = BachelorParty.objects.create(name='Bachelor party for Bob') bachelor.attendees.set([p2, p4]) parties = list(p1.birthdayparty_set.all()) self.assertEqual(parties, [birthday]) parties = list(p1.bachelorparty_set.all()) self.assertEqual(parties, []) parties = list(p2.bachelorparty_set.all()) self.assertEqual(parties, [bachelor]) # A subclass of a subclass of an abstract model doesn't get its own # accessor. self.assertFalse(hasattr(p2, 'messybachelorparty_set')) # ... but it does inherit the m2m from its parent messy = MessyBachelorParty.objects.create(name='Bachelor party for Dave') messy.attendees.set([p4]) messy_parent = messy.bachelorparty_ptr parties = list(p4.bachelorparty_set.all()) self.assertEqual(parties, [bachelor, messy_parent]) def test_abstract_base_class_m2m_relation_inheritance_manager_reused(self): p1 = Person.objects.create(name='Alice') self.assertIs(p1.birthdayparty_set, p1.birthdayparty_set) self.assertIs(p1.bachelorparty_set, p1.bachelorparty_set) def test_abstract_verbose_name_plural_inheritance(self): """ verbose_name_plural correctly inherited from ABC if inheritance chain includes an abstract model. """ # Regression test for #11369: verbose_name_plural should be inherited # from an ABC even when there are one or more intermediate # abstract models in the inheritance chain, for consistency with # verbose_name. self.assertEqual( InternalCertificationAudit._meta.verbose_name_plural, 'Audits' ) def test_inherited_nullable_exclude(self): obj = SelfRefChild.objects.create(child_data=37, parent_data=42) self.assertQuerysetEqual( SelfRefParent.objects.exclude(self_data=72), [ obj.pk ], attrgetter("pk") ) self.assertQuerysetEqual( SelfRefChild.objects.exclude(self_data=72), [ obj.pk ], attrgetter("pk") ) def test_concrete_abstract_concrete_pk(self): """ Primary key set correctly with concrete->abstract->concrete inheritance. """ # Regression test for #13987: Primary key is incorrectly determined # when more than one model has a concrete->abstract->concrete # inheritance hierarchy. self.assertEqual( len([field for field in BusStation._meta.local_fields if field.primary_key]), 1 ) self.assertEqual( len([field for field in TrainStation._meta.local_fields if field.primary_key]), 1 ) self.assertIs(BusStation._meta.pk.model, BusStation) self.assertIs(TrainStation._meta.pk.model, TrainStation) def test_inherited_unique_field_with_form(self): """ A model which has different primary key for the parent model passes unique field checking correctly (#17615). """ class ProfileForm(forms.ModelForm): class Meta: model = Profile fields = '__all__' User.objects.create(username="user_only") p = Profile.objects.create(username="user_with_profile") form = ProfileForm({'username': "user_with_profile", 'extra': "hello"}, instance=p) self.assertTrue(form.is_valid()) def test_inheritance_joins(self): # Test for #17502 - check that filtering through two levels of # inheritance chain doesn't generate extra joins. 
qs = ItalianRestaurant.objects.all() self.assertEqual(str(qs.query).count('JOIN'), 2) qs = ItalianRestaurant.objects.filter(name='foo') self.assertEqual(str(qs.query).count('JOIN'), 2) @expectedFailure def test_inheritance_values_joins(self): # It would be nice (but not too important) to skip the middle join in # this case. Skipping is possible as nothing from the middle model is # used in the qs and top contains direct pointer to the bottom model. qs = ItalianRestaurant.objects.values_list('serves_gnocchi').filter(name='foo') self.assertEqual(str(qs.query).count('JOIN'), 1) def test_issue_21554(self): senator = Senator.objects.create(name='John Doe', title='X', state='Y') senator = Senator.objects.get(pk=senator.pk) self.assertEqual(senator.name, 'John Doe') self.assertEqual(senator.title, 'X') self.assertEqual(senator.state, 'Y') def test_inheritance_resolve_columns(self): Restaurant.objects.create(name='Bobs Cafe', address="Somewhere", serves_pizza=True, serves_hot_dogs=True) p = Place.objects.all().select_related('restaurant')[0] self.assertIsInstance(p.restaurant.serves_pizza, bool) def test_inheritance_select_related(self): # Regression test for #7246 r1 = Restaurant.objects.create( name="Nobu", serves_hot_dogs=True, serves_pizza=False ) r2 = Restaurant.objects.create( name="Craft", serves_hot_dogs=False, serves_pizza=True ) Supplier.objects.create(name="John", restaurant=r1) Supplier.objects.create(name="Jane", restaurant=r2) self.assertQuerysetEqual( Supplier.objects.order_by("name").select_related(), [ "Jane", "John", ], attrgetter("name") ) jane = Supplier.objects.order_by("name").select_related("restaurant")[0] self.assertEqual(jane.restaurant.name, "Craft") def test_filter_with_parent_fk(self): r = Restaurant.objects.create() s = Supplier.objects.create(restaurant=r) # The mismatch between Restaurant and Place is intentional (#28175). self.assertSequenceEqual(Supplier.objects.filter(restaurant__in=Place.objects.all()), [s]) def test_ptr_accessor_assigns_state(self): r = Restaurant.objects.create() self.assertIs(r.place_ptr._state.adding, False) self.assertEqual(r.place_ptr._state.db, 'default') def test_related_filtering_query_efficiency_ticket_15844(self): r = Restaurant.objects.create( name="Guido's House of Pasta", address='944 W. Fullerton', serves_hot_dogs=True, serves_pizza=False, ) s = Supplier.objects.create(restaurant=r) with self.assertNumQueries(1): self.assertSequenceEqual(Supplier.objects.filter(restaurant=r), [s]) with self.assertNumQueries(1): self.assertSequenceEqual(r.supplier_set.all(), [s]) def test_queries_on_parent_access(self): italian_restaurant = ItalianRestaurant.objects.create( name="Guido's House of Pasta", address='944 W. Fullerton', serves_hot_dogs=True, serves_pizza=False, serves_gnocchi=True, ) # No queries are made when accessing the parent objects. italian_restaurant = ItalianRestaurant.objects.get(pk=italian_restaurant.pk) with self.assertNumQueries(0): restaurant = italian_restaurant.restaurant_ptr self.assertEqual(restaurant.place_ptr.restaurant, restaurant) self.assertEqual(restaurant.italianrestaurant, italian_restaurant) # One query is made when accessing the parent objects when the instance # is deferred. 
italian_restaurant = ItalianRestaurant.objects.only('serves_gnocchi').get(pk=italian_restaurant.pk) with self.assertNumQueries(1): restaurant = italian_restaurant.restaurant_ptr self.assertEqual(restaurant.place_ptr.restaurant, restaurant) self.assertEqual(restaurant.italianrestaurant, italian_restaurant) # No queries are made when accessing the parent objects when the # instance has deferred a field not present in the parent table. italian_restaurant = ItalianRestaurant.objects.defer('serves_gnocchi').get(pk=italian_restaurant.pk) with self.assertNumQueries(0): restaurant = italian_restaurant.restaurant_ptr self.assertEqual(restaurant.place_ptr.restaurant, restaurant) self.assertEqual(restaurant.italianrestaurant, italian_restaurant) def test_id_field_update_on_ancestor_change(self): place1 = Place.objects.create(name='House of Pasta', address='944 Fullerton') place2 = Place.objects.create(name='House of Pizza', address='954 Fullerton') place3 = Place.objects.create(name='Burger house', address='964 Fullerton') restaurant1 = Restaurant.objects.create( place_ptr=place1, serves_hot_dogs=True, serves_pizza=False, ) restaurant2 = Restaurant.objects.create( place_ptr=place2, serves_hot_dogs=True, serves_pizza=False, ) italian_restaurant = ItalianRestaurant.objects.create( restaurant_ptr=restaurant1, serves_gnocchi=True, ) # Changing the parent of a restaurant changes the restaurant's ID & PK. restaurant1.place_ptr = place3 self.assertEqual(restaurant1.pk, place3.pk) self.assertEqual(restaurant1.id, place3.id) self.assertEqual(restaurant1.pk, restaurant1.id) restaurant1.place_ptr = None self.assertIsNone(restaurant1.pk) self.assertIsNone(restaurant1.id) # Changing the parent of an italian restaurant changes the restaurant's # ID & PK. italian_restaurant.restaurant_ptr = restaurant2 self.assertEqual(italian_restaurant.pk, restaurant2.pk) self.assertEqual(italian_restaurant.id, restaurant2.id) self.assertEqual(italian_restaurant.pk, italian_restaurant.id) italian_restaurant.restaurant_ptr = None self.assertIsNone(italian_restaurant.pk) self.assertIsNone(italian_restaurant.id) def test_create_new_instance_with_pk_equals_none(self): p1 = Profile.objects.create(username='john') p2 = User.objects.get(pk=p1.user_ptr_id).profile # Create a new profile by setting pk = None. p2.pk = None p2.user_ptr_id = None p2.username = 'bill' p2.save() self.assertEqual(Profile.objects.count(), 2) self.assertEqual(User.objects.get(pk=p1.user_ptr_id).username, 'john') def test_create_new_instance_with_pk_equals_none_multi_inheritance(self): c1 = Congressman.objects.create(state='PA', name='John', title='senator 1') c2 = Person.objects.get(pk=c1.pk).congressman # Create a new congressman by setting pk = None. c2.pk = None c2.id = None c2.politician_ptr_id = None c2.name = 'Bill' c2.title = 'senator 2' c2.save() self.assertEqual(Congressman.objects.count(), 2) self.assertEqual(Person.objects.get(pk=c1.pk).name, 'John') self.assertEqual( Politician.objects.get(pk=c1.politician_ptr_id).title, 'senator 1', )
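# --- Illustrative sketch (not part of the original test module) ---
# The tests above rely on Django's multi-table inheritance: each concrete child
# carries a OneToOneField(parent_link=True) back to its parent, generated
# automatically as `<parent>_ptr` unless declared explicitly, and child.pk is
# the parent row's pk by default. The model names below are hypothetical
# stand-ins, and reusing this app_label is an assumption made only so the
# sketch is importable outside an installed app.
from django.db import models


class PlaceSketch(models.Model):
    name = models.CharField(max_length=50)

    class Meta:
        app_label = 'model_inheritance_regress'


class RestaurantSketch(PlaceSketch):
    # The implicit `placesketch_ptr` OneToOneField is the primary key; saving a
    # RestaurantSketch writes one row to each table, both sharing the same pk.
    serves_pizza = models.BooleanField(default=False)

    class Meta:
        app_label = 'model_inheritance_regress'


class ParkingLotSketch(PlaceSketch):
    # An explicit parent link does not have to be the child's primary key,
    # which is the shape test_use_explicit_o2o_to_parent_as_pk exercises.
    primary_key = models.AutoField(primary_key=True)
    parent = models.OneToOneField(PlaceSketch, models.CASCADE, parent_link=True)

    class Meta:
        app_label = 'model_inheritance_regress'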
ab2db4acc3d908893b8fc6b9947169b76c12ba2884119b54f7ef8c7e55c1a495
import datetime from decimal import Decimal from unittest import mock from django.core.exceptions import FieldError from django.db import NotSupportedError, connection from django.db.models import ( Avg, BooleanField, Case, F, Func, IntegerField, Max, Min, OuterRef, Q, RowRange, Subquery, Sum, Value, ValueRange, When, Window, WindowFrame, ) from django.db.models.fields.json import KeyTextTransform, KeyTransform from django.db.models.functions import ( Cast, CumeDist, DenseRank, ExtractYear, FirstValue, Lag, LastValue, Lead, NthValue, Ntile, PercentRank, Rank, RowNumber, Upper, ) from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from .models import Detail, Employee @skipUnlessDBFeature('supports_over_clause') class WindowFunctionTests(TestCase): @classmethod def setUpTestData(cls): Employee.objects.bulk_create([ Employee( name=e[0], salary=e[1], department=e[2], hire_date=e[3], age=e[4], bonus=Decimal(e[1]) / 400, ) for e in [ ('Jones', 45000, 'Accounting', datetime.datetime(2005, 11, 1), 20), ('Williams', 37000, 'Accounting', datetime.datetime(2009, 6, 1), 20), ('Jenson', 45000, 'Accounting', datetime.datetime(2008, 4, 1), 20), ('Adams', 50000, 'Accounting', datetime.datetime(2013, 7, 1), 50), ('Smith', 55000, 'Sales', datetime.datetime(2007, 6, 1), 30), ('Brown', 53000, 'Sales', datetime.datetime(2009, 9, 1), 30), ('Johnson', 40000, 'Marketing', datetime.datetime(2012, 3, 1), 30), ('Smith', 38000, 'Marketing', datetime.datetime(2009, 10, 1), 20), ('Wilkinson', 60000, 'IT', datetime.datetime(2011, 3, 1), 40), ('Moore', 34000, 'IT', datetime.datetime(2013, 8, 1), 40), ('Miller', 100000, 'Management', datetime.datetime(2005, 6, 1), 40), ('Johnson', 80000, 'Management', datetime.datetime(2005, 7, 1), 50), ] ]) def test_dense_rank(self): tests = [ ExtractYear(F('hire_date')).asc(), F('hire_date__year').asc(), 'hire_date__year', ] for order_by in tests: with self.subTest(order_by=order_by): qs = Employee.objects.annotate( rank=Window(expression=DenseRank(), order_by=order_by), ) self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 1), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 1), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 1), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 2), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 3), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 4), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 4), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 4), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 5), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 6), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 7), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), 7), ], lambda entry: ( entry.name, entry.salary, entry.department, entry.hire_date, entry.rank, ), ordered=False) def test_department_salary(self): qs = Employee.objects.annotate(department_sum=Window( expression=Sum('salary'), partition_by=F('department'), order_by=[F('hire_date').asc()], )).order_by('department', 'department_sum') self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', 45000, 45000), ('Jenson', 'Accounting', 45000, 90000), ('Williams', 'Accounting', 37000, 127000), ('Adams', 'Accounting', 50000, 177000), ('Wilkinson', 'IT', 60000, 60000), ('Moore', 'IT', 34000, 94000), ('Miller', 'Management', 100000, 100000), ('Johnson', 'Management', 80000, 180000), ('Smith', 'Marketing', 38000, 38000), ('Johnson', 'Marketing', 40000, 78000), ('Smith', 
'Sales', 55000, 55000), ('Brown', 'Sales', 53000, 108000), ], lambda entry: (entry.name, entry.department, entry.salary, entry.department_sum)) def test_rank(self): """ Rank the employees based on the year they're were hired. Since there are multiple employees hired in different years, this will contain gaps. """ qs = Employee.objects.annotate(rank=Window( expression=Rank(), order_by=F('hire_date__year').asc(), )) self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 1), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 1), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 1), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 4), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 5), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 6), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 6), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 6), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 9), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 10), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 11), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), 11), ], lambda entry: (entry.name, entry.salary, entry.department, entry.hire_date, entry.rank), ordered=False) def test_row_number(self): """ The row number window function computes the number based on the order in which the tuples were inserted. Depending on the backend, Oracle requires an ordering-clause in the Window expression. """ qs = Employee.objects.annotate(row_number=Window( expression=RowNumber(), order_by=F('pk').asc(), )).order_by('pk') self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', 1), ('Williams', 'Accounting', 2), ('Jenson', 'Accounting', 3), ('Adams', 'Accounting', 4), ('Smith', 'Sales', 5), ('Brown', 'Sales', 6), ('Johnson', 'Marketing', 7), ('Smith', 'Marketing', 8), ('Wilkinson', 'IT', 9), ('Moore', 'IT', 10), ('Miller', 'Management', 11), ('Johnson', 'Management', 12), ], lambda entry: (entry.name, entry.department, entry.row_number)) def test_row_number_no_ordering(self): """ The row number window function computes the number based on the order in which the tuples were inserted. """ # Add a default ordering for consistent results across databases. 
qs = Employee.objects.annotate(row_number=Window( expression=RowNumber(), )).order_by('pk') self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', 1), ('Williams', 'Accounting', 2), ('Jenson', 'Accounting', 3), ('Adams', 'Accounting', 4), ('Smith', 'Sales', 5), ('Brown', 'Sales', 6), ('Johnson', 'Marketing', 7), ('Smith', 'Marketing', 8), ('Wilkinson', 'IT', 9), ('Moore', 'IT', 10), ('Miller', 'Management', 11), ('Johnson', 'Management', 12), ], lambda entry: (entry.name, entry.department, entry.row_number)) def test_avg_salary_department(self): qs = Employee.objects.annotate(avg_salary=Window( expression=Avg('salary'), order_by=F('department').asc(), partition_by='department', )).order_by('department', '-salary', 'name') self.assertQuerysetEqual(qs, [ ('Adams', 50000, 'Accounting', 44250.00), ('Jenson', 45000, 'Accounting', 44250.00), ('Jones', 45000, 'Accounting', 44250.00), ('Williams', 37000, 'Accounting', 44250.00), ('Wilkinson', 60000, 'IT', 47000.00), ('Moore', 34000, 'IT', 47000.00), ('Miller', 100000, 'Management', 90000.00), ('Johnson', 80000, 'Management', 90000.00), ('Johnson', 40000, 'Marketing', 39000.00), ('Smith', 38000, 'Marketing', 39000.00), ('Smith', 55000, 'Sales', 54000.00), ('Brown', 53000, 'Sales', 54000.00), ], transform=lambda row: (row.name, row.salary, row.department, row.avg_salary)) def test_lag(self): """ Compute the difference between an employee's salary and the next highest salary in the employee's department. Return None if the employee has the lowest salary. """ qs = Employee.objects.annotate(lag=Window( expression=Lag(expression='salary', offset=1), partition_by=F('department'), order_by=[F('salary').asc(), F('name').asc()], )).order_by('department', F('salary').asc(), F('name').asc()) self.assertQuerysetEqual(qs, [ ('Williams', 37000, 'Accounting', None), ('Jenson', 45000, 'Accounting', 37000), ('Jones', 45000, 'Accounting', 45000), ('Adams', 50000, 'Accounting', 45000), ('Moore', 34000, 'IT', None), ('Wilkinson', 60000, 'IT', 34000), ('Johnson', 80000, 'Management', None), ('Miller', 100000, 'Management', 80000), ('Smith', 38000, 'Marketing', None), ('Johnson', 40000, 'Marketing', 38000), ('Brown', 53000, 'Sales', None), ('Smith', 55000, 'Sales', 53000), ], transform=lambda row: (row.name, row.salary, row.department, row.lag)) def test_lag_decimalfield(self): qs = Employee.objects.annotate(lag=Window( expression=Lag(expression='bonus', offset=1), partition_by=F('department'), order_by=[F('bonus').asc(), F('name').asc()], )).order_by('department', F('bonus').asc(), F('name').asc()) self.assertQuerysetEqual(qs, [ ('Williams', 92.5, 'Accounting', None), ('Jenson', 112.5, 'Accounting', 92.5), ('Jones', 112.5, 'Accounting', 112.5), ('Adams', 125, 'Accounting', 112.5), ('Moore', 85, 'IT', None), ('Wilkinson', 150, 'IT', 85), ('Johnson', 200, 'Management', None), ('Miller', 250, 'Management', 200), ('Smith', 95, 'Marketing', None), ('Johnson', 100, 'Marketing', 95), ('Brown', 132.5, 'Sales', None), ('Smith', 137.5, 'Sales', 132.5), ], transform=lambda row: (row.name, row.bonus, row.department, row.lag)) def test_first_value(self): qs = Employee.objects.annotate(first_value=Window( expression=FirstValue('salary'), partition_by=F('department'), order_by=F('hire_date').asc(), )).order_by('department', 'hire_date') self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 45000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 45000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 45000), ('Adams', 50000, 
'Accounting', datetime.date(2013, 7, 1), 45000), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 60000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 60000), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 100000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 100000), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 38000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 38000), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 55000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 55000), ], lambda row: (row.name, row.salary, row.department, row.hire_date, row.first_value)) def test_last_value(self): qs = Employee.objects.annotate(last_value=Window( expression=LastValue('hire_date'), partition_by=F('department'), order_by=F('hire_date').asc(), )) self.assertQuerysetEqual(qs, [ ('Adams', 'Accounting', datetime.date(2013, 7, 1), 50000, datetime.date(2013, 7, 1)), ('Jenson', 'Accounting', datetime.date(2008, 4, 1), 45000, datetime.date(2008, 4, 1)), ('Jones', 'Accounting', datetime.date(2005, 11, 1), 45000, datetime.date(2005, 11, 1)), ('Williams', 'Accounting', datetime.date(2009, 6, 1), 37000, datetime.date(2009, 6, 1)), ('Moore', 'IT', datetime.date(2013, 8, 1), 34000, datetime.date(2013, 8, 1)), ('Wilkinson', 'IT', datetime.date(2011, 3, 1), 60000, datetime.date(2011, 3, 1)), ('Miller', 'Management', datetime.date(2005, 6, 1), 100000, datetime.date(2005, 6, 1)), ('Johnson', 'Management', datetime.date(2005, 7, 1), 80000, datetime.date(2005, 7, 1)), ('Johnson', 'Marketing', datetime.date(2012, 3, 1), 40000, datetime.date(2012, 3, 1)), ('Smith', 'Marketing', datetime.date(2009, 10, 1), 38000, datetime.date(2009, 10, 1)), ('Brown', 'Sales', datetime.date(2009, 9, 1), 53000, datetime.date(2009, 9, 1)), ('Smith', 'Sales', datetime.date(2007, 6, 1), 55000, datetime.date(2007, 6, 1)), ], transform=lambda row: (row.name, row.department, row.hire_date, row.salary, row.last_value), ordered=False) def test_function_list_of_values(self): qs = Employee.objects.annotate(lead=Window( expression=Lead(expression='salary'), order_by=[F('hire_date').asc(), F('name').desc()], partition_by='department', )).values_list('name', 'salary', 'department', 'hire_date', 'lead') \ .order_by('department', F('hire_date').asc(), F('name').desc()) self.assertNotIn('GROUP BY', str(qs.query)) self.assertSequenceEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 45000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 37000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 50000), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), None), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 34000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), None), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 80000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), None), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 40000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), None), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 53000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), None), ]) def test_min_department(self): """An alternative way to specify a query for FirstValue.""" qs = Employee.objects.annotate(min_salary=Window( expression=Min('salary'), partition_by=F('department'), order_by=[F('salary').asc(), F('name').asc()] )).order_by('department', 'salary', 'name') self.assertQuerysetEqual(qs, [ ('Williams', 'Accounting', 37000, 37000), ('Jenson', 
'Accounting', 45000, 37000), ('Jones', 'Accounting', 45000, 37000), ('Adams', 'Accounting', 50000, 37000), ('Moore', 'IT', 34000, 34000), ('Wilkinson', 'IT', 60000, 34000), ('Johnson', 'Management', 80000, 80000), ('Miller', 'Management', 100000, 80000), ('Smith', 'Marketing', 38000, 38000), ('Johnson', 'Marketing', 40000, 38000), ('Brown', 'Sales', 53000, 53000), ('Smith', 'Sales', 55000, 53000), ], lambda row: (row.name, row.department, row.salary, row.min_salary)) def test_max_per_year(self): """ Find the maximum salary awarded in the same year as the employee was hired, regardless of the department. """ qs = Employee.objects.annotate(max_salary_year=Window( expression=Max('salary'), order_by=ExtractYear('hire_date').asc(), partition_by=ExtractYear('hire_date') )).order_by(ExtractYear('hire_date'), 'salary') self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', 45000, 2005, 100000), ('Johnson', 'Management', 80000, 2005, 100000), ('Miller', 'Management', 100000, 2005, 100000), ('Smith', 'Sales', 55000, 2007, 55000), ('Jenson', 'Accounting', 45000, 2008, 45000), ('Williams', 'Accounting', 37000, 2009, 53000), ('Smith', 'Marketing', 38000, 2009, 53000), ('Brown', 'Sales', 53000, 2009, 53000), ('Wilkinson', 'IT', 60000, 2011, 60000), ('Johnson', 'Marketing', 40000, 2012, 40000), ('Moore', 'IT', 34000, 2013, 50000), ('Adams', 'Accounting', 50000, 2013, 50000), ], lambda row: (row.name, row.department, row.salary, row.hire_date.year, row.max_salary_year)) def test_cume_dist(self): """ Compute the cumulative distribution for the employees based on the salary in increasing order. Equal to rank/total number of rows (12). """ qs = Employee.objects.annotate(cume_dist=Window( expression=CumeDist(), order_by=F('salary').asc(), )).order_by('salary', 'name') # Round result of cume_dist because Oracle uses greater precision. 
self.assertQuerysetEqual(qs, [ ('Moore', 'IT', 34000, 0.0833333333), ('Williams', 'Accounting', 37000, 0.1666666667), ('Smith', 'Marketing', 38000, 0.25), ('Johnson', 'Marketing', 40000, 0.3333333333), ('Jenson', 'Accounting', 45000, 0.5), ('Jones', 'Accounting', 45000, 0.5), ('Adams', 'Accounting', 50000, 0.5833333333), ('Brown', 'Sales', 53000, 0.6666666667), ('Smith', 'Sales', 55000, 0.75), ('Wilkinson', 'IT', 60000, 0.8333333333), ('Johnson', 'Management', 80000, 0.9166666667), ('Miller', 'Management', 100000, 1), ], lambda row: (row.name, row.department, row.salary, round(row.cume_dist, 10))) def test_nthvalue(self): qs = Employee.objects.annotate( nth_value=Window(expression=NthValue( expression='salary', nth=2), order_by=[F('hire_date').asc(), F('name').desc()], partition_by=F('department'), ) ).order_by('department', 'hire_date', 'name') self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', datetime.date(2005, 11, 1), 45000, None), ('Jenson', 'Accounting', datetime.date(2008, 4, 1), 45000, 45000), ('Williams', 'Accounting', datetime.date(2009, 6, 1), 37000, 45000), ('Adams', 'Accounting', datetime.date(2013, 7, 1), 50000, 45000), ('Wilkinson', 'IT', datetime.date(2011, 3, 1), 60000, None), ('Moore', 'IT', datetime.date(2013, 8, 1), 34000, 34000), ('Miller', 'Management', datetime.date(2005, 6, 1), 100000, None), ('Johnson', 'Management', datetime.date(2005, 7, 1), 80000, 80000), ('Smith', 'Marketing', datetime.date(2009, 10, 1), 38000, None), ('Johnson', 'Marketing', datetime.date(2012, 3, 1), 40000, 40000), ('Smith', 'Sales', datetime.date(2007, 6, 1), 55000, None), ('Brown', 'Sales', datetime.date(2009, 9, 1), 53000, 53000), ], lambda row: (row.name, row.department, row.hire_date, row.salary, row.nth_value)) def test_lead(self): """ Determine what the next person hired in the same department makes. Because the dataset is ambiguous, the name is also part of the ordering clause. No default is provided, so None/NULL should be returned. """ qs = Employee.objects.annotate(lead=Window( expression=Lead(expression='salary'), order_by=[F('hire_date').asc(), F('name').desc()], partition_by='department', )).order_by('department', F('hire_date').asc(), F('name').desc()) self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 45000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 37000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 50000), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), None), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 34000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), None), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 80000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), None), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 40000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), None), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 53000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), None), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.lead)) def test_lead_offset(self): """ Determine what the person hired after someone makes. Due to ambiguity, the name is also included in the ordering. 
""" qs = Employee.objects.annotate(lead=Window( expression=Lead('salary', offset=2), partition_by='department', order_by=F('hire_date').asc(), )) self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 37000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 50000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), None), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), None), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), None), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), None), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), None), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), None), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), None), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), None), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), None), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), None), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.lead), ordered=False ) @skipUnlessDBFeature('supports_default_in_lead_lag') def test_lead_default(self): qs = Employee.objects.annotate(lead_default=Window( expression=Lead(expression='salary', offset=5, default=60000), partition_by=F('department'), order_by=F('department').asc(), )) self.assertEqual(list(qs.values_list('lead_default', flat=True).distinct()), [60000]) def test_ntile(self): """ Compute the group for each of the employees across the entire company, based on how high the salary is for them. There are twelve employees so it divides evenly into four groups. """ qs = Employee.objects.annotate(ntile=Window( expression=Ntile(num_buckets=4), order_by='-salary', )).order_by('ntile', '-salary', 'name') self.assertQuerysetEqual(qs, [ ('Miller', 'Management', 100000, 1), ('Johnson', 'Management', 80000, 1), ('Wilkinson', 'IT', 60000, 1), ('Smith', 'Sales', 55000, 2), ('Brown', 'Sales', 53000, 2), ('Adams', 'Accounting', 50000, 2), ('Jenson', 'Accounting', 45000, 3), ('Jones', 'Accounting', 45000, 3), ('Johnson', 'Marketing', 40000, 3), ('Smith', 'Marketing', 38000, 4), ('Williams', 'Accounting', 37000, 4), ('Moore', 'IT', 34000, 4), ], lambda x: (x.name, x.department, x.salary, x.ntile)) def test_percent_rank(self): """ Calculate the percentage rank of the employees across the entire company based on salary and name (in case of ambiguity). """ qs = Employee.objects.annotate(percent_rank=Window( expression=PercentRank(), order_by=[F('salary').asc(), F('name').asc()], )).order_by('percent_rank') # Round to account for precision differences among databases. self.assertQuerysetEqual(qs, [ ('Moore', 'IT', 34000, 0.0), ('Williams', 'Accounting', 37000, 0.0909090909), ('Smith', 'Marketing', 38000, 0.1818181818), ('Johnson', 'Marketing', 40000, 0.2727272727), ('Jenson', 'Accounting', 45000, 0.3636363636), ('Jones', 'Accounting', 45000, 0.4545454545), ('Adams', 'Accounting', 50000, 0.5454545455), ('Brown', 'Sales', 53000, 0.6363636364), ('Smith', 'Sales', 55000, 0.7272727273), ('Wilkinson', 'IT', 60000, 0.8181818182), ('Johnson', 'Management', 80000, 0.9090909091), ('Miller', 'Management', 100000, 1.0), ], transform=lambda row: (row.name, row.department, row.salary, round(row.percent_rank, 10))) def test_nth_returns_null(self): """ Find the nth row of the data set. None is returned since there are fewer than 20 rows in the test data. 
""" qs = Employee.objects.annotate(nth_value=Window( expression=NthValue('salary', nth=20), order_by=F('salary').asc() )) self.assertEqual(list(qs.values_list('nth_value', flat=True).distinct()), [None]) def test_multiple_partitioning(self): """ Find the maximum salary for each department for people hired in the same year. """ qs = Employee.objects.annotate(max=Window( expression=Max('salary'), partition_by=[F('department'), F('hire_date__year')], )).order_by('department', 'hire_date', 'name') self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 45000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 45000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 37000), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), 50000), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 60000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 34000), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 100000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 100000), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 38000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 40000), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 55000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 53000), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.max)) def test_multiple_ordering(self): """ Accumulate the salaries over the departments based on hire_date. If two people were hired on the same date in the same department, the ordering clause will render a different result for those people. """ qs = Employee.objects.annotate(sum=Window( expression=Sum('salary'), partition_by='department', order_by=[F('hire_date').asc(), F('name').asc()], )).order_by('department', 'sum') self.assertQuerysetEqual(qs, [ ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 45000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 90000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 127000), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), 177000), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 60000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 94000), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 100000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 180000), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 38000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 78000), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 55000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 108000), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.sum)) def test_related_ordering_with_count(self): qs = Employee.objects.annotate(department_sum=Window( expression=Sum('salary'), partition_by=F('department'), order_by=['classification__code'], )) self.assertEqual(qs.count(), 12) @skipUnlessDBFeature('supports_frame_range_fixed_distance') def test_range_n_preceding_and_following(self): qs = Employee.objects.annotate(sum=Window( expression=Sum('salary'), order_by=F('salary').asc(), partition_by='department', frame=ValueRange(start=-2, end=2), )) self.assertIn('RANGE BETWEEN 2 PRECEDING AND 2 FOLLOWING', str(qs.query)) self.assertQuerysetEqual(qs, [ ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 37000), ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 90000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 90000), ('Adams', 50000, 
'Accounting', datetime.date(2013, 7, 1), 50000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 53000), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 55000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 40000), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 38000), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 60000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 34000), ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 100000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 80000), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.sum), ordered=False) def test_range_unbound(self): """A query with RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING.""" qs = Employee.objects.annotate(sum=Window( expression=Sum('salary'), partition_by='age', order_by=[F('age').asc()], frame=ValueRange(start=None, end=None), )).order_by('department', 'hire_date', 'name') self.assertIn('RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING', str(qs.query)) self.assertQuerysetEqual(qs, [ ('Jones', 'Accounting', 45000, datetime.date(2005, 11, 1), 165000), ('Jenson', 'Accounting', 45000, datetime.date(2008, 4, 1), 165000), ('Williams', 'Accounting', 37000, datetime.date(2009, 6, 1), 165000), ('Adams', 'Accounting', 50000, datetime.date(2013, 7, 1), 130000), ('Wilkinson', 'IT', 60000, datetime.date(2011, 3, 1), 194000), ('Moore', 'IT', 34000, datetime.date(2013, 8, 1), 194000), ('Miller', 'Management', 100000, datetime.date(2005, 6, 1), 194000), ('Johnson', 'Management', 80000, datetime.date(2005, 7, 1), 130000), ('Smith', 'Marketing', 38000, datetime.date(2009, 10, 1), 165000), ('Johnson', 'Marketing', 40000, datetime.date(2012, 3, 1), 148000), ('Smith', 'Sales', 55000, datetime.date(2007, 6, 1), 148000), ('Brown', 'Sales', 53000, datetime.date(2009, 9, 1), 148000) ], transform=lambda row: (row.name, row.department, row.salary, row.hire_date, row.sum)) def test_subquery_row_range_rank(self): qs = Employee.objects.annotate( highest_avg_salary_date=Subquery( Employee.objects.filter( department=OuterRef('department'), ).annotate( avg_salary=Window( expression=Avg('salary'), order_by=[F('hire_date').asc()], frame=RowRange(start=-1, end=1), ), ).order_by('-avg_salary', 'hire_date').values('hire_date')[:1], ), ).order_by('department', 'name') self.assertQuerysetEqual(qs, [ ('Adams', 'Accounting', datetime.date(2005, 11, 1)), ('Jenson', 'Accounting', datetime.date(2005, 11, 1)), ('Jones', 'Accounting', datetime.date(2005, 11, 1)), ('Williams', 'Accounting', datetime.date(2005, 11, 1)), ('Moore', 'IT', datetime.date(2011, 3, 1)), ('Wilkinson', 'IT', datetime.date(2011, 3, 1)), ('Johnson', 'Management', datetime.date(2005, 6, 1)), ('Miller', 'Management', datetime.date(2005, 6, 1)), ('Johnson', 'Marketing', datetime.date(2009, 10, 1)), ('Smith', 'Marketing', datetime.date(2009, 10, 1)), ('Brown', 'Sales', datetime.date(2007, 6, 1)), ('Smith', 'Sales', datetime.date(2007, 6, 1)), ], transform=lambda row: (row.name, row.department, row.highest_avg_salary_date)) def test_row_range_rank(self): """ A query with ROWS BETWEEN UNBOUNDED PRECEDING AND 3 FOLLOWING. The resulting sum is the sum of the three next (if they exist) and all previous rows according to the ordering clause. 
""" qs = Employee.objects.annotate(sum=Window( expression=Sum('salary'), order_by=[F('hire_date').asc(), F('name').desc()], frame=RowRange(start=None, end=3), )).order_by('sum', 'hire_date') self.assertIn('ROWS BETWEEN UNBOUNDED PRECEDING AND 3 FOLLOWING', str(qs.query)) self.assertQuerysetEqual(qs, [ ('Miller', 100000, 'Management', datetime.date(2005, 6, 1), 280000), ('Johnson', 80000, 'Management', datetime.date(2005, 7, 1), 325000), ('Jones', 45000, 'Accounting', datetime.date(2005, 11, 1), 362000), ('Smith', 55000, 'Sales', datetime.date(2007, 6, 1), 415000), ('Jenson', 45000, 'Accounting', datetime.date(2008, 4, 1), 453000), ('Williams', 37000, 'Accounting', datetime.date(2009, 6, 1), 513000), ('Brown', 53000, 'Sales', datetime.date(2009, 9, 1), 553000), ('Smith', 38000, 'Marketing', datetime.date(2009, 10, 1), 603000), ('Wilkinson', 60000, 'IT', datetime.date(2011, 3, 1), 637000), ('Johnson', 40000, 'Marketing', datetime.date(2012, 3, 1), 637000), ('Adams', 50000, 'Accounting', datetime.date(2013, 7, 1), 637000), ('Moore', 34000, 'IT', datetime.date(2013, 8, 1), 637000), ], transform=lambda row: (row.name, row.salary, row.department, row.hire_date, row.sum)) @skipUnlessDBFeature('can_distinct_on_fields') def test_distinct_window_function(self): """ Window functions are not aggregates, and hence a query to filter out duplicates may be useful. """ qs = Employee.objects.annotate( sum=Window( expression=Sum('salary'), partition_by=ExtractYear('hire_date'), order_by=ExtractYear('hire_date') ), year=ExtractYear('hire_date'), ).values('year', 'sum').distinct('year').order_by('year') results = [ {'year': 2005, 'sum': 225000}, {'year': 2007, 'sum': 55000}, {'year': 2008, 'sum': 45000}, {'year': 2009, 'sum': 128000}, {'year': 2011, 'sum': 60000}, {'year': 2012, 'sum': 40000}, {'year': 2013, 'sum': 84000}, ] for idx, val in zip(range(len(results)), results): with self.subTest(result=val): self.assertEqual(qs[idx], val) def test_fail_update(self): """Window expressions can't be used in an UPDATE statement.""" msg = ( 'Window expressions are not allowed in this query (salary=<Window: ' 'Max(Col(expressions_window_employee, expressions_window.Employee.salary)) ' 'OVER (PARTITION BY Col(expressions_window_employee, ' 'expressions_window.Employee.department))>).' 
) with self.assertRaisesMessage(FieldError, msg): Employee.objects.filter(department='Management').update( salary=Window(expression=Max('salary'), partition_by='department'), ) def test_fail_insert(self): """Window expressions can't be used in an INSERT statement.""" msg = ( 'Window expressions are not allowed in this query (salary=<Window: ' 'Sum(Value(10000), order_by=OrderBy(F(pk), descending=False)) OVER ()' ) with self.assertRaisesMessage(FieldError, msg): Employee.objects.create( name='Jameson', department='Management', hire_date=datetime.date(2007, 7, 1), salary=Window(expression=Sum(Value(10000), order_by=F('pk').asc())), ) def test_window_expression_within_subquery(self): subquery_qs = Employee.objects.annotate( highest=Window(FirstValue('id'), partition_by=F('department'), order_by=F('salary').desc()) ).values('highest') highest_salary = Employee.objects.filter(pk__in=subquery_qs) self.assertCountEqual(highest_salary.values('department', 'salary'), [ {'department': 'Accounting', 'salary': 50000}, {'department': 'Sales', 'salary': 55000}, {'department': 'Marketing', 'salary': 40000}, {'department': 'IT', 'salary': 60000}, {'department': 'Management', 'salary': 100000} ]) @skipUnlessDBFeature('supports_json_field') def test_key_transform(self): Detail.objects.bulk_create([ Detail(value={'department': 'IT', 'name': 'Smith', 'salary': 37000}), Detail(value={'department': 'IT', 'name': 'Nowak', 'salary': 32000}), Detail(value={'department': 'HR', 'name': 'Brown', 'salary': 50000}), Detail(value={'department': 'HR', 'name': 'Smith', 'salary': 55000}), Detail(value={'department': 'PR', 'name': 'Moore', 'salary': 90000}), ]) tests = [ (KeyTransform('department', 'value'), KeyTransform('name', 'value')), (F('value__department'), F('value__name')), ] for partition_by, order_by in tests: with self.subTest(partition_by=partition_by, order_by=order_by): qs = Detail.objects.annotate(department_sum=Window( expression=Sum(Cast( KeyTextTransform('salary', 'value'), output_field=IntegerField(), )), partition_by=[partition_by], order_by=[order_by], )).order_by('value__department', 'department_sum') self.assertQuerysetEqual(qs, [ ('Brown', 'HR', 50000, 50000), ('Smith', 'HR', 55000, 105000), ('Nowak', 'IT', 32000, 32000), ('Smith', 'IT', 37000, 69000), ('Moore', 'PR', 90000, 90000), ], lambda entry: ( entry.value['name'], entry.value['department'], entry.value['salary'], entry.department_sum, )) def test_invalid_start_value_range(self): msg = "start argument must be a negative integer, zero, or None, but got '3'." with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=ValueRange(start=3), ))) def test_invalid_end_value_range(self): msg = "end argument must be a positive integer, zero, or None, but got '-3'." with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=ValueRange(end=-3), ))) def test_invalid_type_end_value_range(self): msg = "end argument must be a positive integer, zero, or None, but got 'a'." with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=ValueRange(end='a'), ))) def test_invalid_type_start_value_range(self): msg = "start argument must be a negative integer, zero, or None, but got 'a'." 
with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), frame=ValueRange(start='a'), ))) def test_invalid_type_end_row_range(self): msg = "end argument must be a positive integer, zero, or None, but got 'a'." with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), frame=RowRange(end='a'), ))) @skipUnlessDBFeature('only_supports_unbounded_with_preceding_and_following') def test_unsupported_range_frame_start(self): msg = '%s only supports UNBOUNDED together with PRECEDING and FOLLOWING.' % connection.display_name with self.assertRaisesMessage(NotSupportedError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=ValueRange(start=-1), ))) @skipUnlessDBFeature('only_supports_unbounded_with_preceding_and_following') def test_unsupported_range_frame_end(self): msg = '%s only supports UNBOUNDED together with PRECEDING and FOLLOWING.' % connection.display_name with self.assertRaisesMessage(NotSupportedError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=ValueRange(end=1), ))) def test_invalid_type_start_row_range(self): msg = "start argument must be a negative integer, zero, or None, but got 'a'." with self.assertRaisesMessage(ValueError, msg): list(Employee.objects.annotate(test=Window( expression=Sum('salary'), order_by=F('hire_date').asc(), frame=RowRange(start='a'), ))) class WindowUnsupportedTests(TestCase): def test_unsupported_backend(self): msg = 'This backend does not support window expressions.' with mock.patch.object(connection.features, 'supports_over_clause', False): with self.assertRaisesMessage(NotSupportedError, msg): Employee.objects.annotate(dense_rank=Window(expression=DenseRank())).get() class NonQueryWindowTests(SimpleTestCase): def test_window_repr(self): self.assertEqual( repr(Window(expression=Sum('salary'), partition_by='department')), '<Window: Sum(F(salary)) OVER (PARTITION BY F(department))>' ) self.assertEqual( repr(Window(expression=Avg('salary'), order_by=F('department').asc())), '<Window: Avg(F(salary)) OVER (OrderByList(OrderBy(F(department), descending=False)))>' ) def test_window_frame_repr(self): self.assertEqual( repr(RowRange(start=-1)), '<RowRange: ROWS BETWEEN 1 PRECEDING AND UNBOUNDED FOLLOWING>' ) self.assertEqual( repr(ValueRange(start=None, end=1)), '<ValueRange: RANGE BETWEEN UNBOUNDED PRECEDING AND 1 FOLLOWING>' ) self.assertEqual( repr(ValueRange(start=0, end=0)), '<ValueRange: RANGE BETWEEN CURRENT ROW AND CURRENT ROW>' ) self.assertEqual( repr(RowRange(start=0, end=0)), '<RowRange: ROWS BETWEEN CURRENT ROW AND CURRENT ROW>' ) def test_empty_group_by_cols(self): window = Window(expression=Sum('pk')) self.assertEqual(window.get_group_by_cols(), []) self.assertFalse(window.contains_aggregate) def test_frame_empty_group_by_cols(self): frame = WindowFrame() self.assertEqual(frame.get_group_by_cols(), []) def test_frame_window_frame_notimplemented(self): frame = WindowFrame() msg = 'Subclasses must implement window_frame_start_end().' 
with self.assertRaisesMessage(NotImplementedError, msg): frame.window_frame_start_end(None, None, None) def test_invalid_filter(self): msg = 'Window is disallowed in the filter clause' qs = Employee.objects.annotate(dense_rank=Window(expression=DenseRank())) with self.assertRaisesMessage(NotSupportedError, msg): qs.filter(dense_rank__gte=1) with self.assertRaisesMessage(NotSupportedError, msg): qs.annotate(inc_rank=F('dense_rank') + Value(1)).filter(inc_rank__gte=1) with self.assertRaisesMessage(NotSupportedError, msg): qs.filter(id=F('dense_rank')) with self.assertRaisesMessage(NotSupportedError, msg): qs.filter(id=Func('dense_rank', 2, function='div')) with self.assertRaisesMessage(NotSupportedError, msg): qs.annotate(total=Sum('dense_rank', filter=Q(name='Jones'))).filter(total=1) def test_conditional_annotation(self): qs = Employee.objects.annotate( dense_rank=Window(expression=DenseRank()), ).annotate( equal=Case( When(id=F('dense_rank'), then=Value(True)), default=Value(False), output_field=BooleanField(), ), ) # The SQL standard disallows referencing window functions in the WHERE # clause. msg = 'Window is disallowed in the filter clause' with self.assertRaisesMessage(NotSupportedError, msg): qs.filter(equal=True) def test_invalid_order_by(self): msg = ( 'Window.order_by must be either a string reference to a field, an ' 'expression, or a list or tuple of them.' ) with self.assertRaisesMessage(ValueError, msg): Window(expression=Sum('power'), order_by={'-horse'}) def test_invalid_source_expression(self): msg = "Expression 'Upper' isn't compatible with OVER clauses." with self.assertRaisesMessage(ValueError, msg): Window(expression=Upper('name'))
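# Illustrative sketch, not part of the original test suite: how the Window,
# frame, and ordering pieces exercised above are typically combined in a real
# queryset. It assumes the Employee model and the Window/Sum/F/RowRange names
# imported at the top of this module; the helper name itself is hypothetical.
def _example_cumulative_salary_queryset():
    # Running total of salaries per department, ordered by hire date, using an
    # explicit ROWS frame from the start of the partition to the current row.
    return Employee.objects.annotate(
        cumulative_salary=Window(
            expression=Sum('salary'),
            partition_by=F('department'),
            order_by=F('hire_date').asc(),
            frame=RowRange(start=None, end=0),
        ),
    )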
import sys from types import ModuleType from django.conf import USE_L10N_DEPRECATED_MSG, Settings, settings from django.test import TestCase, ignore_warnings from django.utils.deprecation import RemovedInDjango50Warning class DeprecationTests(TestCase): msg = USE_L10N_DEPRECATED_MSG def test_override_settings_warning(self): # Warning is raised when USE_L10N is set in UserSettingsHolder (used by # the @override_settings decorator). with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): with self.settings(USE_L10N=True): pass def test_settings_init_warning(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' settings_module.USE_TZ = True settings_module.USE_L10N = False sys.modules['fake_settings_module'] = settings_module try: with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] def test_access_warning(self): with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): settings.USE_L10N # Works a second time. with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): settings.USE_L10N @ignore_warnings(category=RemovedInDjango50Warning) def test_access(self): with self.settings(USE_L10N=False): self.assertIs(settings.USE_L10N, False) # Works a second time. self.assertIs(settings.USE_L10N, False)
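# Hedged sketch, not part of the original tests: the throwaway settings-module
# pattern used in test_settings_init_warning, factored into a reusable helper.
# Only ModuleType and sys (imported above) are needed; the helper name and its
# default values are illustrative assumptions.
def _make_fake_settings_module(name='fake_settings_module', **overrides):
    module = ModuleType(name)
    module.SECRET_KEY = 'dummy-secret-key'
    module.USE_TZ = True
    for setting, value in overrides.items():
        setattr(module, setting, value)
    # Register the module so Settings(name) can import it; callers are
    # responsible for removing it from sys.modules afterwards.
    sys.modules[name] = module
    return module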
import sys from types import ModuleType from django.conf import CSRF_COOKIE_MASKED_DEPRECATED_MSG, Settings, settings from django.test import SimpleTestCase from django.utils.deprecation import RemovedInDjango50Warning class CsrfCookieMaskedDeprecationTests(SimpleTestCase): msg = CSRF_COOKIE_MASKED_DEPRECATED_MSG def test_override_settings_warning(self): with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): with self.settings(CSRF_COOKIE_MASKED=True): pass def test_settings_init_warning(self): settings_module = ModuleType('fake_settings_module') settings_module.USE_TZ = False settings_module.CSRF_COOKIE_MASKED = True sys.modules['fake_settings_module'] = settings_module try: with self.assertRaisesMessage(RemovedInDjango50Warning, self.msg): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] def test_access(self): # Warning is not raised on access. self.assertEqual(settings.CSRF_COOKIE_MASKED, False)
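# Hedged sketch, not part of the original tests: how a project still relying on
# the transitional setting might silence this specific deprecation for a whole
# test run (e.g. from a test runner setup hook). The function name is
# hypothetical; only the names imported above plus the standard-library
# warnings module are used.
def _silence_csrf_cookie_masked_deprecation():
    import warnings
    warnings.filterwarnings(
        'ignore',
        message=CSRF_COOKIE_MASKED_DEPRECATED_MSG,
        category=RemovedInDjango50Warning,
    )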
import base64 import hashlib import os import shutil import sys import tempfile as sys_tempfile import unittest from io import BytesIO, StringIO from unittest import mock from urllib.parse import quote from django.core.exceptions import SuspiciousFileOperation from django.core.files import temp as tempfile from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile from django.http.multipartparser import ( FILE, MultiPartParser, MultiPartParserError, Parser, parse_header, ) from django.test import SimpleTestCase, TestCase, client, override_settings from . import uploadhandler from .models import FileModel UNICODE_FILENAME = 'test-0123456789_中文_Orléans.jpg' MEDIA_ROOT = sys_tempfile.mkdtemp() UPLOAD_TO = os.path.join(MEDIA_ROOT, 'test_upload') CANDIDATE_TRAVERSAL_FILE_NAMES = [ '/tmp/hax0rd.txt', # Absolute path, *nix-style. 'C:\\Windows\\hax0rd.txt', # Absolute path, win-style. 'C:/Windows/hax0rd.txt', # Absolute path, broken-style. '\\tmp\\hax0rd.txt', # Absolute path, broken in a different way. '/tmp\\hax0rd.txt', # Absolute path, broken by mixing. 'subdir/hax0rd.txt', # Descendant path, *nix-style. 'subdir\\hax0rd.txt', # Descendant path, win-style. 'sub/dir\\hax0rd.txt', # Descendant path, mixed. '../../hax0rd.txt', # Relative path, *nix-style. '..\\..\\hax0rd.txt', # Relative path, win-style. '../..\\hax0rd.txt', # Relative path, mixed. '..&#x2F;hax0rd.txt', # HTML entities. '..&sol;hax0rd.txt', # HTML entities. ] CANDIDATE_INVALID_FILE_NAMES = [ '/tmp/', # Directory, *nix-style. 'c:\\tmp\\', # Directory, win-style. '/tmp/.', # Directory dot, *nix-style. 'c:\\tmp\\.', # Directory dot, *nix-style. '/tmp/..', # Parent directory, *nix-style. 'c:\\tmp\\..', # Parent directory, win-style. '', # Empty filename. ] @override_settings(MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF='file_uploads.urls', MIDDLEWARE=[]) class FileUploadTests(TestCase): @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT) def test_upload_name_is_validated(self): candidates = [ '/tmp/', '/tmp/..', '/tmp/.', ] if sys.platform == 'win32': candidates.extend([ 'c:\\tmp\\', 'c:\\tmp\\..', 'c:\\tmp\\.', ]) for file_name in candidates: with self.subTest(file_name=file_name): self.assertRaises(SuspiciousFileOperation, UploadedFile, name=file_name) def test_simple_upload(self): with open(__file__, 'rb') as fp: post_data = { 'name': 'Ringo', 'file_field': fp, } response = self.client.post('/upload/', post_data) self.assertEqual(response.status_code, 200) def test_large_upload(self): file = tempfile.NamedTemporaryFile with file(suffix=".file1") as file1, file(suffix=".file2") as file2: file1.write(b'a' * (2 ** 21)) file1.seek(0) file2.write(b'a' * (10 * 2 ** 20)) file2.seek(0) post_data = { 'name': 'Ringo', 'file_field1': file1, 'file_field2': file2, } for key in list(post_data): try: post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest() post_data[key].seek(0) except AttributeError: post_data[key + '_hash'] = hashlib.sha1(post_data[key].encode()).hexdigest() response = self.client.post('/verify/', post_data) self.assertEqual(response.status_code, 200) def _test_base64_upload(self, content, encode=base64.b64encode): payload = client.FakePayload("\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="test.txt"', 'Content-Type: application/octet-stream', 'Content-Transfer-Encoding: base64', ''])) payload.write(b'\r\n' + encode(content.encode()) + b'\r\n') payload.write('--' + 
client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/echo_content/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.json()['file'], content) def test_base64_upload(self): self._test_base64_upload("This data will be transmitted base64-encoded.") def test_big_base64_upload(self): self._test_base64_upload("Big data" * 68000) # > 512Kb def test_big_base64_newlines_upload(self): self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes) def test_unicode_file_name(self): with sys_tempfile.TemporaryDirectory() as temp_dir: # This file contains Chinese symbols and an accented char in the name. with open(os.path.join(temp_dir, UNICODE_FILENAME), 'w+b') as file1: file1.write(b'b' * (2 ** 10)) file1.seek(0) response = self.client.post('/unicode_name/', {'file_unicode': file1}) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231(self): """ Test receiving file upload when filename is encoded with RFC2231 (#22971). """ payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; filename*=UTF-8\'\'%s' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/unicode_name/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231(self): """ Test receiving file upload when filename is encoded with RFC2231 (#22971). """ payload = client.FakePayload() payload.write( '\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name*=UTF-8\'\'file_unicode; filename*=UTF-8\'\'%s' % quote( UNICODE_FILENAME ), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ]) ) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/unicode_name/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_file_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file_unicode"; ' 'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n', ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/unicode_name/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_unicode_name_rfc2231_with_double_quotes(self): payload = client.FakePayload() payload.write('\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name*="UTF-8\'\'file_unicode"; ' 'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME), 'Content-Type: application/octet-stream', '', 'You got pwnd.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n' ])) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/unicode_name/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) self.assertEqual(response.status_code, 200) def test_blank_filenames(self): """ 
        Receiving file upload when filename is blank (before and after
        sanitization) should be okay.
        """
        filenames = [
            '',  # Normalized by MultiPartParser.IE_sanitize().
            'C:\\Windows\\',  # Normalized by os.path.basename().
            '/',
            'ends-with-slash/',
        ]
        payload = client.FakePayload()
        for i, name in enumerate(filenames):
            payload.write('\r\n'.join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
                'Content-Type: application/octet-stream',
                '',
                'You got pwnd.\r\n'
            ]))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': '/echo/',
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        self.assertEqual(response.status_code, 200)

        # Empty filenames should be ignored.
        received = response.json()
        for i, name in enumerate(filenames):
            self.assertIsNone(received.get('file%s' % i))

    def test_non_printable_chars_in_file_names(self):
        file_name = 'non-\x00printable\x00\n_chars.txt\x00'
        payload = client.FakePayload()
        payload.write('\r\n'.join([
            '--' + client.BOUNDARY,
            f'Content-Disposition: form-data; name="file"; filename="{file_name}"',
            'Content-Type: application/octet-stream',
            '',
            'You got pwnd.\r\n'
        ]))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': '/echo/',
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        # Non-printable chars are sanitized.
        received = response.json()
        self.assertEqual(received['file'], 'non-printable_chars.txt')

    def test_dangerous_file_names(self):
        """Uploaded file names should be sanitized before ever reaching the view."""
        # This test simulates possible directory traversal attacks by a
        # malicious uploader. We have to do some monkeybusiness here to
        # construct a malicious payload with an invalid file name (containing
        # os.sep or os.pardir). This is similar to what an attacker would need
        # to do when trying such an attack.
        payload = client.FakePayload()
        for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES):
            payload.write('\r\n'.join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
                'Content-Type: application/octet-stream',
                '',
                'You got pwnd.\r\n'
            ]))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/echo/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        # The filenames should have been sanitized by the time they got to the
        # view.
received = response.json() for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES): got = received["file%s" % i] self.assertEqual(got, "hax0rd.txt") def test_filename_overflow(self): """File names over 256 characters (dangerous on some platforms) get fixed up.""" long_str = 'f' * 300 cases = [ # field name, filename, expected ('long_filename', '%s.txt' % long_str, '%s.txt' % long_str[:251]), ('long_extension', 'foo.%s' % long_str, '.%s' % long_str[:254]), ('no_extension', long_str, long_str[:255]), ('no_filename', '.%s' % long_str, '.%s' % long_str[:254]), ('long_everything', '%s.%s' % (long_str, long_str), '.%s' % long_str[:254]), ] payload = client.FakePayload() for name, filename, _ in cases: payload.write("\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="{}"; filename="{}"', 'Content-Type: application/octet-stream', '', 'Oops.', '' ]).format(name, filename)) payload.write('\r\n--' + client.BOUNDARY + '--\r\n') r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': "/echo/", 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) result = response.json() for name, _, expected in cases: got = result[name] self.assertEqual(expected, got, 'Mismatch for {}'.format(name)) self.assertLess(len(got), 256, "Got a long file name (%s characters)." % len(got)) def test_file_content(self): file = tempfile.NamedTemporaryFile with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file: no_content_type.write(b'no content') no_content_type.seek(0) simple_file.write(b'text content') simple_file.seek(0) simple_file.content_type = 'text/plain' string_io = StringIO('string content') bytes_io = BytesIO(b'binary content') response = self.client.post('/echo_content/', { 'no_content_type': no_content_type, 'simple_file': simple_file, 'string': string_io, 'binary': bytes_io, }) received = response.json() self.assertEqual(received['no_content_type'], 'no content') self.assertEqual(received['simple_file'], 'text content') self.assertEqual(received['string'], 'string content') self.assertEqual(received['binary'], 'binary content') def test_content_type_extra(self): """Uploaded files may have content type parameters available.""" file = tempfile.NamedTemporaryFile with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file: no_content_type.write(b'something') no_content_type.seek(0) simple_file.write(b'something') simple_file.seek(0) simple_file.content_type = 'text/plain; test-key=test_value' response = self.client.post('/echo_content_type_extra/', { 'no_content_type': no_content_type, 'simple_file': simple_file, }) received = response.json() self.assertEqual(received['no_content_type'], {}) self.assertEqual(received['simple_file'], {'test-key': 'test_value'}) def test_truncated_multipart_handled_gracefully(self): """ If passed an incomplete multipart message, MultiPartParser does not attempt to read beyond the end of the stream, and simply will handle the part that can be parsed gracefully. 
""" payload_str = "\r\n".join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="file"; filename="foo.txt"', 'Content-Type: application/octet-stream', '', 'file contents' '--' + client.BOUNDARY + '--', '', ]) payload = client.FakePayload(payload_str[:-10]) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/echo/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } self.assertEqual(self.client.request(**r).json(), {}) def test_empty_multipart_handled_gracefully(self): """ If passed an empty multipart message, MultiPartParser will return an empty QueryDict. """ r = { 'CONTENT_LENGTH': 0, 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/echo/', 'REQUEST_METHOD': 'POST', 'wsgi.input': client.FakePayload(b''), } self.assertEqual(self.client.request(**r).json(), {}) def test_custom_upload_handler(self): file = tempfile.NamedTemporaryFile with file() as smallfile, file() as bigfile: # A small file (under the 5M quota) smallfile.write(b'a' * (2 ** 21)) smallfile.seek(0) # A big file (over the quota) bigfile.write(b'a' * (10 * 2 ** 20)) bigfile.seek(0) # Small file posting should work. self.assertIn('f', self.client.post('/quota/', {'f': smallfile}).json()) # Large files don't go through. self.assertNotIn('f', self.client.post("/quota/", {'f': bigfile}).json()) def test_broken_custom_upload_handler(self): with tempfile.NamedTemporaryFile() as file: file.write(b'a' * (2 ** 21)) file.seek(0) msg = 'You cannot alter upload handlers after the upload has been processed.' with self.assertRaisesMessage(AttributeError, msg): self.client.post('/quota/broken/', {'f': file}) def test_stop_upload_temporary_file_handler(self): with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b'a') temp_file.seek(0) response = self.client.post('/temp_file/stop_upload/', {'file': temp_file}) temp_path = response.json()['temp_path'] self.assertIs(os.path.exists(temp_path), False) def test_upload_interrupted_temporary_file_handler(self): # Simulate an interrupted upload by omitting the closing boundary. class MockedParser(Parser): def __iter__(self): for item in super().__iter__(): item_type, meta_data, field_stream = item yield item_type, meta_data, field_stream if item_type == FILE: return with tempfile.NamedTemporaryFile() as temp_file: temp_file.write(b'a') temp_file.seek(0) with mock.patch( 'django.http.multipartparser.Parser', MockedParser, ): response = self.client.post( '/temp_file/upload_interrupted/', {'file': temp_file}, ) temp_path = response.json()['temp_path'] self.assertIs(os.path.exists(temp_path), False) def test_fileupload_getlist(self): file = tempfile.NamedTemporaryFile with file() as file1, file() as file2, file() as file2a: file1.write(b'a' * (2 ** 23)) file1.seek(0) file2.write(b'a' * (2 * 2 ** 18)) file2.seek(0) file2a.write(b'a' * (5 * 2 ** 20)) file2a.seek(0) response = self.client.post('/getlist_count/', { 'file1': file1, 'field1': 'test', 'field2': 'test3', 'field3': 'test5', 'field4': 'test6', 'field5': 'test7', 'file2': (file2, file2a) }) got = response.json() self.assertEqual(got.get('file1'), 1) self.assertEqual(got.get('file2'), 2) def test_fileuploads_closed_at_request_end(self): file = tempfile.NamedTemporaryFile with file() as f1, file() as f2a, file() as f2b: response = self.client.post('/fd_closing/t/', { 'file': f1, 'file2': (f2a, f2b), }) request = response.wsgi_request # The files were parsed. 
        self.assertTrue(hasattr(request, '_files'))

        file = request._files['file']
        self.assertTrue(file.closed)

        files = request._files.getlist('file2')
        self.assertTrue(files[0].closed)
        self.assertTrue(files[1].closed)

    def test_no_parsing_triggered_by_fd_closing(self):
        file = tempfile.NamedTemporaryFile
        with file() as f1, file() as f2a, file() as f2b:
            response = self.client.post('/fd_closing/f/', {
                'file': f1,
                'file2': (f2a, f2b),
            })
        request = response.wsgi_request
        # The fd closing logic doesn't trigger parsing of the stream.
        self.assertFalse(hasattr(request, '_files'))

    def test_file_error_blocking(self):
        """
        The server should not block when there are upload errors (bug #8622).
        This can happen if something -- e.g. an exception handler -- tries to
        access POST while handling an error in parsing POST. This shouldn't
        cause an infinite loop!
        """
        class POSTAccessingHandler(client.ClientHandler):
            """A handler that'll access POST during an exception."""
            def handle_uncaught_exception(self, request, resolver, exc_info):
                ret = super().handle_uncaught_exception(request, resolver, exc_info)
                request.POST  # evaluate
                return ret

        # Maybe this is a little more complicated than it needs to be; but if
        # the django.test.client.FakePayload.read() implementation changes then
        # this test would fail. So we need to know exactly what kind of error
        # it raises when there is an attempt to read more than the available bytes:
        try:
            client.FakePayload(b'a').read(2)
        except Exception as err:
            reference_error = err

        # Install the custom handler that tries to access request.POST.
        self.client.handler = POSTAccessingHandler()

        with open(__file__, 'rb') as fp:
            post_data = {
                'name': 'Ringo',
                'file_field': fp,
            }
            try:
                self.client.post('/upload_errors/', post_data)
            except reference_error.__class__ as err:
                self.assertNotEqual(
                    str(err),
                    str(reference_error),
                    "Caught a repeated exception that'll cause an infinite loop in file uploads."
                )
            except Exception as err:
                # CustomUploadError is the error that should have been raised.
                self.assertEqual(err.__class__, uploadhandler.CustomUploadError)

    def test_filename_case_preservation(self):
        """
        The storage backend shouldn't mess with the case of the filenames
        uploaded.
        """
        # Synthesize the contents of a file upload with a mixed case filename
        # so we don't have to carry such a file in the Django tests source code
        # tree.
        vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
        post_data = [
            '--%(boundary)s',
            'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
            'Content-Type: application/octet-stream',
            '',
            'file contents\n'
            '',
            '--%(boundary)s--\r\n',
        ]
        response = self.client.post(
            '/filename_case/',
            '\r\n'.join(post_data) % vars,
            'multipart/form-data; boundary=%(boundary)s' % vars
        )
        self.assertEqual(response.status_code, 200)
        id = int(response.content)
        obj = FileModel.objects.get(pk=id)
        # The name of the uploaded file and the name of the file stored
        # server-side shouldn't differ.
self.assertEqual(os.path.basename(obj.testfile.path), 'MiXeD_cAsE.txt') def test_filename_traversal_upload(self): os.makedirs(UPLOAD_TO, exist_ok=True) tests = [ '..&#x2F;test.txt', '..&sol;test.txt', ] for file_name in tests: with self.subTest(file_name=file_name): payload = client.FakePayload() payload.write( '\r\n'.join([ '--' + client.BOUNDARY, 'Content-Disposition: form-data; name="my_file"; ' 'filename="%s";' % file_name, 'Content-Type: text/plain', '', 'file contents.\r\n', '\r\n--' + client.BOUNDARY + '--\r\n', ]), ) r = { 'CONTENT_LENGTH': len(payload), 'CONTENT_TYPE': client.MULTIPART_CONTENT, 'PATH_INFO': '/upload_traversal/', 'REQUEST_METHOD': 'POST', 'wsgi.input': payload, } response = self.client.request(**r) result = response.json() self.assertEqual(response.status_code, 200) self.assertEqual(result['file_name'], 'test.txt') self.assertIs( os.path.exists(os.path.join(MEDIA_ROOT, 'test.txt')), False, ) self.assertIs( os.path.exists(os.path.join(UPLOAD_TO, 'test.txt')), True, ) @override_settings(MEDIA_ROOT=MEDIA_ROOT) class DirectoryCreationTests(SimpleTestCase): """ Tests for error handling during directory creation via _save_FIELD_file (ticket #6450) """ @classmethod def setUpClass(cls): super().setUpClass() os.makedirs(MEDIA_ROOT, exist_ok=True) cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT) def setUp(self): self.obj = FileModel() @unittest.skipIf(sys.platform == 'win32', "Python on Windows doesn't have working os.chmod().") def test_readonly_root(self): """Permission errors are not swallowed""" os.chmod(MEDIA_ROOT, 0o500) self.addCleanup(os.chmod, MEDIA_ROOT, 0o700) with self.assertRaises(PermissionError): self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', b'x'), save=False) def test_not_a_directory(self): # Create a file with the upload directory name open(UPLOAD_TO, 'wb').close() self.addCleanup(os.remove, UPLOAD_TO) msg = '%s exists and is not a directory.' % UPLOAD_TO with self.assertRaisesMessage(FileExistsError, msg): with SimpleUploadedFile('foo.txt', b'x') as file: self.obj.testfile.save('foo.txt', file, save=False) class MultiParserTests(SimpleTestCase): def test_empty_upload_handlers(self): # We're not actually parsing here; just checking if the parser properly # instantiates with empty upload handlers. 
MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1' }, StringIO('x'), [], 'utf-8') def test_invalid_content_type(self): with self.assertRaisesMessage(MultiPartParserError, 'Invalid Content-Type: text/plain'): MultiPartParser({ 'CONTENT_TYPE': 'text/plain', 'CONTENT_LENGTH': '1', }, StringIO('x'), [], 'utf-8') def test_negative_content_length(self): with self.assertRaisesMessage(MultiPartParserError, 'Invalid content length: -1'): MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': -1, }, StringIO('x'), [], 'utf-8') def test_bad_type_content_length(self): multipart_parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': 'a', }, StringIO('x'), [], 'utf-8') self.assertEqual(multipart_parser._content_length, 0) def test_sanitize_file_name(self): parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1' }, StringIO('x'), [], 'utf-8') for file_name in CANDIDATE_TRAVERSAL_FILE_NAMES: with self.subTest(file_name=file_name): self.assertEqual(parser.sanitize_file_name(file_name), 'hax0rd.txt') def test_sanitize_invalid_file_name(self): parser = MultiPartParser({ 'CONTENT_TYPE': 'multipart/form-data; boundary=_foo', 'CONTENT_LENGTH': '1', }, StringIO('x'), [], 'utf-8') for file_name in CANDIDATE_INVALID_FILE_NAMES: with self.subTest(file_name=file_name): self.assertIsNone(parser.sanitize_file_name(file_name)) def test_rfc2231_parsing(self): test_data = ( (b"Content-Type: application/x-stuff; title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A", "This is ***fun***"), (b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html", "foo-ä.html"), (b"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html", "foo-ä.html"), ) for raw_line, expected_title in test_data: parsed = parse_header(raw_line) self.assertEqual(parsed[1]['title'], expected_title) def test_rfc2231_wrong_title(self): """ Test wrongly formatted RFC 2231 headers (missing double single quotes). Parsing should not crash (#24209). """ test_data = ( (b"Content-Type: application/x-stuff; title*='This%20is%20%2A%2A%2Afun%2A%2A%2A", b"'This%20is%20%2A%2A%2Afun%2A%2A%2A"), (b"Content-Type: application/x-stuff; title*='foo.html", b"'foo.html"), (b"Content-Type: application/x-stuff; title*=bar.html", b"bar.html"), ) for raw_line, expected_title in test_data: parsed = parse_header(raw_line) self.assertEqual(parsed[1]['title'], expected_title)
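# Small illustrative sketch, not part of the original suite, of the
# parse_header() behaviour the two RFC 2231 tests above rely on: ordinary
# quoted parameters come back as bytes with the surrounding quotes stripped,
# while correctly encoded "name*=charset''value" parameters are decoded to
# str. The function name is hypothetical; parse_header is imported above.
def _example_parse_header_usage():
    _, plain = parse_header(b'Content-Type: application/x-stuff; title="plain value"')
    _, encoded = parse_header(b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html")
    # Expected: plain['title'] == b'plain value', encoded['title'] == 'foo-ä.html'.
    return plain['title'], encoded['title']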