fc6e2fac65a89dc26c389996f01170d5a0ae5d2a99b8c3671cc187f827debfb2
from functools import wraps
from django.db import IntegrityError, connections, transaction
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.test.testcases import TestData
from django.utils.deprecation import RemovedInDjango41Warning
from .models import Car, Person, PossessedCar
class TestTestCase(TestCase):
@skipUnlessDBFeature('can_defer_constraint_checks')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fixture_teardown_checks_constraints(self):
rollback_atomics = self._rollback_atomics
self._rollback_atomics = lambda connection: None # noop
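# Disabling the usual atomic rollback lets _fixture_teardown() below actually run
# its deferred constraint checks against the dangling foreign keys created here,
# which is what raises the expected IntegrityError.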
try:
car = PossessedCar.objects.create(car_id=1, belongs_to_id=1)
with self.assertRaises(IntegrityError), transaction.atomic():
self._fixture_teardown()
car.delete()
finally:
self._rollback_atomics = rollback_atomics
def test_disallowed_database_connection(self):
message = (
"Database connections to 'other' are not allowed in this test. "
"Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
"ensure proper test isolation and silence this failure."
)
with self.assertRaisesMessage(AssertionError, message):
connections['other'].connect()
with self.assertRaisesMessage(AssertionError, message):
connections['other'].temporary_connection()
def test_disallowed_database_queries(self):
message = (
"Database queries to 'other' are not allowed in this test. "
"Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
"ensure proper test isolation and silence this failure."
)
with self.assertRaisesMessage(AssertionError, message):
Car.objects.using('other').get()
def test_reset_sequences(self):
old_reset_sequences = self.reset_sequences
self.reset_sequences = True
msg = 'reset_sequences cannot be used on TestCase instances'
try:
with self.assertRaisesMessage(TypeError, msg):
self._fixture_setup()
finally:
self.reset_sequences = old_reset_sequences
class NonDeepCopyAble:
def __deepcopy__(self, memo):
raise TypeError
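# Decorator for the tests below: wraps a test method in assertNumQueries(0) so the
# test fails if accessing the memoized setUpTestData() attributes hits the database.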
def assert_no_queries(test):
@wraps(test)
def inner(self):
with self.assertNumQueries(0):
test(self)
return inner
class TestDataTests(TestCase):
# setUpTestData() re-assignments are also wrapped in TestData.
jim_douglas = None
@classmethod
def setUpTestData(cls):
cls.jim_douglas = Person.objects.create(name='Jim Douglas')
cls.car = Car.objects.create(name='1963 Volkswagen Beetle')
cls.herbie = cls.jim_douglas.possessed_cars.create(
car=cls.car,
belongs_to=cls.jim_douglas,
)
cls.non_deepcopy_able = NonDeepCopyAble()
@assert_no_queries
def test_class_attribute_equality(self):
"""Class level test data is equal to instance level test data."""
self.assertEqual(self.jim_douglas, self.__class__.jim_douglas)
@assert_no_queries
def test_class_attribute_identity(self):
"""
Class level test data is not identical to instance level test data.
"""
self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas)
@assert_no_queries
def test_identity_preservation(self):
"""Identity of test data is preserved between accesses."""
self.assertIs(self.jim_douglas, self.jim_douglas)
@assert_no_queries
def test_known_related_objects_identity_preservation(self):
"""Known related objects identity is preserved."""
self.assertIs(self.herbie.car, self.car)
self.assertIs(self.herbie.belongs_to, self.jim_douglas)
@ignore_warnings(category=RemovedInDjango41Warning)
def test_undeepcopyable(self):
self.assertIs(self.non_deepcopy_able, self.__class__.non_deepcopy_able)
def test_undeepcopyable_warning(self):
msg = (
"Assigning objects which don't support copy.deepcopy() during "
"setUpTestData() is deprecated. Either assign the "
"non_deepcopy_able attribute during setUpClass() or setUp(), or "
"add support for deepcopy() to "
"test_utils.test_testcase.TestDataTests.non_deepcopy_able."
)
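# Merely accessing the attribute triggers the deprecation warning raised by the
# TestData wrapper.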
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
self.non_deepcopy_able
def test_repr(self):
self.assertEqual(
repr(TestData('attr', 'value')),
"<TestData: name='attr', data='value'>",
)
class SetupTestDataIsolationTests(TestCase):
"""
In-memory data isolation is respected for model instances assigned to class
attributes during setUpTestData.
"""
@classmethod
def setUpTestData(cls):
cls.car = Car.objects.create(name='Volkswagen Beetle')
def test_car_name_deutsch(self):
self.assertEqual(self.car.name, 'Volkswagen Beetle')
self.car.name = 'VW Käfer'
self.car.save()
def test_car_name_french(self):
self.assertEqual(self.car.name, 'Volkswagen Beetle')
self.car.name = 'Volkswagen Coccinelle'
self.car.save()
8bdfe04aa3f5ef15fa584fc8a7f4924082f16507d2baa70dc25cf0060ea9b471
from django.test import SimpleTestCase
from django.test.testcases import SerializeMixin
class TestSerializeMixin(SimpleTestCase):
def test_init_without_lockfile(self):
msg = (
"ExampleTests.lockfile isn't set. Set it to a unique value in the "
"base class."
)
with self.assertRaisesMessage(ValueError, msg):
class ExampleTests(SerializeMixin, SimpleTestCase):
pass
class TestSerializeMixinUse(SerializeMixin, SimpleTestCase):
lockfile = __file__
def test_usage(self):
# Running this test ensures that SerializeMixin's lock/unlock hooks ran without error.
pass
6fdc0c1db30f7e13f96e38aef662afa7ce640bf681af03298349920a7c1f94bc
import datetime
import os
import re
import unittest
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
import pytz
try:
import zoneinfo
except ImportError:
try:
from backports import zoneinfo
except ImportError:
zoneinfo = None
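# zoneinfo is in the standard library on Python 3.9+; older interpreters fall back
# to the backports.zoneinfo package. When neither is available, the zoneinfo-based
# variants below are skipped (zoneinfo is None).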
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book,
Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child,
Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper,
Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona,
Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza,
ReadOnlyRelatedField, Recommendation, Recommender, RelatedPrepopulated,
RelatedWithUUIDPKModel, Report, Restaurant, RowLevelChangePermissionModel,
SecretHideout, Section, ShortMessage, Simple, Song, State, Story,
SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping,
UnchangeableObject, UndeletableObject, UnorderedObject, UserProxy, Villain,
Vodcast, Whatsit, Widget, Worker, WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield pytz.timezone(iana_key).localize(dt, is_dst=None)
if zoneinfo is not None:
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context['adminform']:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field['name'] == field_name:
return field
@override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>',
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title='Article 1',
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>',
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title='Article 2',
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.color1 = Color.objects.create(value='Red', warm=True)
cls.color2 = Color.objects.create(value='Orange', warm=True)
cls.color3 = Color.objects.create(value='Blue', warm=False)
cls.color4 = Color.objects.create(value='Green', warm=False)
cls.fab1 = Fabric.objects.create(surface='x')
cls.fab2 = Fabric.objects.create(surface='y')
cls.fab3 = Fabric.objects.create(surface='plain')
cls.b1 = Book.objects.create(name='Book 1')
cls.b2 = Book.objects.create(name='Book 2')
cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
cls.pro2 = Promo.objects.create(name='Promo 2', book=cls.b2)
cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
Actor.objects.create(name='Palin', age=27)
# Post data for edit inline
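# The *-TOTAL_FORMS/INITIAL_FORMS/MAX_NUM_FORMS keys are the inline formset's
# management form; the numbered article_set-N-* keys mirror the inline rows
# rendered on the Section change page.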
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
# There is no title in the database; give one here or the formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset)
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, the app should redirect and add it.
"""
add_url = reverse('admin:admin_views_article_add')
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
self.assertContains(
response, 'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response"
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
['section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?']
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
)
self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
['super villain with ID “abc” doesn’t exist. Perhaps it was deleted?']
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""
Ensure the HTTP response from a popup is properly escaped.
"""
post_data = {
IS_POPUP_VAR: '1',
'title': 'title with a new\nline',
'content': 'some content',
'date_0': '2010-09-10',
'date_1': '14:55:39',
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
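# The newline in the title must appear escaped as a literal backslash-n in the
# popup response markup, not as a raw line break.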
self.assertContains(response, 'title with a new\\nline')
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
# callables display the callable name.
self.assertContains(response, 'column-callable_year')
self.assertContains(response, 'field-callable_year')
# Lambdas display as "lambda" + the index at which they appear in list_display.
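# (The assertions below rely on ArticleAdmin placing a lambda at index 8 of its
# list_display.)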
self.assertContains(response, 'column-lambda8')
self.assertContains(response, 'field-lambda8')
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order."
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 10})
self.assertContentBefore(
response,
'Oldest content',
'Middle content',
'Results of sorting on property are out of order.',
)
self.assertContentBefore(
response,
'Middle content',
'Newest content',
'Results of sorting on property are out of order.',
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
('order_by_expression', 9),
('order_by_f_expression', 12),
('order_by_orderby_expression', 13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse('admin:admin_views_article_changelist'),
{'o': index},
)
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
'Results of sorting on callable are out of order.'
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
'Results of sorting on callable are out of order.'
)
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
('order_by_expression', -9),
('order_by_f_expression', -12),
('order_by_orderby_expression', -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse('admin:admin_views_article_changelist'),
{'o': index},
)
self.assertContentBefore(
response, 'Middle content', 'Oldest content',
'Results of sorting on callable are out of order.'
)
self.assertContentBefore(
response, 'Newest content', 'Middle content',
'Results of sorting on callable are out of order.'
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
self.assertContentBefore(
response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order."
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(
response, td_2009, td_2008,
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, td_2008, td_2000,
"Results of sorting on ModelAdmin method are out of order."
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(
response, td_2000, td_2008,
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, td_2008, td_2009,
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
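# The changelist 'o' parameter is a dot-separated list of list_display column
# indexes (0 being the action checkbox); a '-' prefix sorts that column descending.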
# Sort by name, gender
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, 'Name')
self.assertContains(response, 'Colored name')
# Check order
self.assertContentBefore(response, 'Name', 'Colored name')
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, method on the model admin and model itself, and
other callables. See #17252.
"""
models = [(AdminOrderedField, 'adminorderedfield'),
(AdminOrderedModelMethod, 'adminorderedmodelmethod'),
(AdminOrderedAdminMethod, 'adminorderedadminmethod'),
(AdminOrderedCallable, 'adminorderedcallable')]
for model, url in models:
model.objects.create(stuff='The Last Item', order=3)
model.objects.create(stuff='The First Item', order=1)
model.objects.create(stuff='The Middle Item', order=2)
response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
# Check order of records.
self.assertContentBefore(response, 'The First Item', 'The Middle Item')
self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name='Karnataka')
City.objects.create(state=state, name='Bangalore')
response = self.client.get(reverse('admin:admin_views_city_changelist'), {})
response.context['cl'].list_display = ['id', 'name', 'state']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
response.context['cl'].list_display = ['id', 'name', 'state_id']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name='Foo')
Vodcast.objects.create(media=media)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {})
response.context['cl'].list_display = ['media']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
response.context['cl'].list_display = ['media_id']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse('admin:admin_views_thing_changelist'))
self.assertContains(
response, '<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view"
)
self.assertNotContains(
response, '<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to"
)
def test_relation_spanning_filters(self):
changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
'chap__id__exact': {
'values': [c.id for c in Chapter.objects.all()],
'test': lambda obj, value: obj.chap.id == value,
},
'chap__title': {
'values': [c.title for c in Chapter.objects.all()],
'test': lambda obj, value: obj.chap.title == value,
},
'chap__book__id__exact': {
'values': [b.id for b in Book.objects.all()],
'test': lambda obj, value: obj.chap.book.id == value,
},
'chap__book__name': {
'values': [b.name for b in Book.objects.all()],
'test': lambda obj, value: obj.chap.book.name == value,
},
'chap__book__promo__id__exact': {
'values': [p.id for p in Promo.objects.all()],
'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(),
},
'chap__book__promo__name': {
'values': [p.name for p in Promo.objects.all()],
'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(),
},
# A forward relation (book) after a reverse relation (promo).
'guest_author__promo__book__id__exact': {
'values': [p.id for p in Book.objects.all()],
'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(),
},
}
for filter_path, params in filters.items():
for value in params['values']:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context['cl'].queryset.all():
self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse('admin:admin_views_thing_changelist')
response = self.client.get(changelist_url, {'notarealfield': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
changelist_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '4 articles')
response = self.client.get(changelist_url, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get(changelist_url, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>',
msg_prefix=fail_msg, html=True
)
self.assertContains(
response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>',
msg_prefix=fail_msg, html=True
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse('admin:admin_views_post_changelist'))
self.assertContains(response, 'icon-unknown.svg')
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
'The boolean and empty_value arguments to the @display decorator '
'are mutually exclusive.'
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value='(Missing)')
def is_published(self, obj):
return obj.publish_date is not None
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertContains(response, 'Choisir une heure')
def test_jsi18n_with_context(self):
response = self.client.get(reverse('admin-extra-context:jsi18n'))
self.assertEqual(response.status_code, 200)
def test_L10N_deactivated(self):
"""
Check that, if L10N is deactivated, the JavaScript i18n view doesn't
return localized date/time formats. Refs #14824.
"""
with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
response = self.client.get(
"%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
self.assertContains(response, 'employee__person_ptr__exact')
response = self.client.get("%s?employee__person_ptr__exact=%d" % (
reverse('admin:admin_views_workhour_changelist'), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
url = reverse('admin:admin_views_section_changelist')
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
self.assertEqual(response.status_code, 400)
# Specifying a field that is not referred to by any other model registered
# to this admin site should raise an exception.
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
# #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
self.assertEqual(response.status_code, 200)
# #23915 - Specifying a field referenced by another model through an m2m should be allowed.
response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
self.assertEqual(response.status_code, 200)
# #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
self.assertEqual(response.status_code, 200)
# #23329 - Specifying a field that is not referred to by any other model directly registered
# to this admin site but registered through inheritance should be allowed.
response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# #23431 - Specifying a field that is only referred to by an inline of a registered
# model should be allowed.
response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# #25622 - Specifying a field of a model only referred by a generic
# relation should raise DisallowedModelAdminToField.
url = reverse('admin:admin_views_referencedbygenrel_changelist')
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse('admin:admin_views_section_change', args=(section.pk,))
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(url, {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
url = reverse('admin:admin_views_section_delete', args=(section.pk,))
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(url, {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
Regression test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey pointing to this model
url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username='super')
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse('admin:index'))
self.assertNotContains(
response, reverse('admin:password_change'),
msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.'
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name='foo')
response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
self.assertNotContains(response, 'deletelink')
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name='Cheese')
cheese = Topping.objects.create(name='cheese')
post_data = {'name': pizza.name, 'toppings': [cheese.pk]}
response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data)
self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist'))
pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False)
log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first()
self.assertEqual(log.get_change_message(), 'Changed Toppings.')
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse('admin:admin_views_simple_changelist'))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = '/test_admin/admin/admin_views/'
confirm_good_url = reverse('admin:app_list',
kwargs={'app_label': 'admin_views'})
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', args=('admin_views2',))
def test_resolve_admin_views(self):
index_match = resolve('/test_admin/admin4/')
list_match = resolve('/test_admin/admin4/auth/user/')
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse('admin:index')
response = self.client.get(url)
self.assertEqual(response.context['site_url'], '/my-site-url/')
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse('admin:admin_views_answer2_changelist'))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
for date in make_aware_datetimes(datetime.datetime(2016, 10, 16, 15), 'America/Sao_Paulo'):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question='Why?', expires=date)
Answer2.objects.create(question=q, answer='Because.')
response = self.client.get(reverse('admin:admin_views_answer2_changelist'))
self.assertContains(response, 'question__expires__day=16')
self.assertContains(response, 'question__expires__month=10')
self.assertContains(response, 'question__expires__year=2016')
@override_settings(TIME_ZONE='America/Los_Angeles', USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
for date in make_aware_datetimes(datetime.datetime(2016, 12, 31, 16), 'America/Los_Angeles'):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question='Why?', expires=date)
Answer2.objects.create(question=q, answer='Because.')
response = self.client.get(reverse('admin:admin_views_answer2_changelist'))
self.assertContains(response, 'question__expires__day=31')
self.assertContains(response, 'question__expires__month=12')
self.assertContains(response, 'question__expires__year=2016')
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ('date', 'callable_year')
expected_not_sortable_fields = (
'content', 'model_year', 'modeladmin_year', 'model_year_reversed',
'section',
)
response = self.client.get(reverse('admin6:admin_views_article_changelist'))
for field_name in expected_sortable_fields:
self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name)
for field_name in expected_not_sortable_fields:
self.assertContains(response, '<th scope="col" class="column-%s">' % field_name)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse('admin6:admin_views_actor_changelist'))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ('title', 'book')
response = self.client.get(reverse('admin6:admin_views_chapter_changelist'))
for field_name in expected_not_sortable_fields:
self.assertContains(response, '<th scope="col" class="column-%s">' % field_name)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse('admin6:admin_views_color_changelist'))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertContains(
response,
'<title>Admin_Views administration | Django site admin</title>',
)
self.assertEqual(response.context['title'], 'Admin_Views administration')
self.assertEqual(response.context['app_label'], 'admin_views')
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse('admin:admin_views_article_change', args=(self.a1.pk,)),
)
self.assertContains(
response,
'<title>Article 1 | Change article | Django site admin</title>',
)
self.assertContains(response, '<h1>Change article</h1>')
self.assertContains(response, '<h2>Article 1</h2>')
response = self.client.get(
reverse('admin:admin_views_article_change', args=(self.a2.pk,)),
)
self.assertContains(
response,
'<title>Article 2 | Change article | Django site admin</title>',
)
self.assertContains(response, '<h1>Change article</h1>')
self.assertContains(response, '<h2>Article 2</h2>')
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username='viewuser', password='secret', is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename('view', Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse('admin:admin_views_article_change', args=(self.a1.pk,)),
)
self.assertContains(
response,
'<title>Article 1 | View article | Django site admin</title>',
)
self.assertContains(response, '<h1>View article</h1>')
self.assertContains(response, '<h2>Article 1</h2>')
response = self.client.get(
reverse('admin:admin_views_article_change', args=(self.a2.pk,)),
)
self.assertContains(
response,
'<title>Article 2 | View article | Django site admin</title>',
)
self.assertContains(response, '<h1>View article</h1>')
self.assertContains(response, '<h2>Article 2</h2>')
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse('admin:admin_views_city_add'))
self.assertContains(response, 'overridden_name')
def test_render_views_no_subtitle(self):
tests = [
reverse('admin:index'),
reverse('admin:app_list', args=('admin_views',)),
reverse('admin:admin_views_article_delete', args=(self.a1.pk,)),
reverse('admin:admin_views_article_history', args=(self.a1.pk,)),
]
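# A missing template variable (such as an undefined subtitle) is logged at DEBUG
# level by the 'django.template' logger, so assertNoLogs() verifies these views
# render without referencing one.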
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs('django.template', 'DEBUG'):
self.client.get(url)
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# Put this app's and the shared tests templates dirs in DIRS to take precedence
# over the admin's templates dir.
'DIRS': [
os.path.join(os.path.dirname(__file__), 'templates'),
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, 'custom_admin/change_list.html')
# Test custom add form template
response = self.client.get(reverse('admin:admin_views_customarticle_add'))
self.assertTemplateUsed(response, 'custom_admin/add_form.html')
# Add an article so we can test delete, change, and history views
post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39'
})
self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
self.assertEqual(CustomArticle.objects.all().count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/change_form.html')
response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
'index': 0,
'action': ['delete_selected'],
'_selected_action': ['1'],
})
self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/object_history.html')
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39',
IS_POPUP_VAR: '1'
})
self.assertEqual(response.template_name, 'custom_admin/popup_response.html')
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_change_password_template(self):
user = User.objects.get(username='super')
response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, 'bodyclass_consistency_check ')
# When a site has multiple passwords in the browser's password manager,
# a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(response, '<input type="text" name="username" value="super" class="hidden">')
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse('admin:index'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_change_list(self):
"""
The admin/change_list.html template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse('admin:login'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
'action': 'delete_selected',
'selected_across': '0',
'index': '0',
'_selected_action': group.id
}
response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
self.assertEqual(response.context['site_header'], 'Django administration')
self.assertContains(response, 'bodyclass_consistency_check ')
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse('admin:admin_views_color2_changelist'))
self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
change_view has form_url in response.context
"""
response = self.client.get(
reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
)
self.assertIn('form_url', response.context, msg='form_url not present in response.context')
self.assertEqual(response.context['form_url'], 'pony')
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
{'name': 'test_value'}
)
# this would be the usual behaviour
self.assertNotContains(response, 'value="test_value"')
# this is the overridden behaviour
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertNotContains(response, 'vendor/jquery/jquery.js')
self.assertContains(response, 'vendor/jquery/jquery.min.js')
self.assertContains(response, 'prepopulate.js')
self.assertContains(response, 'actions.js')
self.assertContains(response, 'collapse.js')
self.assertContains(response, 'inlines.js')
with override_settings(DEBUG=True):
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, 'vendor/jquery/jquery.js')
self.assertNotContains(response, 'vendor/jquery/jquery.min.js')
self.assertContains(response, 'prepopulate.js')
self.assertContains(response, 'actions.js')
self.assertContains(response, 'collapse.js')
self.assertContains(response, 'inlines.js')
@override_settings(ROOT_URLCONF='admin_views.urls')
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest('id')
self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,)))
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name))
def test_save_as_new_with_validation_errors(self):
"""
When "Save as new" is clicked and the form has a validation error, only
the "Save as new" button is shown; the other save buttons are hidden.
"""
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
'_saveasnew': '',
'gender': 'invalid',
'_addanother': 'fail',
})
self.assertContains(response, 'Please correct the errors below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': 'Child',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': '_invalid',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': '_invalid',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': 'Father',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF='admin_views.urls')
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse('admin2:index'), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(reverse('admin2:login'), {
REDIRECT_FIELD_NAME: reverse('admin2:index'),
'username': 'customform',
'password': 'secret',
}, follow=True)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, 'custom form error')
self.assertContains(login, 'path/to/media.css')
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse('admin2:index'), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/login.html')
self.assertContains(response, 'Hello from a custom login template')
def test_custom_admin_site_logout_template(self):
response = self.client.get(reverse('admin2:logout'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/logout.html')
self.assertContains(response, 'Hello from a custom logout template')
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse('admin2:index'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/index.html')
self.assertContains(response, 'Hello from a custom index template *bar*')
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/app_index.html')
self.assertContains(response, 'Hello from a custom app_index template')
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse('admin2:password_change'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'Hello from a custom password change form template')
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse('admin2:password_change'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'eggs')
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse('admin2:password_change_done'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
self.assertContains(response, 'Hello from a custom password change done template')
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin2:my_view'))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
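# Per the test name, admin4 registers a custom change-password template;
# asserting a 200 response is enough to confirm the view renders.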
self.client.force_login(self.superuser)
su = User.objects.get(username='super')
response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,)))
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
@override_settings(
ROOT_URLCONF='admin_views.urls',
# Test with the admin's documented list of required context processors.
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True)
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
# Set up permissions for the users who can view, add, change, and delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts)))
# User who can add Articles
cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
# User who can change Articles
cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
# User who can delete Articles
cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))
# login POST dicts
cls.index_url = reverse('admin:index')
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'super',
'password': 'secret',
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': '[email protected]',
'password': 'secret',
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': '[email protected]',
'password': 'notsecret',
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'adduser',
'password': 'secret',
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'changeuser',
'password': 'secret',
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'deleteuser',
'password': 'secret',
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
'username': 'nostaff',
'password': 'secret',
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'joepublic',
'password': 'secret',
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'viewuser',
'password': 'secret',
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'password': 'secret',
}
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original URL.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Test logging in with an email address instead of a username.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# only correct passwords get a username hint
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username='jondoe', password='secret', email='[email protected]')
new_user.save()
# If multiple users share an email address, logging in with it shouldn't cause a 500 error.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# A regular user should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(login, 'form', 'username', ['This field is required.'])
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse('admin:login'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index'))
def test_login_has_permission(self):
# A regular user should not be able to log in.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
self.assertContains(login, 'permission denied')
# User with permissions should be able to login.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
self.assertRedirects(login, reverse('has_permission_admin:index'))
self.assertFalse(login.context)
self.client.get(reverse('has_permission_admin:logout'))
# Staff should be able to log in.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), {
REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
'username': 'deleteuser',
'password': 'secret',
})
self.assertRedirects(login, reverse('has_permission_admin:index'))
self.assertFalse(login.context)
self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '%s?%s' % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = 'You are authenticated as {}'
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertNotContains(response, hint_template.format(''), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
'title': 'Døm ikke',
'content': '<p>great article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.get(reverse('admin:logout'))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta)))
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertEqual(response.context['title'], 'Add article')
self.assertContains(response, '<title>Add article | Django site admin</title>')
self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">')
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest('pk')
response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,)))
self.assertContains(response, '<li class="success">The article “Døm ikke” was added successfully.</li>')
article.delete()
self.client.get(reverse('admin:logout'))
# The add user may log in and POST to the add view, then is redirected to the admin root.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse('admin:admin_views_article_add'))
change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
self.assertNotContains(
addpage, change_list_link,
msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.'
)
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
self.client.get(reverse('admin:logout'))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
# Super can add too, but is redirected to the change list view
self.client.force_login(self.superuser)
addpage = self.client.get(reverse('admin:admin_views_article_add'))
self.assertContains(
addpage, change_list_link,
msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.'
)
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
self.assertEqual(Article.objects.count(), 5)
self.client.get(reverse('admin:logout'))
# Refs #8509 - if a normal user is already logged in, it is possible
# to switch to the superuser without error.
self.client.force_login(self.joepublicuser)
# Check and make sure that if the user session expires, data still persists.
self.client.force_login(self.superuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission')
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta)))
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
'name': 'New obj',
'article_set-TOTAL_FORMS': 0,
'article_set-INITIAL_FORMS': 0,
}
response = self.client.post(reverse('admin:admin_views_section_add'), data)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(Section.objects.latest('id').name, data['name'])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None])
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
article_changelist_url = reverse('admin:admin_views_article_changelist')
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.get(reverse('admin:logout'))
# view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
'<title>Select article to view | Django site admin</title>',
)
self.assertContains(response, '<h1>Select article to view</h1>')
self.assertEqual(response.context['title'], 'Select article to view')
response = self.client.get(article_change_url)
self.assertContains(response, '<title>View article | Django site admin</title>')
self.assertContains(response, '<h1>View article</h1>')
self.assertContains(response, '<label>Extra form field:</label>')
self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>')
self.assertEqual(response.context['title'], 'View article')
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>')
self.client.get(reverse('admin:logout'))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context['title'], 'Select article to change')
self.assertContains(
response,
'<title>Select article to change | Django site admin</title>',
)
self.assertContains(response, '<h1>Select article to change</h1>')
response = self.client.get(article_change_url)
self.assertEqual(response.context['title'], 'Change article')
self.assertContains(
response,
'<title>Change article | Django site admin</title>',
)
self.assertContains(response, '<h1>Change article</h1>')
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')
# One error in the form should produce a singular error message; multiple errors should produce the plural.
change_dict['title'] = ''
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post, 'Please correct the error below.',
msg_prefix='Singular error message not found in response to post with one error'
)
change_dict['content'] = ''
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post, 'Please correct the errors below.',
msg_prefix='Plural error message not found in response to post with multiple errors'
)
self.client.get(reverse('admin:logout'))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3')
r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3')
change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,))
change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,))
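# The per-object outcomes below depend on object-level permission overrides
# on the RowLevelChangePermissionModel's ModelAdmin (registered elsewhere in
# this test app), hence the mix of 200 and 403 responses.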
logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {'name': 'changed'})
self.assertEqual(response.status_code, 403)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3')
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed')
self.assertRedirects(response, self.index_url)
self.client.get(reverse('admin:logout'))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, 'login-form')
response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertContains(response, 'login-form')
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, 'login-form')
response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertContains(response, 'login-form')
self.client.get(reverse('admin:logout'))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context['title'], 'View article')
self.assertContains(response, '<title>View article | Django site admin</title>')
self.assertContains(response, '<h1>View article</h1>')
self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>')
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
'_saveasnew': 'Save as new',
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest('id')
self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,)))
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
'name': 'Can edit name with view-only inlines',
'article_set-TOTAL_FORMS': 3,
'article_set-INITIAL_FORMS': 3
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name'])
# Invalid POST reshows inlines.
del data['name']
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta)))
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
'name': 'Can edit name with view-only inlines',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-3-id': [''],
'article_set-3-title': ['A title'],
'article_set-3-content': ['Added content'],
'article_set-3-date_0': ['2008-3-18'],
'article_set-3-date_1': ['11:54:58'],
'article_set-3-section': [str(self.s1.pk)],
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name'])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data['name']
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.client.force_login(self.viewuser)
data = {
'name': 'Name is required.',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-0-id': [str(self.a1.pk)],
'article_set-0-DELETE': ['on'],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta)))
data = {
'name': 'Name is required.',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-0-id': [str(self.a1.pk)],
'article_set-0-DELETE': ['on'],
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))
# add user should not be able to delete articles
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# view user should not be able to delete articles
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# The response should contain a link to the related Article.
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name='Double Cheese')
delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk)
self.assertContains(response, '<h2>Summary</h2>')
self.assertContains(response, '<li>Read only pizzas: 1</li>')
post = self.client.post(delete_url, {'post': 'yes'})
self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist'))
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
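# Requesting deletion of a nonexistent object should redirect to the
# admin index with a "doesn't exist" message.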
self.client.force_login(self.deleteuser)
url = reverse('admin:admin_views_article_delete', args=('nonexistent',))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
['article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?']
)
def test_history_view(self):
"""History view should restrict access."""
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 403)
self.client.get(reverse('admin:logout'))
# view user can view all items
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 200)
self.client.get(reverse('admin:logout'))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.get(reverse('admin:logout'))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
response = self.client.get(url, follow=True)
self.assertContains(response, 'login-form')
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
response = self.client.get(url, follow=True)
self.assertContains(response, 'login-form')
self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
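# A non-integer object ID in the history URL should redirect to the
# admin index with a "doesn't exist" message.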
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
['article with ID “foo” doesn’t exist. Perhaps it was deleted?']
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse('admin:admin_views_article_add')
add_link_text = 'add_id_section'
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return response.context['adminform'].form.fields['section'].widget.can_change_related
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the "change section" link.
url = reverse('admin:admin_views_article_add')
change_link_text = 'change_id_section'
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the "change section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('change', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the "delete section" link.
url = reverse('admin:admin_views_article_add')
delete_link_text = 'delete_id_sub_section'
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the "delete section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('delete', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
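# Deactivating a logged-in superuser should send them back to the login form
# on subsequent requests.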
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
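# Removing staff status from a logged-in superuser should likewise send them
# back to the login form.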
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username='changeuser')
permission = get_perm(Article, get_permission_codename('change', opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# The domain may depend on whether the contrib.sites tests have also run.
self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse('admin7:index')
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
# The app list displays Sections but not Articles as the latter has
# ModelAdmin.has_module_permission() = False.
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin7:app_list', args=('admin_views',)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True)
self.assertContains(
response,
'<li class="success">The article “Fun & games” was added successfully.</li>',
html=True
)
@override_settings(
ROOT_URLCONF='admin_views.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True)
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
# Setup permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts)))
cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts)))
cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts)))
cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts)))
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret')
def test_add(self):
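# A user with only the add permission on the proxy model can create
# instances through the admin.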
self.client.force_login(self.adduser)
url = reverse('admin:admin_views_userproxy_add')
data = {
'username': 'can_add',
'password': 'secret',
'date_joined_0': '2019-01-15',
'date_joined_1': '16:59:10',
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username='can_add').exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_userproxy_changelist'))
self.assertContains(response, '<h1>Select user proxy to view</h1>')
response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)))
self.assertContains(response, '<h1>View user proxy</h1>')
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
'password': self.user_proxy.password,
'username': self.user_proxy.username,
'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'),
'first_name': 'first_name',
}
url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist'))
self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name')
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,))
response = self.client.post(url, {'post': 'yes'}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta)))
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse('admin:index'))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.v1 = Villain.objects.create(name='Adam')
cls.v2 = Villain.objects.create(name='Sue')
cls.sv1 = SuperVillain.objects.create(name='Bob')
cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
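        # The two cyclic objects below reference each other through hard-coded
        # pk 1 before the other row exists; this relies on the database
        # deferring FK checks, which is why the class is decorated with
        # skipUnlessDBFeature('can_defer_constraint_checks').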
cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>' % (
reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)),
)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
)
response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta)))
self.client.force_login(self.deleteuser)
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question='Why?')
Answer.objects.create(question=q, answer='Because.')
response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'})
self.assertEqual(Question.objects.count(), 1)
self.assertContains(response, "would require deleting the following protected related objects")
def test_restricted(self):
album = Album.objects.create(title='Amaryllis')
song = Song.objects.create(album=album, name='Unity')
response = self.client.get(reverse('admin:admin_views_album_delete', args=(album.pk,)))
self.assertContains(
response,
'would require deleting the following protected related objects',
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse('admin:admin_views_song_change', args=(song.pk,))
)
def test_post_delete_restricted(self):
album = Album.objects.create(title='Amaryllis')
Song.objects.create(album=album, name='Unity')
response = self.client.post(
reverse('admin:admin_views_album_delete', args=(album.pk,)),
{'post': 'yes'},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
'would require deleting the following protected related objects',
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
'admin:admin_views_plot_change', args=(self.pl1.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
'admin:admin_views_plot_change', args=(self.pl2.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>' % reverse(
'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
),
'<li>Secret hideout: floating castle',
'<li>Super secret hideout: super floating castle!',
]
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name='hott')
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
'admin:admin_views_funkytag_change', args=(tag.id,))
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name='djangoproject')
tag = FunkyTag.objects.create(content_object=bookmark, name='django')
tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name='Test Book')
response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,)))
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, 'a deletable object')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.v1 = Villain.objects.create(name='Adam')
cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name='hott')
response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
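        # A primary key containing spaces, punctuation, and URL-reserved
        # characters, used below to exercise the admin's quoting/escaping of
        # string primary keys.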
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
user_pk = cls.superuser.pk
LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
        Retrieving the history for an object using the urlencoded form of its
        primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
self.assertContains(response, 'Changed something')
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
pk_final_url = escape(iri_to_uri(quote(self.pk)))
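        # reverse() is given a '__fk__' placeholder which is then swapped for
        # the quoted/encoded pk, so the expected href matches what the
        # changelist template renders without reverse() re-encoding the raw pk.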
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"The link from the recent actions list referring to the changeform of the object should be quoted"
response = self.client.get(reverse('admin:index'))
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
"The link from the delete confirmation page referring back to the changeform of the object should be quoted"
url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
response = self.client.get(url)
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
add_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk='abc_123')
model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),)
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),)
)
self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
        If the primary key is a string containing special characters such as a
        slash or an underscore, these characters must be escaped (see #22266).
"""
response = self.client.post(
reverse('admin:admin_views_modelwithstringprimarykey_add'),
{
'string_pk': '123/history',
"_continue": "1", # Save and continue editing
}
)
self.assertEqual(response.status_code, 302) # temporary redirect
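        # The admin's quote() escapes '/' in the pk as '_2F', so the redirect
        # URL contains '123_2Fhistory' rather than a literal slash.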
self.assertIn('/123_2Fhistory/', response.headers['location']) # PK is quoted
@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse('secure_view')
response = self.client.get(secure_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
        The staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = '/test_admin/admin/secure-view2/'
response = self.client.get(secure_url)
self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.b1 = Book.objects.create(name='Lærdommer')
cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
cls.chap1 = Chapter.objects.create(
title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse('admin:admin_views_person_changelist'))
        # 2 inputs per object (the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
self.assertContains(response, "<input", count=20)
        # 1 select per object (3) + 1 select for the actions dropdown = 4 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
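        # Checkbox fields are only checked when present in the POST data:
        # 'form-2-alive' is submitted (any non-empty value checks it), while
        # 'form-0-alive' is omitted, so per1 is saved with alive=False.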
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
1,
html=True
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
2,
html=True
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(['Grace is not a Zombie'], error_class='nonform')),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=2')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ['1'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse('admin:admin_views_person_changelist'))
self.assertNotEqual(response.context['cl'].list_editable, ())
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
"""
        Hidden pk fields aren't displayed in the table body; the corresponding
        human-readable value is displayed instead. The hidden pk fields are
        rendered, but separately (outside the table) and only once.
"""
story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = Story.objects.create(
title='Crouching Tiger, Hidden Python',
content='The Python was sneaking into...',
)
response = self.client.get(reverse('admin:admin_views_story_changelist'))
# Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>'
% (story2.id, story1.id),
html=True
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
""" Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title='The adventures of Guido',
content='Once upon a time in Djangoland...',
)
story2 = OtherStory.objects.create(
title='Crouching Tiger, Hidden Python',
content='The Python was sneaking into...',
)
link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
# Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>'
% (story2.id, story1.id),
html=True
)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
Person.objects.create(name='John Doe', gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
self.assertContains(response, "\n1 user\n")
self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True)
def test_exact_matches(self):
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True
)
self.assertTrue(response.context['cl'].show_admin_actions)
def test_search_with_spaces(self):
url = reverse('admin:admin_views_person_changelist') + '?q=%s'
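        # The search term is split on whitespace, but quoted phrases are kept
        # together as a single term, and every term must match. Since the
        # search appears to use a startswith match on name (see
        # test_beginning_matches), unquoted 'John Doe' yields two terms and
        # 'Doe' alone doesn't match, hence the 0-hit cases below.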
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
('John Doe', 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O\'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O\"Hara"', 0),
('"John O\\"Hara"', 1),
]
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, '\n%s person' % hits)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
# test the add case
response = self.client.get(reverse('admin:admin_views_persona_add'))
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# test the add case
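        # PersonaAdmin has two inline formsets for Account subclasses
        # (FooAccount and BarAccount) that share the same default prefix, so
        # the second one is de-duplicated to 'accounts-2'; hence the POST data
        # below mixes 'accounts-*' and 'accounts-2-*' keys.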
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
        # Hit the page once to get messages out of the message queue
response = self.client.get(reverse('admin:admin_views_gadget_changelist'))
# Data is still not visible on the page
response = self.client.get(reverse('admin:admin_views_gadget_changelist'))
self.assertNotContains(response, 'First Gadget')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
InlineModelAdmin broken?
"""
response = self.client.get(reverse('admin:admin_views_parent_add'))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.pks = [EmptyModel.objects.create().id for i in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse('admin:index'),
'username': 'super',
'password': 'secret',
}
def test_changelist_view(self):
response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
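        # The EmptyModel admin is registered with a custom get_queryset() that
        # (per the assertions below) hides the object with the lowest pk, so
        # only objects with pk > 1 are expected on the changelist.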
for i in self.pks:
if i > 1:
self.assertContains(response, 'Primary key = %s' % i)
else:
self.assertNotContains(response, 'Primary key = %s' % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name='person1', gender=1)
Person.objects.create(name='person2', gender=2)
changelist_url = reverse('admin:admin_views_person_changelist')
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
with self.assertNumQueries(5):
extra = {'q': 'not_in_name'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
with self.assertNumQueries(5):
extra = {'q': 'person'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
with self.assertNumQueries(5):
extra = {'gender__exact': '1'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
self.assertEqual(resp.context['selection_note_all'], '1 selected')
def test_change_view(self):
for i in self.pks:
url = reverse('admin:admin_views_emptymodel_change', args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
['empty model with ID “1” doesn’t exist. Perhaps it was deleted?']
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
'Candidate, Best</a>” was added successfully.</li>' %
reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
'%s</a>” was added successfully.</li>' %
(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
'Urgent telegram</a>” was added successfully.</li>' %
reverse('admin:admin_views_telegram_change', args=(pk,)), html=True
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
'%s</a>” was added successfully.</li>' %
(reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
'John Doe II</a>” was changed successfully.</li>' %
reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
'%s</a>” was changed successfully.</li>' %
(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
'Telegram without typo</a>” was changed successfully.</li>' %
reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
'%s</a>” was changed successfully.</li>' %
(reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse('admin:login'), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200
)
self.assertEqual(
self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200
)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b'a' * (2 ** 21))
filename = file1.name
file1.close()
cls.gallery = Gallery.objects.create(name='Test Gallery')
cls.picture = Picture.objects.create(
name='Test Picture',
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse('admin:admin_views_gallery_change', args=(self.gallery.id,))
)
self.assertIs(response.context['has_file_field'], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.collector = Collector.objects.create(pk=1, name='John Fowles')
def setUp(self):
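        # Baseline POST data: empty (but valid) management-form data for every
        # inline on the Collector admin. Individual tests fill in only the rows
        # they need before posting.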
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data['widget_set-0-name'] = "Widget 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context['has_file_field'], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
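        # Supplying the saved pk and raising INITIAL_FORMS to 1 makes the
        # formset treat form 0 as an existing object rather than a new one.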
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
# First add a new inline
self.post_data['grommet_set-0-name'] = "Grommet 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
        An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
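        # The trailing extra forms (4-6) carry no order/id data, so the
        # formset skips them and no new categories are created.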
self.post_data.update({
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
})
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF='admin_views.urls')
class NeverCacheTests(TestCase):
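    """
    Admin views are marked never-cache: the never_cache decorator sets
    max-age=0 in the Cache-Control header, which get_max_age() reads back.
    Responses that set no max-age at all yield None instead.
    """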
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
def setUp(self):
self.client.force_login(self.superuser)
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse('admin:index'))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse('admin:admin_views_fabric_add'))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse('admin:index'))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.get(reverse('admin:logout'))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse('admin:password_change'))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse('admin:password_change_done'))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse('admin:jsi18n'))
self.assertIsNone(get_max_age(response))
@override_settings(ROOT_URLCONF='admin_views.urls')
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
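        # The prepopulated-fields configuration is serialized as JSON into
        # the data-prepopulated-fields attribute, so its quotes appear
        # HTML-escaped (&quot;) in the rendered page.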
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(response, ""dependency_ids": ["#id_title"]")
self.assertContains(response, ""id": "#id_prepopulatedsubpost_set-0-subslug"")
def test_prepopulated_off(self):
response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
self.assertContains(response, "A Long Title")
self.assertNotContains(response, ""id": "#id_slug"")
self.assertNotContains(response, ""dependency_ids": ["#id_title"]")
self.assertNotContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug""
)
@override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'
which is present in the add view, even if the
ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add'))
self.assertContains(response, 'data-prepopulated-fields=')
self.assertContains(response, '"id": "#id_slug"')
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF='admin_views.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def test_login_button_centered(self):
self.selenium.get(self.live_server_url + reverse('admin:login'))
button = self.selenium.find_element_by_css_selector('.submit-row input')
offset_left = button.get_property('offsetLeft')
offset_right = (
button.get_property('offsetParent').get_property('offsetWidth') -
(offset_left + button.get_property('offsetWidth'))
)
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add'))
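        # Wait until the select2 widgets are initialized before interacting
        # with the form.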
self.wait_for('.select2')
# Main form ----------------------------------------------------------
self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18')
self.select_option('#id_status', 'option two')
self.selenium.find_element_by_id('id_name').send_keys(' the mAin nÀMë and it\'s awεšomeıııİ')
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value')
self.assertEqual(slug1, 'the-main-name-and-its-awesomeiiii-2012-02-18')
self.assertEqual(slug2, 'option-two-the-main-name-and-its-awesomeiiii')
self.assertEqual(slug3, 'the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i')
# Stacked inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
self.select_option('#id_relatedprepopulated_set-0-status', 'option one')
self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys(
' here is a sŤāÇkeð inline ! '
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'here-is-a-stacked-inline-2011-12-17')
self.assertEqual(slug2, 'option-one-here-is-a-stacked-inline')
initial_select2_inputs = self.selenium.find_elements_by_class_name('select2-selection')
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 2
)
self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
self.select_option('#id_relatedprepopulated_set-1-status', 'option two')
self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys(
' now you haVe anöther sŤāÇkeð inline with a very ... '
'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value')
# 50 characters maximum for slug1 field
self.assertEqual(slug1, 'now-you-have-another-stacked-inline-with-a-very-lo')
# 60 characters maximum for slug2 field
self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-with-a-very-l')
# Tabular inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
self.select_option('#id_relatedprepopulated_set-2-0-status', 'option two')
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys(
'And now, with a tÃbűlaŘ inline !!!'
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'and-now-with-a-tabular-inline-1234-12-07')
self.assertEqual(slug2, 'option-two-and-now-with-a-tabular-inline')
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1]
self.selenium.execute_script('window.scrollTo(0, %s);' % element.location['y'])
element.click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 4
)
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
self.select_option('#id_relatedprepopulated_set-2-1-status', 'option one')
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys(
r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'tabular-inline-with-ignored-characters-1981-08-22')
self.assertEqual(slug2, 'option-one-tabular-inline-with-ignored-characters')
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[2].click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 6
)
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.all().count(), 1)
MainPrepopulated.objects.get(
name=' the mAin nÀMë and it\'s awεšomeıııİ',
pubdate='2012-02-18',
status='option two',
slug1='the-main-name-and-its-awesomeiiii-2012-02-18',
slug2='option-two-the-main-name-and-its-awesomeiiii',
slug3='the-main-nàmë-and-its-awεšomeıııi',
)
self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
RelatedPrepopulated.objects.get(
name=' here is a sŤāÇkeð inline ! ',
pubdate='2011-12-17',
status='option one',
slug1='here-is-a-stacked-inline-2011-12-17',
slug2='option-one-here-is-a-stacked-inline',
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo',
pubdate='1999-01-25',
status='option two',
slug1='now-you-have-another-stacked-inline-with-a-very-lo',
slug2='option-two-now-you-have-another-stacked-inline-with-a-very-l',
)
RelatedPrepopulated.objects.get(
name='And now, with a tÃbűlaŘ inline !!!',
pubdate='1234-12-07',
status='option two',
slug1='and-now-with-a-tabular-inline-1234-12-07',
slug2='option-two-and-now-with-a-tabular-inline',
)
RelatedPrepopulated.objects.get(
name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate='1981-08-22',
status='option one',
slug1='tabular-inline-with-ignored-characters-1981-08-22',
slug2='option-one-tabular-inline-with-ignored-characters',
)
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=' this is the mAin nÀMë',
pubdate='2012-02-18',
status='option two',
slug1='',
slug2='',
)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))
self.selenium.get(object_url)
self.selenium.find_element_by_id('id_name').send_keys(' the best')
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18')
self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best')
# Save the object
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element_by_id('id_name').send_keys(' hello')
        # The slugs were not changed since they were no longer empty
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
self.assertEqual(slug1, 'this-is-the-main-name-the-best-2012-02-18')
self.assertEqual(slug2, 'option-two-this-is-the-main-name-the-best')
def test_collapsible_fieldset(self):
"""
        The 'collapse' class in a fieldset definition allows the
        corresponding field section to be shown and hidden.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add'))
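        # The title field sits inside a collapsed fieldset, so it stays
        # hidden until the 'Show' toggle is clicked.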
self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
self.selenium.find_elements_by_link_text('Show')[0].click()
self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide")
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
# First form field has a single widget
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
with self.wait_page_loaded():
self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_name')
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_start_date_0')
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Click 'cancel' on the delete page.
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Click 'cancel' on the delete page.
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name='Test section')
Article.objects.create(
title='foo',
content='<p>Middle content</p>',
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))
# Change popup
self.selenium.find_element_by_id('change_id_form-0-section').click()
self.wait_for_and_switch_to_popup()
self.wait_for_text('#content h1', 'Change section')
name_input = self.selenium.find_element_by_id('id_name')
name_input.clear()
name_input.send_keys('<i>edited section</i>')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar')
toggle_button.click()
select = Select(self.selenium.find_element_by_id('id_form-0-section'))
self.assertEqual(select.first_selected_option.text, '<i>edited section</i>')
# Rendered select2 input.
select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered')
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, '×\n<i>edited section</i>')
# Add popup
self.selenium.find_element_by_id('add_id_form-0-section').click()
self.wait_for_and_switch_to_popup()
self.wait_for_text('#content h1', 'Add section')
self.selenium.find_element_by_id('id_name').send_keys('new section')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_form-0-section'))
self.assertEqual(select.first_selected_option.text, 'new section')
select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered')
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, '×\nnew section')
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('change_id_parent').click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.support.ui import Select
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
self.selenium.find_element_by_id('add_id_parent').click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element_by_id('id_title').send_keys('test')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('delete_id_parent').click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element_by_xpath('//input[@value="Yes, I’m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, '---------')
self.assertEqual(select.first_selected_option.get_attribute('value'), '')
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('delete_id_parent').click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element_by_xpath('//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
parent = ParentWithUUIDPK.objects.create(title='test')
parent2 = ParentWithUUIDPK.objects.create(title='test2')
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element_by_link_text(str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
self.assertEqual(value, str(parent2.pk))
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
self.selenium.get(self.live_server_url + reverse('admin:login'))
element = self.selenium.find_element_by_id('id_username')
        # Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property('font-family').split(',')
]
self.assertEqual(
fonts,
['Roboto', 'Lucida Grande', 'Verdana', 'Arial', 'sans-serif'],
)
def test_search_input_filtered_page(self):
Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
Person.objects.create(name='Grace Hopper', gender=1, alive=False)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
person_url = reverse('admin:admin_views_person_changelist') + '?q=Gui'
self.selenium.get(self.live_server_url + person_url)
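        # The search bar keeps a usable width (more than 50px) even when the
        # changelist is filtered.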
self.assertGreater(
self.selenium.find_element_by_id('searchbar').rect['width'],
50,
)
@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse('admin:admin_views_post_add'))
self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty forms
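        # (the remaining hidden input appears to be the CSRF token)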
self.assertContains(response, "<input", count=16)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, 'Multiline<br>test<br>string')
self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True)
self.assertContains(response, 'InlineMultiline<br>test<br>string')
self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help">', 3)
self.assertContains(
response,
'<div class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the content (with Unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)</div>',
html=True
)
p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test", content="test",
readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
)
Link.objects.create(
url="http://www.djangoproject.com", post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
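        # Newlines in readonly text content are rendered as <br> tags, both
        # on the main form and in the inline.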
# Checking readonly field.
self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test')
# Checking readonly field in inline.
self.assertContains(response, 'test<br>link')
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
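        # 'posted' is a read-only field, so a submitted value must be ignored
        # and the automatic value (today) kept.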
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by('-id')[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse('admin:admin_views_pizza_add'))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
self.assertContains(response, '<div class="readonly">No opinion</div>', html=True)
def test_readonly_foreignkey_links(self):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title='Chapter 1',
content='content',
book=Book.objects.create(name='Book 1'),
)
language = Language.objects.create(iso='_40', name='Test')
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse('admin:admin_views_readonlyrelatedfield_change', args=(obj.pk,)),
)
# Related ForeignKey object registered in admin.
user_url = reverse('admin:auth_user_change', args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
language_url = reverse(
'admin:admin_views_language_change',
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(response, '<div class="readonly">Chapter 1</div>', html=True)
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_topping_add'))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,)))
self.assertContains(response, '<label>Toppings:</label>', html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name='Adam')
pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
pd_url = reverse('admin:admin_views_plotdetails_change', args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
self.assertEqual(field.contents(), '-') # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
self.assertContains(response, '<div class="help">Overridden help text for the date</div>')
self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
self.assertNotContains(response, 'Some help text for the date (with Unicode ŠĐĆŽćžšđ)')
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name='<a>evil</a>')
response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name='Americano')
url = reverse('admin:admin_views_pizza_change', args=(pizza.pk,))
with self.settings(LANGUAGE_CODE='fr'):
response = self.client.get(url)
self.assertContains(response, '<label>Toppings\u00A0:</label>', html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username='threepwood',
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username='marley',
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
@override_settings(ROOT_URLCONF='admin_views.urls')
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True,
leader=actor,
country="England")
Inquisition.objects.create(expected=False,
leader=actor,
country="Spain")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
self.assertTrue(m) # Got a match
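        # The href is HTML-escaped in the rendered page, so unescape &amp;
        # before following the popup URL.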
        popup_url = m[1].decode().replace('&amp;', '&')
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
        # lookup query string; in the model, the inquisition field has a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace('&amp;', '&')
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in the model, the defendant0 field has a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace('&amp;', '&')
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in the model, the defendant1 field has a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
self.assertContains(response, 'list-display-sketch')
@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
})
new_user = User.objects.get(username='newuser')
self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_continue': '1',
})
new_user = User.objects.get(username='newuser')
new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user “<a href="%s">'
'%s</a>” was added successfully. You may edit it again below.</li>'
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'mismatch',
})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'adminform', 'password', [])
self.assertFormError(response, 'adminform', 'password2', ['The two password fields didn’t match.'])
def test_user_fk_add_popup(self):
"""User addition through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_add'))
self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
response = self.client.get(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
IS_POPUP_VAR: '1',
'_save': '1',
}
response = self.client.post(reverse('admin:auth_user_add') + '?%s=1' % IS_POPUP_VAR, data, follow=True)
self.assertContains(response, '"obj": "newuser"')
def test_user_fk_change_popup(self):
"""User change through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
user = User.objects.get(username='changeuser')
url = reverse('admin:auth_user_change', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
IS_POPUP_VAR: '1',
'_save': '1',
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, '"obj": "newuser"')
self.assertContains(response, '"action": "change"')
def test_user_fk_delete_popup(self):
"""User deletion through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
user = User.objects.get(username='changeuser')
url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?%s=1' % IS_POPUP_VAR
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
'post': 'yes',
IS_POPUP_VAR: '1',
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, '"action": "delete"')
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_addanother': '1',
})
new_user = User.objects.order_by('-id')[0]
self.assertRedirects(response, reverse('admin:auth_user_add'))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
with self.assertNumQueries(10):
response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['form_url'], 'pony')
@override_settings(ROOT_URLCONF='admin_views.urls')
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(reverse('admin:auth_group_add'), {
'name': 'newgroup',
})
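        # Indexing the queryset would raise IndexError if the new group
        # hadn't been created.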
Group.objects.order_by('-id')[0]
self.assertRedirects(response, reverse('admin:auth_group_changelist'))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
with self.assertNumQueries(8):
response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse('admin:admin_views_post_add'))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse('admin:index'))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get(reverse('admin:admin_views_section_changelist'))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their class attribute
Refs #11195.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF='admin_views.urls')
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse('django-admindocs-tags'))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
# An app tag exists in both the index and detail
self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
def test_filters(self):
response = self.client.get(reverse('django-admindocs-filters'))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
ROOT_URLCONF='admin_views.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
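    # Note: the Podcast changelist under test is assumed to declare
    # date_hierarchy = 'release_date' on its ModelAdmin (in admin.py, not shown
    # here); the helpers below only look for the drill-down links that setting
    # renders, e.g. ?release_date__year=2000&amp;release_date__month=6.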
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
    def assert_contains_month_link(self, response, date):
        self.assertContains(
            response, '?release_date__month=%d&amp;release_date__year=%d"' % (
                date.month, date.year))
    def assert_contains_day_link(self, response, date):
        self.assertContains(
            response, '?release_date__day=%d&amp;'
            'release_date__month=%d&amp;release_date__year=%d"' % (
                date.day, date.month, date.year))
def test_empty(self):
"""
No date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
year-level links appear for year-spanning changelist.
"""
DATES = (datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
# no day/month-level links
self.assertNotContains(response, 'release_date__day=')
self.assertNotContains(response, 'release_date__month=')
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = '%s?release_date__year=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = '%s?release_date__year=%d&release_date__month=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year, date.month)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse('admin:admin_views_answer_changelist'))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
'child_set-TOTAL_FORMS': '3',
'child_set-INITIAL_FORMS': '0',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-1-name': 'Catherine',
}
self.client.post(reverse('admin:admin_views_parent_add'), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name='Josh Stone')
paul = Child.objects.create(parent=parent, name='Paul')
catherine = Child.objects.create(parent=parent, name='Catherine')
post = {
'child_set-TOTAL_FORMS': '5',
'child_set-INITIAL_FORMS': '2',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-0-id': paul.id,
'child_set-1-name': 'Catherine',
'child_set-1-id': catherine.id,
}
self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name='Josh Rock')
Child.objects.create(parent=parent, name='Paul')
Child.objects.create(parent=parent, name='Catherine')
post = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': parent.id,
'form-0-name': 'Josh Stone',
'_save': 'Save'
}
self.client.post(reverse('admin:admin_views_parent_changelist'), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/logged_out.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
self.assertFalse(response.context['has_permission'])
        self.assertNotContains(response, 'user-tools')  # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 302) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.get(reverse('admin:logout'), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse('admin:index'),
)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_%s' % level,
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="%s">Test %s</li>' % (level, level),
html=True)
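    # A minimal sketch (an assumption -- the real actions live on
    # UserMessengerAdmin in admin.py) of the dummy actions that send_message()
    # posts to; each action simply emits a message at the matching level:
    #
    #     def message_info(modeladmin, request, selected):
    #         messages.info(request, 'Test info')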
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message('debug')
def test_message_info(self):
self.send_message('info')
def test_message_success(self):
self.send_message('success')
def test_message_warning(self):
self.send_message('warning')
def test_message_error(self):
self.send_message('error')
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_extra_tags',
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="extra_tag info">Test tags</li>',
html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=''):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
for parsed_qs in [parsed_qs1, parsed_qs2]:
if '_changelist_filters' in parsed_qs:
changelist_filters = parsed_qs['_changelist_filters']
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs['_changelist_filters'] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,))
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
)
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
change_user_url
)
)
# Ignore scheme and host.
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
)
# Ignore ordering of querystring.
self.assertURLEqual(
'{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
'{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
'{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
)
def get_changelist_filters(self):
return {
'is_superuser__exact': 0,
'is_staff__exact': 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode({
'_changelist_filters': self.get_changelist_filters_querystring()
})
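    # For the filters above, get_preserved_filters_querystring() yields a
    # single URL-encoded parameter (parameter order follows the dict above):
    #   _changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0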
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return '%s?%s' % (
reverse('admin:auth_user_changelist',
current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse('admin:auth_user_add', current_app=self.admin_site.name)
if add_preserved_filters:
url = '%s?%s' % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse('admin:auth_user_change', args=(user_id,), current_app=self.admin_site.name)
if add_preserved_filters:
url = '%s?%s' % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_history', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_delete', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode()
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode()
)
self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring())
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>',
response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>',
response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
'username': 'joepublic',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
}
post_data['_save'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['_continue'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['_addanother'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop('_addanother')
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode()
)
self.assertURLEqual(form_action[1], '?%s' % self.get_preserved_filters_querystring())
post_data = {
'username': 'dummy',
'password1': 'test',
'password2': 'test',
}
# Test redirect on "Save".
post_data['_save'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk))
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['username'] = 'dummy2'
post_data['_continue'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk))
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['username'] = 'dummy3'
post_data['_addanother'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop('_addanother')
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {'post': 'yes'})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
'preserved_filters': self.get_preserved_filters_querystring(),
'opts': User._meta,
}
prefixes = ('', '/prefix/', '/後台/')
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestLabelVisibility(TestCase):
""" #11277 -Labels of hidden fields in admin were not hidden. """
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, 'first')
self.assert_field_visible(response, 'second')
def test_all_fields_hidden(self):
response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, 'first')
self.assert_field_hidden(response, 'second')
def test_mixin(self):
response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, 'first')
self.assert_field_visible(response, 'second')
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
        Also, assertFormError() and assertFormsetError() are usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
'family_name': 'Test1',
'dependentchild_set-TOTAL_FORMS': '1',
'dependentchild_set-INITIAL_FORMS': '0',
'dependentchild_set-MAX_NUM_FORMS': '1',
'dependentchild_set-0-id': '',
'dependentchild_set-0-parent': '',
'dependentchild_set-0-family_name': 'Test2',
}
response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data)
self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'"
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, 'adminform', None, ['Error'])
self.assertFormsetError(
response, 'inline_admin_formset', 0, None,
['Children must share a family name with their parents in this contrived test case']
)
msg = "The formset 'inline_admin_formset' in context 12 does not contain any non-form errors."
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error'])
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1')
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
'family_name': 'Test2',
'dependentchild_set-TOTAL_FORMS': '1',
'dependentchild_set-INITIAL_FORMS': '0',
'dependentchild_set-MAX_NUM_FORMS': '1',
'dependentchild_set-0-id': '',
'dependentchild_set-0-parent': str(pwdc.id),
'dependentchild_set-0-family_name': 'Test1',
}
response = self.client.post(
reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
)
self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
self.assertFormsetError(
response, 'inline_admin_formset', 0, None,
['Children must share a family name with their parents in this contrived test case']
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(admin.check(), [
Error(
"The value of 'view_on_site' must be a callable or a boolean value.",
obj=CityAdmin,
id='admin.E025',
),
])
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF='admin_views.urls')
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF='admin_views.urls')
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behaviour of the admin catch-all view.
    * Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = '/test_admin/admin/unknown/'
response = self.client.get(unknown_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), unknown_url))
def test_unknown_url_404_if_authenticated(self):
superuser = User.objects.create_superuser(
username='super',
password='secret',
email='[email protected]',
)
self.client.force_login(superuser)
unknown_url = '/test_admin/admin/unknown/'
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse('admin:admin_views_article_changelist')[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), known_url))
def test_non_admin_url_shares_url_prefix(self):
url = reverse('non_admin')[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse('admin:article_extra_json')
response = self.client.get(url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url))
    def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse('admin:article_extra_json')[:-1]
response = self.client.get(url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = '/test_admin/admin/unknown/'
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertRedirects(response, known_url, status_code=301, target_status_code=403)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url[:-1], SCRIPT_NAME='/prefix/')
self.assertRedirects(
response,
'/prefix' + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME='/prefix/')
def test_missing_slash_append_slash_true_force_script_name(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
'/prefix' + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
user = User.objects.create_user(
username='user',
password='secret',
email='[email protected]',
is_staff=False,
)
self.client.force_login(user)
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertRedirects(response, '/test_admin/admin/login/?next=/test_admin/admin/admin_views/article')
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin9:admin_views_actor_changelist')
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
    # Same tests as above, but with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = '/test_admin/admin10/unknown/'
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
superuser = User.objects.create_superuser(
username='super',
password='secret',
email='[email protected]',
)
self.client.force_login(superuser)
unknown_url = '/test_admin/admin10/unknown/'
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated_without_final_catch_all_view(self):
known_url = reverse('admin10:admin_views_article_changelist')
response = self.client.get(known_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), known_url))
def test_known_url_missing_slash_redirects_with_slash_if_not_authenticated_without_final_catch_all_view(self):
known_url = reverse('admin10:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertRedirects(response, known_url, status_code=301, fetch_redirect_response=False)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse('non_admin10')
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self):
url = reverse('admin10:article_extra_json')
response = self.client.get(url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin10:login'), url))
    def test_unknown_url_without_trailing_slash_if_not_authenticated_without_final_catch_all_view(self):
url = reverse('admin10:article_extra_json')[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(response, url + '/', status_code=301, fetch_redirect_response=False)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = '/test_admin/admin10/unknown/'
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin10:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertRedirects(response, known_url, status_code=301, target_status_code=403)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username='staff',
password='secret',
email='[email protected]',
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse('admin10:admin_views_article_changelist')
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = '/unknown/'
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
import datetime
import tempfile
import uuid
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
class Section(models.Model):
"""
A simple section that links to articles, to test linking to related items
in admin views.
"""
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@property
def name_property(self):
"""
A property that simply returns the name. Used to test #24461
"""
return self.name
class Article(models.Model):
"""
A simple article to test admin views. Test backwards compatibility.
"""
title = models.CharField(max_length=100)
content = models.TextField()
date = models.DateTimeField()
section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+')
sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+')
def __str__(self):
return self.title
@admin.display(ordering='date', description='')
def model_year(self):
return self.date.year
@admin.display(ordering='-date', description='')
def model_year_reversed(self):
return self.date.year
@property
@admin.display(ordering='date')
def model_property_year(self):
return self.date.year
@property
def model_month(self):
return self.date.month
class Book(models.Model):
"""
A simple book that has chapters.
"""
name = models.CharField(max_length=100, verbose_name='¿Name?')
def __str__(self):
return self.name
class Promo(models.Model):
name = models.CharField(max_length=100, verbose_name='¿Name?')
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return self.name
class Chapter(models.Model):
title = models.CharField(max_length=100, verbose_name='¿Title?')
content = models.TextField()
book = models.ForeignKey(Book, models.CASCADE)
class Meta:
# Use a utf-8 bytestring to ensure it works (see #11710)
verbose_name = '¿Chapter?'
def __str__(self):
return self.title
class ChapterXtra1(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
guest_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return '¿Xtra1: %s' % self.xtra
class ChapterXtra2(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra2: %s' % self.xtra
class RowLevelChangePermissionModel(models.Model):
name = models.CharField(max_length=100, blank=True)
class CustomArticle(models.Model):
content = models.TextField()
date = models.DateTimeField()
class ModelWithStringPrimaryKey(models.Model):
string_pk = models.CharField(max_length=255, primary_key=True)
def __str__(self):
return self.string_pk
def get_absolute_url(self):
return '/dummy/%s/' % self.string_pk
class Color(models.Model):
value = models.CharField(max_length=10)
warm = models.BooleanField(default=False)
def __str__(self):
return self.value
# we replicate Color to register with another ModelAdmin
class Color2(Color):
class Meta:
proxy = True
class Thing(models.Model):
title = models.CharField(max_length=20)
color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True})
pub_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.title
class Actor(models.Model):
name = models.CharField(max_length=50)
age = models.IntegerField()
title = models.CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.name
class Inquisition(models.Model):
expected = models.BooleanField(default=False)
leader = models.ForeignKey(Actor, models.CASCADE)
country = models.CharField(max_length=20)
def __str__(self):
return "by %s from %s" % (self.leader, self.country)
class Sketch(models.Model):
title = models.CharField(max_length=100)
inquisition = models.ForeignKey(
Inquisition,
models.CASCADE,
limit_choices_to={
'leader__name': 'Palin',
'leader__age': 27,
'expected': False,
},
)
defendant0 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': False},
related_name='as_defendant0',
)
defendant1 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': True},
related_name='as_defendant1',
)
def __str__(self):
return self.title
def today_callable_dict():
return {"last_action__gte": datetime.datetime.today()}
def today_callable_q():
return models.Q(last_action__gte=datetime.datetime.today())
class Character(models.Model):
username = models.CharField(max_length=100)
last_action = models.DateTimeField()
def __str__(self):
return self.username
class StumpJoke(models.Model):
variation = models.CharField(max_length=100)
most_recently_fooled = models.ForeignKey(
Character,
models.CASCADE,
limit_choices_to=today_callable_dict,
related_name="+",
)
has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+")
def __str__(self):
return self.variation
class Fabric(models.Model):
NG_CHOICES = (
('Textured', (
('x', 'Horizontal'),
('y', 'Vertical'),
)),
('plain', 'Smooth'),
)
surface = models.CharField(max_length=20, choices=NG_CHOICES)
class Person(models.Model):
GENDER_CHOICES = (
(1, "Male"),
(2, "Female"),
)
name = models.CharField(max_length=100)
gender = models.IntegerField(choices=GENDER_CHOICES)
age = models.IntegerField(default=21)
alive = models.BooleanField(default=True)
def __str__(self):
return self.name
class Persona(models.Model):
"""
A simple persona associated with accounts, to test inlining of related
accounts which inherit from a common accounts class.
"""
name = models.CharField(blank=False, max_length=80)
def __str__(self):
return self.name
class Account(models.Model):
"""
A simple, generic account encapsulating the information shared by all
types of accounts.
"""
username = models.CharField(blank=False, max_length=80)
persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
servicename = 'generic service'
def __str__(self):
return "%s: %s" % (self.servicename, self.username)
class FooAccount(Account):
"""A service-specific account of type Foo."""
servicename = 'foo'
class BarAccount(Account):
"""A service-specific account of type Bar."""
servicename = 'bar'
class Subscriber(models.Model):
name = models.CharField(blank=False, max_length=80)
email = models.EmailField(blank=False, max_length=175)
def __str__(self):
return "%s (%s)" % (self.name, self.email)
class ExternalSubscriber(Subscriber):
pass
class OldSubscriber(Subscriber):
pass
class Media(models.Model):
name = models.CharField(max_length=60)
class Podcast(Media):
release_date = models.DateField()
class Meta:
ordering = ('release_date',) # overridden in PodcastAdmin
class Vodcast(Media):
media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True)
released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
name = models.CharField(max_length=30, blank=True)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class PKChild(models.Model):
"""
Used to check autocomplete to_field resolution when ForeignKey is PK.
"""
parent = models.ForeignKey(Parent, models.CASCADE, primary_key=True)
name = models.CharField(max_length=128)
class Meta:
ordering = ['parent']
def __str__(self):
return self.name
class Toy(models.Model):
child = models.ForeignKey(PKChild, models.CASCADE)
class EmptyModel(models.Model):
def __str__(self):
return "Primary key = %s" % self.id
temp_storage = FileSystemStorage(tempfile.mkdtemp())
class Gallery(models.Model):
name = models.CharField(max_length=100)
class Picture(models.Model):
name = models.CharField(max_length=100)
image = models.FileField(storage=temp_storage, upload_to='test_upload')
gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")
class Language(models.Model):
iso = models.CharField(max_length=5, primary_key=True)
name = models.CharField(max_length=50)
english_name = models.CharField(max_length=50)
shortlist = models.BooleanField(default=False)
def __str__(self):
return self.iso
class Meta:
ordering = ('iso',)
# a base class for Recommender and Recommendation
class Title(models.Model):
pass
class TitleTranslation(models.Model):
title = models.ForeignKey(Title, models.CASCADE)
text = models.CharField(max_length=100)
class Recommender(Title):
pass
class Recommendation(Title):
the_recommender = models.ForeignKey(Recommender, models.CASCADE)
class Collector(models.Model):
name = models.CharField(max_length=100)
class Widget(models.Model):
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class DooHickey(models.Model):
code = models.CharField(max_length=10, primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Grommet(models.Model):
code = models.AutoField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Whatsit(models.Model):
index = models.IntegerField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Doodad(models.Model):
name = models.CharField(max_length=100)
class FancyDoodad(Doodad):
owner = models.ForeignKey(Collector, models.CASCADE)
expensive = models.BooleanField(default=True)
class Category(models.Model):
collector = models.ForeignKey(Collector, models.CASCADE)
order = models.PositiveIntegerField()
class Meta:
ordering = ('order',)
def __str__(self):
return '%s:o%s' % (self.id, self.order)
def link_posted_default():
return datetime.date.today() - datetime.timedelta(days=7)
class Link(models.Model):
posted = models.DateField(default=link_posted_default)
url = models.URLField()
post = models.ForeignKey("Post", models.CASCADE)
readonly_link_content = models.TextField()
class PrePopulatedPost(models.Model):
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
slug = models.SlugField()
class PrePopulatedSubPost(models.Model):
post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
subtitle = models.CharField(max_length=100)
subslug = models.SlugField()
class Post(models.Model):
title = models.CharField(max_length=100, help_text='Some help text for the title (with Unicode ŠĐĆŽćžšđ)')
content = models.TextField(help_text='Some help text for the content (with Unicode ŠĐĆŽćžšđ)')
readonly_content = models.TextField()
posted = models.DateField(
default=datetime.date.today,
help_text='Some help text for the date (with Unicode ŠĐĆŽćžšđ)',
)
public = models.BooleanField(null=True, blank=True)
def awesomeness_level(self):
return "Very awesome."
# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
class Meta:
proxy = True
class Gadget(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Villain(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class SuperVillain(Villain):
pass
class FunkyTag(models.Model):
"Because we all know there's only one real use case for GFKs."
name = models.CharField(max_length=25)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self):
return self.name
class Plot(models.Model):
name = models.CharField(max_length=100)
team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots')
contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots')
tags = GenericRelation(FunkyTag)
def __str__(self):
return self.name
class PlotDetails(models.Model):
details = models.CharField(max_length=100)
plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)
def __str__(self):
return self.details
class PlotProxy(Plot):
class Meta:
proxy = True
class SecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
villain = models.ForeignKey(Villain, models.CASCADE)
def __str__(self):
return self.location
class SuperSecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
supervillain = models.ForeignKey(SuperVillain, models.CASCADE)
def __str__(self):
return self.location
class Bookmark(models.Model):
name = models.CharField(max_length=60)
tag = GenericRelation(FunkyTag, related_query_name='bookmark')
def __str__(self):
return self.name
class CyclicOne(models.Model):
name = models.CharField(max_length=25)
two = models.ForeignKey('CyclicTwo', models.CASCADE)
def __str__(self):
return self.name
class CyclicTwo(models.Model):
name = models.CharField(max_length=25)
one = models.ForeignKey(CyclicOne, models.CASCADE)
def __str__(self):
return self.name
class Topping(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class Pizza(models.Model):
name = models.CharField(max_length=20)
toppings = models.ManyToManyField('Topping', related_name='pizzas')
# Pizza's ModelAdmin has readonly_fields = ['toppings'].
# toppings is editable for this model's admin.
class ReadablePizza(Pizza):
class Meta:
proxy = True
# No default permissions are created for this model and both name and toppings
# are readonly for this model's admin.
class ReadOnlyPizza(Pizza):
class Meta:
proxy = True
default_permissions = ()
class Album(models.Model):
owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
title = models.CharField(max_length=30)
class Song(models.Model):
name = models.CharField(max_length=20)
album = models.ForeignKey(Album, on_delete=models.RESTRICT)
def __str__(self):
return self.name
class Employee(Person):
code = models.CharField(max_length=20)
class Meta:
ordering = ['name']
class WorkHour(models.Model):
datum = models.DateField()
employee = models.ForeignKey(Employee, models.CASCADE)
class Manager(Employee):
"""
A multi-layer MTI child.
"""
pass
class Bonus(models.Model):
recipient = models.ForeignKey(Manager, on_delete=models.CASCADE)
class Question(models.Model):
big_id = models.BigAutoField(primary_key=True)
question = models.CharField(max_length=20)
posted = models.DateField(default=datetime.date.today)
expires = models.DateTimeField(null=True, blank=True)
related_questions = models.ManyToManyField('self')
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
def __str__(self):
return self.question
class Answer(models.Model):
question = models.ForeignKey(Question, models.PROTECT)
question_with_to_field = models.ForeignKey(
Question, models.SET_NULL,
blank=True, null=True, to_field='uuid',
related_name='uuid_answers',
limit_choices_to=~models.Q(question__istartswith='not'),
)
related_answers = models.ManyToManyField('self')
answer = models.CharField(max_length=20)
def __str__(self):
return self.answer
class Answer2(Answer):
class Meta:
proxy = True
class Reservation(models.Model):
start_date = models.DateTimeField()
price = models.IntegerField()
class FoodDelivery(models.Model):
DRIVER_CHOICES = (
('bill', 'Bill G'),
('steve', 'Steve J'),
)
RESTAURANT_CHOICES = (
('indian', 'A Taste of India'),
('thai', 'Thai Pography'),
('pizza', 'Pizza Mama'),
)
reference = models.CharField(max_length=100)
driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True)
class Meta:
unique_together = (("driver", "restaurant"),)
class CoverLetter(models.Model):
author = models.CharField(max_length=30)
date_written = models.DateField(null=True, blank=True)
def __str__(self):
return self.author
class Paper(models.Model):
title = models.CharField(max_length=30)
author = models.CharField(max_length=30, blank=True, null=True)
class ShortMessage(models.Model):
content = models.CharField(max_length=140)
timestamp = models.DateTimeField(null=True, blank=True)
class Telegram(models.Model):
title = models.CharField(max_length=30)
date_sent = models.DateField(null=True, blank=True)
def __str__(self):
return self.title
class Story(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class OtherStory(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class ComplexSortedPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
is_employee = models.BooleanField(null=True)
class PluggableSearchPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
class PrePopulatedPostLargeSlug(models.Model):
"""
Regression test for #15938: a large max_length for the SlugField must not
be localized in prepopulated_fields_js.html or it might end up breaking
the JavaScript (i.e., using THOUSAND_SEPARATOR ends up with maxLength=1,000).
"""
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
# `db_index=False` because MySQL cannot index large CharField (#21196).
slug = models.SlugField(max_length=1000, db_index=False)
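# A minimal sketch of the failure mode described above (illustration only;
# assumes a locale that uses ',' as its thousand separator):
#
#     from django.utils import formats
#     formats.number_format(1000, force_grouping=True)  # -> '1,000'
#
# If the template localized max_length this way, the rendered maxLength
# attribute would become "1,000" and break the prepopulation JavaScript.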
class AdminOrderedField(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedModelMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
@admin.display(ordering='order')
def some_order(self):
return self.order
class AdminOrderedAdminMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedCallable(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class Report(models.Model):
title = models.CharField(max_length=100)
def __str__(self):
return self.title
class MainPrepopulated(models.Model):
name = models.CharField(max_length=100)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(blank=True)
slug2 = models.SlugField(blank=True)
slug3 = models.SlugField(blank=True, allow_unicode=True)
class RelatedPrepopulated(models.Model):
parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
name = models.CharField(max_length=75)
fk = models.ForeignKey('self', models.CASCADE, blank=True, null=True)
m2m = models.ManyToManyField('self', blank=True)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(max_length=50)
slug2 = models.SlugField(max_length=60)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #16819.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
class UndeletableObject(models.Model):
"""
Model whose show_delete in admin change_view has been disabled
Refs #10057.
"""
name = models.CharField(max_length=255)
class UnchangeableObject(models.Model):
"""
Model whose change_view is disabled in admin
Refs #20640.
"""
class UserMessenger(models.Model):
"""
Dummy class for testing message_user functions on ModelAdmin
"""
class Simple(models.Model):
"""
Simple model with nothing on it for use in testing
"""
class Choice(models.Model):
choice = models.IntegerField(
blank=True, null=True,
choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')),
)
class ParentWithDependentChildren(models.Model):
"""
Issue #20522
Model where the validation of child foreign-key relationships depends
on validation of the parent
"""
some_required_info = models.PositiveIntegerField()
family_name = models.CharField(max_length=255, blank=False)
class DependentChild(models.Model):
"""
Issue #20522
Model that depends on validation of the parent class for one of its
fields to validate during clean
"""
parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
family_name = models.CharField(max_length=255)
class _Manager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(pk__gt=1)
class FilteredManager(models.Model):
def __str__(self):
return "PK=%d" % self.pk
pk_gt_1 = _Manager()
objects = models.Manager()
class EmptyModelVisible(models.Model):
""" See ticket #11277. """
class EmptyModelHidden(models.Model):
""" See ticket #11277. """
class EmptyModelMixin(models.Model):
""" See ticket #11277. """
class State(models.Model):
name = models.CharField(max_length=100, verbose_name='State verbose_name')
class City(models.Model):
state = models.ForeignKey(State, models.CASCADE)
name = models.CharField(max_length=100, verbose_name='City verbose_name')
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Restaurant(models.Model):
city = models.ForeignKey(City, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Worker(models.Model):
work_at = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
# Models for #23329
class ReferencedByParent(models.Model):
name = models.CharField(max_length=20, unique=True)
class ParentWithFK(models.Model):
fk = models.ForeignKey(
ReferencedByParent,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class ChildOfReferer(ParentWithFK):
pass
# Models for #23431
class InlineReferer(models.Model):
pass
class ReferencedByInline(models.Model):
name = models.CharField(max_length=20, unique=True)
class InlineReference(models.Model):
referer = models.ForeignKey(InlineReferer, models.CASCADE)
fk = models.ForeignKey(
ReferencedByInline,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through='RecipeIngredient')
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')
# Model for #23839
class NotReferenced(models.Model):
# Don't point any FK at this model.
pass
# Models for #23934
class ExplicitlyProvidedPK(models.Model):
name = models.IntegerField(primary_key=True)
class ImplicitlyGeneratedPK(models.Model):
name = models.IntegerField(unique=True)
# Models for #25622
class ReferencedByGenRel(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class GenRelReference(models.Model):
references = GenericRelation(ReferencedByGenRel)
class ParentWithUUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
title = models.CharField(max_length=100)
def __str__(self):
return str(self.id)
class RelatedWithUUIDPKModel(models.Model):
parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True)
class Author(models.Model):
pass
class Authorship(models.Model):
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(Author, models.CASCADE)
class UserProxy(User):
"""Proxy a model with a different app_label."""
class Meta:
proxy = True
class ReadOnlyRelatedField(models.Model):
chapter = models.ForeignKey(Chapter, models.CASCADE)
language = models.ForeignKey(Language, models.CASCADE)
user = models.ForeignKey(User, models.CASCADE)
|
74f9f5a843197cb14820e248e097b5f7a046e92bc378a656cba715240a5568bb | import datetime
from io import StringIO
from wsgiref.util import FileWrapper
from django import forms
from django.contrib import admin
from django.contrib.admin import BooleanFieldListFilter
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.core.exceptions import ValidationError
from django.core.mail import EmailMessage
from django.db import models
from django.forms.models import BaseModelFormSet
from django.http import HttpResponse, JsonResponse, StreamingHttpResponse
from django.urls import path
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.views.decorators.common import no_append_slash
from .forms import MediaActionForm
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book,
Bookmark, Category, Chapter, ChapterXtra1, Child, ChildOfReferer, Choice,
City, Collector, Color, Color2, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DependentChild, DooHickey, EmptyModel,
EmptyModelHidden, EmptyModelMixin, EmptyModelVisible, ExplicitlyProvidedPK,
ExternalSubscriber, Fabric, FancyDoodad, FieldOverridePost,
FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gadget, Gallery,
GenRelReference, Grommet, ImplicitlyGeneratedPK, Ingredient,
InlineReference, InlineReferer, Inquisition, Language, Link,
MainPrepopulated, ModelWithStringPrimaryKey, NotReferenced, OldSubscriber,
OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK,
Person, Persona, Picture, Pizza, Plot, PlotDetails, PlotProxy,
PluggableSearchPerson, Podcast, Post, PrePopulatedPost,
PrePopulatedPostLargeSlug, PrePopulatedSubPost, Promo, Question,
ReadablePizza, ReadOnlyPizza, ReadOnlyRelatedField, Recipe, Recommendation,
Recommender, ReferencedByGenRel, ReferencedByInline, ReferencedByParent,
RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Reservation,
Restaurant, RowLevelChangePermissionModel, Section, ShortMessage, Simple,
Sketch, Song, State, Story, StumpJoke, Subscriber, SuperVillain, Telegram,
Thing, Topping, UnchangeableObject, UndeletableObject, UnorderedObject,
UserMessenger, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker,
WorkHour,
)
@admin.display(ordering='date')
def callable_year(dt_value):
try:
return dt_value.year
except AttributeError:
return None
class ArticleInline(admin.TabularInline):
model = Article
fk_name = 'section'
prepopulated_fields = {
'title': ('content',)
}
fieldsets = (
('Some fields', {
'classes': ('collapse',),
'fields': ('title', 'content')
}),
('Some other fields', {
'classes': ('wide',),
'fields': ('date', 'section')
})
)
class ChapterInline(admin.TabularInline):
model = Chapter
class ChapterXtra1Admin(admin.ModelAdmin):
list_filter = (
'chap',
'chap__title',
'chap__book',
'chap__book__name',
'chap__book__promo',
'chap__book__promo__name',
'guest_author__promo__book',
)
class ArticleForm(forms.ModelForm):
extra_form_field = forms.BooleanField(required=False)
class Meta:
fields = '__all__'
model = Article
class ArticleAdminWithExtraUrl(admin.ModelAdmin):
def get_urls(self):
urlpatterns = super().get_urls()
urlpatterns.append(
path('extra.json', self.admin_site.admin_view(self.extra_json), name='article_extra_json')
)
return urlpatterns
def extra_json(self, request):
return JsonResponse({})
class ArticleAdmin(ArticleAdminWithExtraUrl):
list_display = (
'content', 'date', callable_year, 'model_year', 'modeladmin_year',
'model_year_reversed', 'section', lambda obj: obj.title,
'order_by_expression', 'model_property_year', 'model_month',
'order_by_f_expression', 'order_by_orderby_expression',
)
list_editable = ('section',)
list_filter = ('date', 'section')
autocomplete_fields = ('section',)
view_on_site = False
form = ArticleForm
fieldsets = (
('Some fields', {
'classes': ('collapse',),
'fields': ('title', 'content', 'extra_form_field'),
}),
('Some other fields', {
'classes': ('wide',),
'fields': ('date', 'section', 'sub_section')
})
)
# These orderings aren't particularly useful but show that expressions can
# be used for admin_order_field.
@admin.display(ordering=models.F('date') + datetime.timedelta(days=3))
def order_by_expression(self, obj):
return obj.model_year
@admin.display(ordering=models.F('date'))
def order_by_f_expression(self, obj):
return obj.model_year
@admin.display(ordering=models.F('date').asc(nulls_last=True))
def order_by_orderby_expression(self, obj):
return obj.model_year
def changelist_view(self, request):
return super().changelist_view(request, extra_context={'extra_var': 'Hello!'})
@admin.display(ordering='date', description=None)
def modeladmin_year(self, obj):
return obj.date.year
def delete_model(self, request, obj):
EmailMessage(
'Greetings from a deleted object',
'I hereby inform you that some user deleted me',
'[email protected]',
['[email protected]']
).send()
return super().delete_model(request, obj)
def save_model(self, request, obj, form, change=True):
EmailMessage(
'Greetings from a created object',
'I hereby inform you that some user created me',
'[email protected]',
['[email protected]']
).send()
return super().save_model(request, obj, form, change)
class ArticleAdmin2(admin.ModelAdmin):
def has_module_permission(self, request):
return False
class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
""" Only allow changing objects with even id number """
return request.user.is_staff and (obj is not None) and (obj.id % 2 == 0)
def has_view_permission(self, request, obj=None):
"""Only allow viewing objects if id is a multiple of 3."""
return request.user.is_staff and obj is not None and obj.id % 3 == 0
class CustomArticleAdmin(admin.ModelAdmin):
"""
Tests various hooks for using custom templates and contexts.
"""
change_list_template = 'custom_admin/change_list.html'
change_form_template = 'custom_admin/change_form.html'
add_form_template = 'custom_admin/add_form.html'
object_history_template = 'custom_admin/object_history.html'
delete_confirmation_template = 'custom_admin/delete_confirmation.html'
delete_selected_confirmation_template = 'custom_admin/delete_selected_confirmation.html'
popup_response_template = 'custom_admin/popup_response.html'
def changelist_view(self, request):
return super().changelist_view(request, extra_context={'extra_var': 'Hello!'})
class ThingAdmin(admin.ModelAdmin):
list_filter = ('color', 'color__warm', 'color__value', 'pub_date')
class InquisitionAdmin(admin.ModelAdmin):
list_display = ('leader', 'country', 'expected', 'sketch')
@admin.display
def sketch(self, obj):
# A method with the same name as a reverse accessor.
return 'list-display-sketch'
class SketchAdmin(admin.ModelAdmin):
raw_id_fields = ('inquisition', 'defendant0', 'defendant1')
class FabricAdmin(admin.ModelAdmin):
list_display = ('surface',)
list_filter = ('surface',)
class BasePersonModelFormSet(BaseModelFormSet):
def clean(self):
for person_dict in self.cleaned_data:
person = person_dict.get('id')
alive = person_dict.get('alive')
if person and alive and person.name == "Grace Hopper":
raise ValidationError("Grace is not a Zombie")
class PersonAdmin(admin.ModelAdmin):
list_display = ('name', 'gender', 'alive')
list_editable = ('gender', 'alive')
list_filter = ('gender',)
search_fields = ('^name',)
save_as = True
def get_changelist_formset(self, request, **kwargs):
return super().get_changelist_formset(request, formset=BasePersonModelFormSet, **kwargs)
def get_queryset(self, request):
# Order by a field that isn't in list display, to be able to test
# whether ordering is preserved.
return super().get_queryset(request).order_by('age')
class FooAccountAdmin(admin.StackedInline):
model = FooAccount
extra = 1
class BarAccountAdmin(admin.StackedInline):
model = BarAccount
extra = 1
class PersonaAdmin(admin.ModelAdmin):
inlines = (
FooAccountAdmin,
BarAccountAdmin
)
class SubscriberAdmin(admin.ModelAdmin):
actions = ['mail_admin']
action_form = MediaActionForm
def delete_queryset(self, request, queryset):
SubscriberAdmin.overridden = True
super().delete_queryset(request, queryset)
@admin.action
def mail_admin(self, request, selected):
EmailMessage(
'Greetings from a ModelAdmin action',
'This is the test email from an admin action',
'[email protected]',
['[email protected]']
).send()
@admin.action(description='External mail (Another awesome action)')
def external_mail(modeladmin, request, selected):
EmailMessage(
'Greetings from a function action',
'This is the test email from a function action',
'[email protected]',
['[email protected]']
).send()
@admin.action(description='Redirect to (Awesome action)')
def redirect_to(modeladmin, request, selected):
from django.http import HttpResponseRedirect
return HttpResponseRedirect('/some-where-else/')
@admin.action(description='Download subscription')
def download(modeladmin, request, selected):
buf = StringIO('This is the content of the file')
return StreamingHttpResponse(FileWrapper(buf))
@admin.action(description='No permission to run')
def no_perm(modeladmin, request, selected):
return HttpResponse(content='No permission to perform this action', status=403)
class ExternalSubscriberAdmin(admin.ModelAdmin):
actions = [redirect_to, external_mail, download, no_perm]
class PodcastAdmin(admin.ModelAdmin):
list_display = ('name', 'release_date')
list_editable = ('release_date',)
date_hierarchy = 'release_date'
ordering = ('name',)
class VodcastAdmin(admin.ModelAdmin):
list_display = ('name', 'released')
list_editable = ('released',)
ordering = ('name',)
class ChildInline(admin.StackedInline):
model = Child
class ParentAdmin(admin.ModelAdmin):
model = Parent
inlines = [ChildInline]
save_as = True
list_display = ('id', 'name',)
list_display_links = ('id',)
list_editable = ('name',)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
first_name, last_name = form.instance.name.split()
for child in form.instance.child_set.all():
if len(child.name.split()) < 2:
child.name = child.name + ' ' + last_name
child.save()
class EmptyModelAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return super().get_queryset(request).filter(pk__gt=1)
class OldSubscriberAdmin(admin.ModelAdmin):
actions = None
class PictureInline(admin.TabularInline):
model = Picture
extra = 1
class GalleryAdmin(admin.ModelAdmin):
inlines = [PictureInline]
class PictureAdmin(admin.ModelAdmin):
pass
class LanguageAdmin(admin.ModelAdmin):
list_display = ['iso', 'shortlist', 'english_name', 'name']
list_editable = ['shortlist']
class RecommendationAdmin(admin.ModelAdmin):
show_full_result_count = False
search_fields = ('=titletranslation__text', '=the_recommender__titletranslation__text',)
class WidgetInline(admin.StackedInline):
model = Widget
class DooHickeyInline(admin.StackedInline):
model = DooHickey
class GrommetInline(admin.StackedInline):
model = Grommet
class WhatsitInline(admin.StackedInline):
model = Whatsit
class FancyDoodadInline(admin.StackedInline):
model = FancyDoodad
class CategoryAdmin(admin.ModelAdmin):
list_display = ('id', 'collector', 'order')
list_editable = ('order',)
class CategoryInline(admin.StackedInline):
model = Category
class CollectorAdmin(admin.ModelAdmin):
inlines = [
WidgetInline, DooHickeyInline, GrommetInline, WhatsitInline,
FancyDoodadInline, CategoryInline
]
class LinkInline(admin.TabularInline):
model = Link
extra = 1
readonly_fields = ("posted", "multiline", "readonly_link_content")
@admin.display
def multiline(self, instance):
return "InlineMultiline\ntest\nstring"
class SubPostInline(admin.TabularInline):
model = PrePopulatedSubPost
prepopulated_fields = {
'subslug': ('subtitle',)
}
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ('subslug',)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
class PrePopulatedPostAdmin(admin.ModelAdmin):
list_display = ['title', 'slug']
prepopulated_fields = {
'slug': ('title',)
}
inlines = [SubPostInline]
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ('slug',)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
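# Rough effect of the two hooks above (a sketch, not asserted by this module;
# `admin_obj`, `request` and the post objects are hypothetical names):
#
#     admin_obj.get_prepopulated_fields(request, published_post)    # -> {}
#     admin_obj.get_prepopulated_fields(request, unpublished_post)  # -> {'slug': ('title',)}
#     admin_obj.get_readonly_fields(request, published_post)        # -> ('slug',)
#
# Once a post is published, its slug is rendered read-only and no
# prepopulation JavaScript is emitted for it.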
class PrePopulatedPostReadOnlyAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
def has_change_permission(self, *args, **kwargs):
return False
class PostAdmin(admin.ModelAdmin):
list_display = ['title', 'public']
readonly_fields = (
'posted', 'awesomeness_level', 'coolness', 'value',
'multiline', 'multiline_html', lambda obj: "foo",
'readonly_content',
)
inlines = [
LinkInline
]
@admin.display
def coolness(self, instance):
if instance.pk:
return "%d amount of cool." % instance.pk
else:
return "Unknown coolness."
@admin.display(description='Value in $US')
def value(self, instance):
return 1000
@admin.display
def multiline(self, instance):
return "Multiline\ntest\nstring"
@admin.display
def multiline_html(self, instance):
return mark_safe("Multiline<br>\nhtml<br>\ncontent")
class FieldOverridePostForm(forms.ModelForm):
model = FieldOverridePost
class Meta:
help_texts = {
'posted': 'Overridden help text for the date',
}
labels = {
'public': 'Overridden public label',
}
class FieldOverridePostAdmin(PostAdmin):
form = FieldOverridePostForm
class CustomChangeList(ChangeList):
def get_queryset(self, request):
return self.root_queryset.order_by('pk').filter(pk=9999) # Doesn't exist
class GadgetAdmin(admin.ModelAdmin):
def get_changelist(self, request, **kwargs):
return CustomChangeList
class ToppingAdmin(admin.ModelAdmin):
readonly_fields = ('pizzas',)
class PizzaAdmin(admin.ModelAdmin):
readonly_fields = ('toppings',)
class ReadOnlyRelatedFieldAdmin(admin.ModelAdmin):
readonly_fields = ('chapter', 'language', 'user')
class StudentAdmin(admin.ModelAdmin):
search_fields = ('name',)
class ReadOnlyPizzaAdmin(admin.ModelAdmin):
readonly_fields = ('name', 'toppings')
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return True
def has_delete_permission(self, request, obj=None):
return True
class WorkHourAdmin(admin.ModelAdmin):
list_display = ('datum', 'employee')
list_filter = ('employee',)
class FoodDeliveryAdmin(admin.ModelAdmin):
list_display = ('reference', 'driver', 'restaurant')
list_editable = ('driver', 'restaurant')
class CoverLetterAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing CoverLetter
instances. Note that the CoverLetter model defines a __str__ method.
For testing the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer('date_written')
class PaperAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Paper
instances.
For testing the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only('title')
class ShortMessageAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing ShortMessage
instances.
For testing the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer('timestamp')
class TelegramAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Telegram
instances. Note that the Telegram model defines a __str__ method.
For testing the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only('title')
class StoryForm(forms.ModelForm):
class Meta:
widgets = {'title': forms.HiddenInput}
class StoryAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'content')
list_display_links = ('title',) # 'id' not in list_display_links
list_editable = ('content',)
form = StoryForm
ordering = ['-id']
class OtherStoryAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'content')
list_display_links = ('title', 'id') # 'id' in list_display_links
list_editable = ('content',)
ordering = ['-id']
class ComplexSortedPersonAdmin(admin.ModelAdmin):
list_display = ('name', 'age', 'is_employee', 'colored_name')
ordering = ('name',)
@admin.display(ordering='name')
def colored_name(self, obj):
return format_html('<span style="color: #ff00ff;">{}</span>', obj.name)
class PluggableSearchPersonAdmin(admin.ModelAdmin):
list_display = ('name', 'age')
search_fields = ('name',)
def get_search_results(self, request, queryset, search_term):
queryset, may_have_duplicates = super().get_search_results(
request, queryset, search_term,
)
try:
search_term_as_int = int(search_term)
except ValueError:
pass
else:
queryset |= self.model.objects.filter(age=search_term_as_int)
return queryset, may_have_duplicates
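# Usage sketch for the pluggable search above (hypothetical `admin_obj` and
# `request`, for illustration only):
#
#     qs, may_have_duplicates = admin_obj.get_search_results(
#         request, PluggableSearchPerson.objects.all(), '30',
#     )
#
# The default name search runs first; because '30' also parses as an int,
# the result is OR'ed with objects whose age == 30.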
class AlbumAdmin(admin.ModelAdmin):
list_filter = ['title']
class QuestionAdmin(admin.ModelAdmin):
ordering = ['-posted']
search_fields = ['question']
autocomplete_fields = ['related_questions']
class AnswerAdmin(admin.ModelAdmin):
autocomplete_fields = ['question']
class PrePopulatedPostLargeSlugAdmin(admin.ModelAdmin):
prepopulated_fields = {
'slug': ('title',)
}
class AdminOrderedFieldAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', 'order')
class AdminOrderedModelMethodAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', 'some_order')
class AdminOrderedAdminMethodAdmin(admin.ModelAdmin):
@admin.display(ordering='order')
def some_admin_order(self, obj):
return obj.order
ordering = ('order',)
list_display = ('stuff', 'some_admin_order')
@admin.display(ordering='order')
def admin_ordered_callable(obj):
return obj.order
class AdminOrderedCallableAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', admin_ordered_callable)
class ReportAdmin(admin.ModelAdmin):
def extra(self, request):
return HttpResponse()
def get_urls(self):
# Corner case: Don't call parent implementation
return [path('extra/', self.extra, name='cable_extra')]
class CustomTemplateBooleanFieldListFilter(BooleanFieldListFilter):
template = 'custom_filter_template.html'
class CustomTemplateFilterColorAdmin(admin.ModelAdmin):
list_filter = (('warm', CustomTemplateBooleanFieldListFilter),)
# For Selenium Prepopulated tests -------------------------------------
class RelatedPrepopulatedInline1(admin.StackedInline):
fieldsets = (
(None, {
'fields': (
('fk', 'm2m'),
('pubdate', 'status'),
('name', 'slug1', 'slug2',),
),
}),
)
formfield_overrides = {models.CharField: {'strip': False}}
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ['fk', 'm2m']
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
}
class RelatedPrepopulatedInline2(admin.TabularInline):
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ['fk', 'm2m']
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
}
class RelatedPrepopulatedInline3(admin.TabularInline):
model = RelatedPrepopulated
extra = 0
autocomplete_fields = ['fk', 'm2m']
class MainPrepopulatedAdmin(admin.ModelAdmin):
inlines = [RelatedPrepopulatedInline1, RelatedPrepopulatedInline2, RelatedPrepopulatedInline3]
fieldsets = (
(None, {
'fields': (('pubdate', 'status'), ('name', 'slug1', 'slug2', 'slug3'))
}),
)
formfield_overrides = {models.CharField: {'strip': False}}
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
'slug3': ['name'],
}
class UnorderedObjectAdmin(admin.ModelAdmin):
list_display = ['id', 'name']
list_display_links = ['id']
list_editable = ['name']
list_per_page = 2
class UndeletableObjectAdmin(admin.ModelAdmin):
def change_view(self, *args, **kwargs):
kwargs['extra_context'] = {'show_delete': False}
return super().change_view(*args, **kwargs)
class UnchangeableObjectAdmin(admin.ModelAdmin):
def get_urls(self):
# Disable change_view, but leave the other URLs untouched.
urlpatterns = super().get_urls()
return [p for p in urlpatterns if p.name and not p.name.endswith("_change")]
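# For reference: ModelAdmin URL names follow '<app_label>_<model_name>_<action>'
# (changelist, add, history, delete, change), so the filter above drops e.g.
# 'admin_views_unchangeableobject_change' while keeping the other views.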
@admin.display
def callable_on_unknown(obj):
return obj.unknown
class AttributeErrorRaisingAdmin(admin.ModelAdmin):
list_display = [callable_on_unknown]
class CustomManagerAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return FilteredManager.objects
class MessageTestingAdmin(admin.ModelAdmin):
actions = ["message_debug", "message_info", "message_success",
"message_warning", "message_error", "message_extra_tags"]
@admin.action
def message_debug(self, request, selected):
self.message_user(request, "Test debug", level="debug")
@admin.action
def message_info(self, request, selected):
self.message_user(request, "Test info", level="info")
@admin.action
def message_success(self, request, selected):
self.message_user(request, "Test success", level="success")
@admin.action
def message_warning(self, request, selected):
self.message_user(request, "Test warning", level="warning")
@admin.action
def message_error(self, request, selected):
self.message_user(request, "Test error", level="error")
@admin.action
def message_extra_tags(self, request, selected):
self.message_user(request, "Test tags", extra_tags="extra_tag")
class ChoiceList(admin.ModelAdmin):
list_display = ['choice']
readonly_fields = ['choice']
fields = ['choice']
class DependentChildAdminForm(forms.ModelForm):
"""
Issue #20522
Form to test child dependency on parent object's validation
"""
def clean(self):
parent = self.cleaned_data.get('parent')
if parent.family_name and parent.family_name != self.cleaned_data.get('family_name'):
raise ValidationError("Children must share a family name with their parents " +
"in this contrived test case")
return super().clean()
class DependentChildInline(admin.TabularInline):
model = DependentChild
form = DependentChildAdminForm
class ParentWithDependentChildrenAdmin(admin.ModelAdmin):
inlines = [DependentChildInline]
# Tests for ticket 11277 ----------------------------------
class FormWithoutHiddenField(forms.ModelForm):
first = forms.CharField()
second = forms.CharField()
class FormWithoutVisibleField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField(widget=forms.HiddenInput)
class FormWithVisibleAndHiddenField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField()
class EmptyModelVisibleAdmin(admin.ModelAdmin):
form = FormWithoutHiddenField
fieldsets = (
(None, {
'fields': (('first', 'second'),),
}),
)
class EmptyModelHiddenAdmin(admin.ModelAdmin):
form = FormWithoutVisibleField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class EmptyModelMixinAdmin(admin.ModelAdmin):
form = FormWithVisibleAndHiddenField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class CityInlineAdmin(admin.TabularInline):
model = City
view_on_site = False
class StateAdminForm(forms.ModelForm):
nolabel_form_field = forms.BooleanField(required=False)
class Meta:
model = State
fields = '__all__'
labels = {'name': 'State name (from form’s Meta.labels)'}
@property
def changed_data(self):
data = super().changed_data
if data:
# Add arbitrary name to changed_data to test
# change message construction.
return data + ['not_a_form_field']
return data
class StateAdmin(admin.ModelAdmin):
inlines = [CityInlineAdmin]
form = StateAdminForm
class RestaurantInlineAdmin(admin.TabularInline):
model = Restaurant
view_on_site = True
class CityAdmin(admin.ModelAdmin):
inlines = [RestaurantInlineAdmin]
view_on_site = True
def get_formset_kwargs(self, request, obj, inline, prefix):
return {
**super().get_formset_kwargs(request, obj, inline, prefix),
'form_kwargs': {'initial': {'name': 'overridden_name'}},
}
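# Note on the override above (a sketch of the mechanics, not asserted here):
# BaseFormSet accepts a `form_kwargs` dict and passes it to each member form's
# constructor, so every Restaurant inline form on a City change page starts
# with initial={'name': 'overridden_name'}.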
class WorkerAdmin(admin.ModelAdmin):
def view_on_site(self, obj):
return '/worker/%s/%s/' % (obj.surname, obj.name)
class WorkerInlineAdmin(admin.TabularInline):
model = Worker
def view_on_site(self, obj):
return '/worker_inline/%s/%s/' % (obj.surname, obj.name)
class RestaurantAdmin(admin.ModelAdmin):
inlines = [WorkerInlineAdmin]
view_on_site = False
def get_changeform_initial_data(self, request):
return {'name': 'overridden_value'}
class FunkyTagAdmin(admin.ModelAdmin):
list_display = ('name', 'content_object')
class InlineReferenceInline(admin.TabularInline):
model = InlineReference
class InlineRefererAdmin(admin.ModelAdmin):
inlines = [InlineReferenceInline]
class PlotReadonlyAdmin(admin.ModelAdmin):
readonly_fields = ('plotdetails',)
class GetFormsetsArgumentCheckingAdmin(admin.ModelAdmin):
fields = ['name']
def add_view(self, request, *args, **kwargs):
request.is_add_view = True
return super().add_view(request, *args, **kwargs)
def change_view(self, request, *args, **kwargs):
request.is_add_view = False
return super().change_view(request, *args, **kwargs)
def get_formsets_with_inlines(self, request, obj=None):
if request.is_add_view and obj is not None:
raise Exception("'obj' passed to get_formsets_with_inlines wasn't None during add_view")
if not request.is_add_view and obj is None:
raise Exception("'obj' passed to get_formsets_with_inlines was None during change_view")
return super().get_formsets_with_inlines(request, obj)
site = admin.AdminSite(name="admin")
site.site_url = '/my-site-url/'
site.register(Article, ArticleAdmin)
site.register(CustomArticle, CustomArticleAdmin)
site.register(
Section, save_as=True, inlines=[ArticleInline],
readonly_fields=['name_property'], search_fields=['name'],
)
site.register(ModelWithStringPrimaryKey)
site.register(Color)
site.register(Thing, ThingAdmin)
site.register(Actor)
site.register(Inquisition, InquisitionAdmin)
site.register(Sketch, SketchAdmin)
site.register(Person, PersonAdmin)
site.register(Persona, PersonaAdmin)
site.register(Subscriber, SubscriberAdmin)
site.register(ExternalSubscriber, ExternalSubscriberAdmin)
site.register(OldSubscriber, OldSubscriberAdmin)
site.register(Podcast, PodcastAdmin)
site.register(Vodcast, VodcastAdmin)
site.register(Parent, ParentAdmin)
site.register(EmptyModel, EmptyModelAdmin)
site.register(Fabric, FabricAdmin)
site.register(Gallery, GalleryAdmin)
site.register(Picture, PictureAdmin)
site.register(Language, LanguageAdmin)
site.register(Recommendation, RecommendationAdmin)
site.register(Recommender)
site.register(Collector, CollectorAdmin)
site.register(Category, CategoryAdmin)
site.register(Post, PostAdmin)
site.register(FieldOverridePost, FieldOverridePostAdmin)
site.register(Gadget, GadgetAdmin)
site.register(Villain)
site.register(SuperVillain)
site.register(Plot)
site.register(PlotDetails)
site.register(PlotProxy, PlotReadonlyAdmin)
site.register(Bookmark)
site.register(CyclicOne)
site.register(CyclicTwo)
site.register(WorkHour, WorkHourAdmin)
site.register(Reservation)
site.register(FoodDelivery, FoodDeliveryAdmin)
site.register(RowLevelChangePermissionModel, RowLevelChangePermissionModelAdmin)
site.register(Paper, PaperAdmin)
site.register(CoverLetter, CoverLetterAdmin)
site.register(ShortMessage, ShortMessageAdmin)
site.register(Telegram, TelegramAdmin)
site.register(Story, StoryAdmin)
site.register(OtherStory, OtherStoryAdmin)
site.register(Report, ReportAdmin)
site.register(MainPrepopulated, MainPrepopulatedAdmin)
site.register(UnorderedObject, UnorderedObjectAdmin)
site.register(UndeletableObject, UndeletableObjectAdmin)
site.register(UnchangeableObject, UnchangeableObjectAdmin)
site.register(State, StateAdmin)
site.register(City, CityAdmin)
site.register(Restaurant, RestaurantAdmin)
site.register(Worker, WorkerAdmin)
site.register(FunkyTag, FunkyTagAdmin)
site.register(ReferencedByParent)
site.register(ChildOfReferer)
site.register(ReferencedByInline)
site.register(InlineReferer, InlineRefererAdmin)
site.register(ReferencedByGenRel)
site.register(GenRelReference)
site.register(ParentWithUUIDPK)
site.register(RelatedPrepopulated, search_fields=['name'])
site.register(RelatedWithUUIDPKModel)
site.register(ReadOnlyRelatedField, ReadOnlyRelatedFieldAdmin)
# We intentionally register Promo and ChapterXtra1 but not Chapter or ChapterXtra2.
# That way we cover all four cases:
# related ForeignKey object registered in admin
# related ForeignKey object not registered in admin
# related OneToOne object registered in admin
# related OneToOne object not registered in admin
# when deleting Book, so as to exercise all four paths through
# contrib.admin.utils's get_deleted_objects function.
site.register(Book, inlines=[ChapterInline])
site.register(Promo)
site.register(ChapterXtra1, ChapterXtra1Admin)
site.register(Pizza, PizzaAdmin)
site.register(ReadOnlyPizza, ReadOnlyPizzaAdmin)
site.register(ReadablePizza)
site.register(Topping, ToppingAdmin)
site.register(Album, AlbumAdmin)
site.register(Song)
site.register(Question, QuestionAdmin)
site.register(Answer, AnswerAdmin, date_hierarchy='question__posted')
site.register(Answer2, date_hierarchy='question__expires')
site.register(PrePopulatedPost, PrePopulatedPostAdmin)
site.register(ComplexSortedPerson, ComplexSortedPersonAdmin)
site.register(FilteredManager, CustomManagerAdmin)
site.register(PluggableSearchPerson, PluggableSearchPersonAdmin)
site.register(PrePopulatedPostLargeSlug, PrePopulatedPostLargeSlugAdmin)
site.register(AdminOrderedField, AdminOrderedFieldAdmin)
site.register(AdminOrderedModelMethod, AdminOrderedModelMethodAdmin)
site.register(AdminOrderedAdminMethod, AdminOrderedAdminMethodAdmin)
site.register(AdminOrderedCallable, AdminOrderedCallableAdmin)
site.register(Color2, CustomTemplateFilterColorAdmin)
site.register(Simple, AttributeErrorRaisingAdmin)
site.register(UserMessenger, MessageTestingAdmin)
site.register(Choice, ChoiceList)
site.register(ParentWithDependentChildren, ParentWithDependentChildrenAdmin)
site.register(EmptyModelHidden, EmptyModelHiddenAdmin)
site.register(EmptyModelVisible, EmptyModelVisibleAdmin)
site.register(EmptyModelMixin, EmptyModelMixinAdmin)
site.register(StumpJoke)
site.register(Recipe)
site.register(Ingredient)
site.register(NotReferenced)
site.register(ExplicitlyProvidedPK, GetFormsetsArgumentCheckingAdmin)
site.register(ImplicitlyGeneratedPK, GetFormsetsArgumentCheckingAdmin)
site.register(UserProxy)
# Register core models we need in our tests
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
# Used to test URL namespaces
site2 = admin.AdminSite(name="namespaced_admin")
site2.register(User, UserAdmin)
site2.register(Group, GroupAdmin)
site2.register(ParentWithUUIDPK)
site2.register(
RelatedWithUUIDPKModel,
list_display=['pk', 'parent'],
list_editable=['parent'],
raw_id_fields=['parent'],
)
site2.register(Person, save_as_continue=False)
site7 = admin.AdminSite(name="admin7")
site7.register(Article, ArticleAdmin2)
site7.register(Section)
site7.register(PrePopulatedPost, PrePopulatedPostReadOnlyAdmin)
# Used to test ModelAdmin.sortable_by and get_sortable_by().
class ArticleAdmin6(admin.ModelAdmin):
list_display = (
'content', 'date', callable_year, 'model_year', 'modeladmin_year',
'model_year_reversed', 'section',
)
sortable_by = ('date', callable_year)
@admin.display(ordering='date')
def modeladmin_year(self, obj):
return obj.date.year
class ActorAdmin6(admin.ModelAdmin):
list_display = ('name', 'age')
sortable_by = ('name',)
def get_sortable_by(self, request):
return ('age',)
class ChapterAdmin6(admin.ModelAdmin):
list_display = ('title', 'book')
sortable_by = ()
class ColorAdmin6(admin.ModelAdmin):
list_display = ('value',)
def get_sortable_by(self, request):
return ()
site6 = admin.AdminSite(name='admin6')
site6.register(Article, ArticleAdmin6)
site6.register(Actor, ActorAdmin6)
site6.register(Chapter, ChapterAdmin6)
site6.register(Color, ColorAdmin6)
class ArticleAdmin9(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
# Simulate that the user can't change a specific object.
return obj is None
class ActorAdmin9(admin.ModelAdmin):
def get_urls(self):
# Opt out of APPEND_SLASH for a single model.
urls = super().get_urls()
for pattern in urls:
pattern.callback = no_append_slash(pattern.callback)
return urls
site9 = admin.AdminSite(name='admin9')
site9.register(Article, ArticleAdmin9)
site9.register(Actor, ActorAdmin9)
site10 = admin.AdminSite(name='admin10')
site10.final_catch_all_view = False
site10.register(Article, ArticleAdminWithExtraUrl)
|
30e75b3709d3b3a524d71f37a5f52fcd1152bc08eb677548a813ae1aa997cb86 | from django.contrib import admin
from django.contrib.admin.actions import delete_selected
from django.contrib.auth.models import User
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.client import RequestFactory
from django.urls import path, reverse
from .models import Article
site = admin.AdminSite(name="test_adminsite")
site.register(User)
site.register(Article)
urlpatterns = [
path('test_admin/admin/', site.urls),
]
@override_settings(ROOT_URLCONF='admin_views.test_adminsite')
class SiteEachContextTest(TestCase):
"""
Check that each_context() contains the documented variables and that the
available_apps context variable has the expected structure.
"""
request_factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
request = self.request_factory.get(reverse('test_adminsite:index'))
request.user = self.u1
self.ctx = site.each_context(request)
def test_each_context(self):
ctx = self.ctx
self.assertEqual(ctx['site_header'], 'Django administration')
self.assertEqual(ctx['site_title'], 'Django site admin')
self.assertEqual(ctx['site_url'], '/')
self.assertIs(ctx['has_permission'], True)
def test_each_context_site_url_with_script_name(self):
request = self.request_factory.get(reverse('test_adminsite:index'), SCRIPT_NAME='/my-script-name/')
request.user = self.u1
self.assertEqual(site.each_context(request)['site_url'], '/my-script-name/')
def test_available_apps(self):
ctx = self.ctx
apps = ctx['available_apps']
# We have registered two models from two different apps.
self.assertEqual(len(apps), 2)
# admin_views.Article
admin_views = apps[0]
self.assertEqual(admin_views['app_label'], 'admin_views')
self.assertEqual(len(admin_views['models']), 1)
article = admin_views['models'][0]
self.assertEqual(article['object_name'], 'Article')
self.assertEqual(article['model'], Article)
# auth.User
auth = apps[1]
self.assertEqual(auth['app_label'], 'auth')
self.assertEqual(len(auth['models']), 1)
user = auth['models'][0]
self.assertEqual(user['object_name'], 'User')
self.assertEqual(user['model'], User)
self.assertEqual(auth['app_url'], '/test_admin/admin/auth/')
self.assertIs(auth['has_module_perms'], True)
self.assertIn('perms', user)
self.assertIs(user['perms']['add'], True)
self.assertIs(user['perms']['change'], True)
self.assertIs(user['perms']['delete'], True)
self.assertEqual(user['admin_url'], '/test_admin/admin/auth/user/')
self.assertEqual(user['add_url'], '/test_admin/admin/auth/user/add/')
self.assertEqual(user['name'], 'Users')
class SiteActionsTests(SimpleTestCase):
def setUp(self):
self.site = admin.AdminSite()
def test_add_action(self):
def test_action():
pass
self.site.add_action(test_action)
self.assertEqual(self.site.get_action('test_action'), test_action)
def test_disable_action(self):
action_name = 'delete_selected'
self.assertEqual(self.site._actions[action_name], delete_selected)
self.site.disable_action(action_name)
with self.assertRaises(KeyError):
self.site._actions[action_name]
def test_get_action(self):
"""AdminSite.get_action() returns an action even if it's disabled."""
action_name = 'delete_selected'
self.assertEqual(self.site.get_action(action_name), delete_selected)
self.site.disable_action(action_name)
self.assertEqual(self.site.get_action(action_name), delete_selected)
|
6a6d660e23ec6daea24d5a7bc15286f33242ee260914391b6cbaae1c97abcaf4 | from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.admin.helpers import AdminForm
from django.contrib.auth.models import User
from django.test import SimpleTestCase, TestCase, override_settings
from .admin import ArticleForm
# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend'])
class AdminAuthenticationFormTests(TestCase):
@classmethod
def setUpTestData(cls):
User.objects.create_user(username='inactive', password='password', is_active=False)
def test_inactive_user(self):
data = {
'username': 'inactive',
'password': 'password',
}
form = AdminAuthenticationForm(None, data)
self.assertEqual(form.non_field_errors(), ['This account is inactive.'])
class AdminFormTests(SimpleTestCase):
def test_repr(self):
fieldsets = (
('My fields', {
'classes': ['collapse'],
'fields': ('url', 'title', 'content', 'sites'),
}),
)
form = ArticleForm()
admin_form = AdminForm(form, fieldsets, {})
self.assertEqual(
repr(admin_form),
"<AdminForm: form=ArticleForm fieldsets=(('My fields', "
"{'classes': ['collapse'], "
"'fields': ('url', 'title', 'content', 'sites')}),)>",
)
|
0a2ad1d7c49826fe894f0be871ef0d79cec1239c64f0a36ad44812bc8a857e29 | from django.apps import apps
from django.apps.registry import Apps
from django.conf import settings
from django.contrib.sites import models
from django.contrib.sites.checks import check_site_id
from django.contrib.sites.management import create_default_site
from django.contrib.sites.middleware import CurrentSiteMiddleware
from django.contrib.sites.models import Site, clear_site_cache
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.core import checks
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.signals import post_migrate
from django.http import HttpRequest, HttpResponse
from django.test import (
SimpleTestCase, TestCase, modify_settings, override_settings,
)
from django.test.utils import captured_stdout
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class SitesFrameworkTests(TestCase):
databases = {'default', 'other'}
@classmethod
def setUpTestData(cls):
cls.site = Site(id=settings.SITE_ID, domain='example.com', name='example.com')
cls.site.save()
def setUp(self):
Site.objects.clear_cache()
def tearDown(self):
Site.objects.clear_cache()
def test_site_manager(self):
# Make sure that get_current() does not return a deleted Site object.
s = Site.objects.get_current()
self.assertIsInstance(s, Site)
s.delete()
with self.assertRaises(ObjectDoesNotExist):
Site.objects.get_current()
def test_site_cache(self):
# After updating a Site object (e.g. via the admin), we shouldn't return a
# bogus value from the SITE_CACHE.
site = Site.objects.get_current()
self.assertEqual("example.com", site.name)
s2 = Site.objects.get(id=settings.SITE_ID)
s2.name = "Example site"
s2.save()
site = Site.objects.get_current()
self.assertEqual("Example site", site.name)
def test_delete_all_sites_clears_cache(self):
# When all site objects are deleted the cache should also
# be cleared and get_current() should raise a DoesNotExist.
self.assertIsInstance(Site.objects.get_current(), Site)
Site.objects.all().delete()
with self.assertRaises(Site.DoesNotExist):
Site.objects.get_current()
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_get_current_site(self):
# The correct Site object is returned
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
site = get_current_site(request)
self.assertIsInstance(site, Site)
self.assertEqual(site.id, settings.SITE_ID)
# An exception is raised if the sites framework is installed
# but there is no matching Site
site.delete()
with self.assertRaises(ObjectDoesNotExist):
get_current_site(request)
# A RequestSite is returned if the sites framework is not installed
with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
site = get_current_site(request)
self.assertIsInstance(site, RequestSite)
self.assertEqual(site.name, "example.com")
@override_settings(SITE_ID=None, ALLOWED_HOSTS=['example.com'])
def test_get_current_site_no_site_id(self):
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
del settings.SITE_ID
site = get_current_site(request)
self.assertEqual(site.name, "example.com")
@override_settings(SITE_ID=None, ALLOWED_HOSTS=['example.com'])
def test_get_current_site_host_with_trailing_dot(self):
"""
The site is matched if the name in the request has a trailing dot.
"""
request = HttpRequest()
request.META = {
'SERVER_NAME': 'example.com.',
'SERVER_PORT': '80',
}
site = get_current_site(request)
self.assertEqual(site.name, 'example.com')
@override_settings(SITE_ID=None, ALLOWED_HOSTS=['example.com', 'example.net'])
def test_get_current_site_no_site_id_and_handle_port_fallback(self):
request = HttpRequest()
s1 = self.site
s2 = Site.objects.create(domain='example.com:80', name='example.com:80')
# Host header without port
request.META = {'HTTP_HOST': 'example.com'}
site = get_current_site(request)
self.assertEqual(site, s1)
# Host header with port - match, no fallback without port
request.META = {'HTTP_HOST': 'example.com:80'}
site = get_current_site(request)
self.assertEqual(site, s2)
# Host header with port - no match, fallback without port
request.META = {'HTTP_HOST': 'example.com:81'}
site = get_current_site(request)
self.assertEqual(site, s1)
# Host header with non-matching domain
request.META = {'HTTP_HOST': 'example.net'}
with self.assertRaises(ObjectDoesNotExist):
get_current_site(request)
# Ensure domain for RequestSite always matches host header
with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
request.META = {'HTTP_HOST': 'example.com'}
site = get_current_site(request)
self.assertEqual(site.name, 'example.com')
request.META = {'HTTP_HOST': 'example.com:80'}
site = get_current_site(request)
self.assertEqual(site.name, 'example.com:80')
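# The fallback exercised above lives in SiteManager._get_site_by_request():
# it first looks up the full host (including any port) and, only when that
# raises Site.DoesNotExist, retries with the port stripped. This is a
# paraphrase of django.contrib.sites.models, not something asserted here.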
def test_domain_name_with_whitespaces(self):
# Regression for #17320
# Domain names are not allowed to contain whitespace characters.
site = Site(name="test name", domain="test test")
with self.assertRaises(ValidationError):
site.full_clean()
site.domain = "test\ttest"
with self.assertRaises(ValidationError):
site.full_clean()
site.domain = "test\ntest"
with self.assertRaises(ValidationError):
site.full_clean()
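# The rejections above come from _simple_domain_name_validator on Site.domain
# (django.contrib.sites.models); full_clean() runs the field validators, so
# each whitespace variant raises ValidationError.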
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_clear_site_cache(self):
request = HttpRequest()
request.META = {
"SERVER_NAME": "example.com",
"SERVER_PORT": "80",
}
self.assertEqual(models.SITE_CACHE, {})
get_current_site(request)
expected_cache = {self.site.id: self.site}
self.assertEqual(models.SITE_CACHE, expected_cache)
with self.settings(SITE_ID=None):
get_current_site(request)
expected_cache.update({self.site.domain: self.site})
self.assertEqual(models.SITE_CACHE, expected_cache)
clear_site_cache(Site, instance=self.site, using='default')
self.assertEqual(models.SITE_CACHE, {})
@override_settings(SITE_ID=None, ALLOWED_HOSTS=['example2.com'])
def test_clear_site_cache_domain(self):
site = Site.objects.create(name='example2.com', domain='example2.com')
request = HttpRequest()
request.META = {
"SERVER_NAME": "example2.com",
"SERVER_PORT": "80",
}
get_current_site(request) # prime the models.SITE_CACHE
expected_cache = {site.domain: site}
self.assertEqual(models.SITE_CACHE, expected_cache)
# Site exists in 'default' database so using='other' shouldn't clear.
clear_site_cache(Site, instance=site, using='other')
self.assertEqual(models.SITE_CACHE, expected_cache)
# using='default' should clear.
clear_site_cache(Site, instance=site, using='default')
self.assertEqual(models.SITE_CACHE, {})
def test_unique_domain(self):
site = Site(domain=self.site.domain)
msg = 'Site with this Domain name already exists.'
with self.assertRaisesMessage(ValidationError, msg):
site.validate_unique()
def test_site_natural_key(self):
self.assertEqual(Site.objects.get_by_natural_key(self.site.domain), self.site)
self.assertEqual(self.site.natural_key(), (self.site.domain,))
@override_settings(SITE_ID='1')
def test_check_site_id(self):
self.assertEqual(check_site_id(None), [
checks.Error(
msg='The SITE_ID setting must be an integer',
id='sites.E101',
),
])
def test_valid_site_id(self):
for site_id in [1, None]:
with self.subTest(site_id=site_id), self.settings(SITE_ID=site_id):
self.assertEqual(check_site_id(None), [])
@override_settings(ALLOWED_HOSTS=['example.com'])
class RequestSiteTests(SimpleTestCase):
def setUp(self):
request = HttpRequest()
request.META = {'HTTP_HOST': 'example.com'}
self.site = RequestSite(request)
def test_init_attributes(self):
self.assertEqual(self.site.domain, 'example.com')
self.assertEqual(self.site.name, 'example.com')
def test_str(self):
self.assertEqual(str(self.site), 'example.com')
def test_save(self):
msg = 'RequestSite cannot be saved.'
with self.assertRaisesMessage(NotImplementedError, msg):
self.site.save()
def test_delete(self):
msg = 'RequestSite cannot be deleted.'
with self.assertRaisesMessage(NotImplementedError, msg):
self.site.delete()
class JustOtherRouter:
def allow_migrate(self, db, app_label, **hints):
return db == 'other'
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class CreateDefaultSiteTests(TestCase):
databases = {'default', 'other'}
@classmethod
def setUpTestData(cls):
# Delete the site created as part of the default migration process.
Site.objects.all().delete()
def setUp(self):
self.app_config = apps.get_app_config('sites')
def test_basic(self):
"""
#15346, #15573 - create_default_site() creates an example site only if
none exist.
"""
with captured_stdout() as stdout:
create_default_site(self.app_config)
self.assertEqual(Site.objects.count(), 1)
self.assertIn("Creating example.com", stdout.getvalue())
with captured_stdout() as stdout:
create_default_site(self.app_config)
self.assertEqual(Site.objects.count(), 1)
self.assertEqual("", stdout.getvalue())
@override_settings(DATABASE_ROUTERS=[JustOtherRouter()])
def test_multi_db_with_router(self):
"""
#16353, #16828 - The default site creation should respect db routing.
"""
create_default_site(self.app_config, using='default', verbosity=0)
create_default_site(self.app_config, using='other', verbosity=0)
self.assertFalse(Site.objects.using('default').exists())
self.assertTrue(Site.objects.using('other').exists())
def test_multi_db(self):
create_default_site(self.app_config, using='default', verbosity=0)
create_default_site(self.app_config, using='other', verbosity=0)
self.assertTrue(Site.objects.using('default').exists())
self.assertTrue(Site.objects.using('other').exists())
def test_save_another(self):
"""
#17415 - Another site can be created right after the default one.
On some backends the sequence needs to be reset after saving with an
explicit ID. There shouldn't be a sequence collision when saving another
site. This test is only meaningful with databases that use sequences
for automatic primary keys such as PostgreSQL and Oracle.
"""
create_default_site(self.app_config, verbosity=0)
Site(domain='example2.com', name='example2.com').save()
def test_signal(self):
"""
#23641 - Sending the ``post_migrate`` signal triggers creation of the
default site.
"""
post_migrate.send(sender=self.app_config, app_config=self.app_config, verbosity=0)
self.assertTrue(Site.objects.exists())
@override_settings(SITE_ID=35696)
def test_custom_site_id(self):
"""
#23945 - The configured ``SITE_ID`` should be respected.
"""
create_default_site(self.app_config, verbosity=0)
self.assertEqual(Site.objects.get().pk, 35696)
@override_settings() # Restore original ``SITE_ID`` afterward.
def test_no_site_id(self):
"""
#24488 - The pk should default to 1 if no ``SITE_ID`` is configured.
"""
del settings.SITE_ID
create_default_site(self.app_config, verbosity=0)
self.assertEqual(Site.objects.get().pk, 1)
def test_unavailable_site_model(self):
"""
#24075 - A Site shouldn't be created if the model isn't available.
"""
apps = Apps()
create_default_site(self.app_config, verbosity=0, apps=apps)
self.assertFalse(Site.objects.exists())
class MiddlewareTest(TestCase):
def test_request(self):
def get_response(request):
return HttpResponse(str(request.site.id))
response = CurrentSiteMiddleware(get_response)(HttpRequest())
self.assertContains(response, settings.SITE_ID)
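# Illustrative settings fragment (an example, not used by these tests): the
# middleware exercised above is normally enabled by listing it in MIDDLEWARE
# so every request gets a lazy ``request.site`` attribute.
EXAMPLE_MIDDLEWARE = [
    'django.middleware.common.CommonMiddleware',
    # Attaches request.site (a Site, or a RequestSite as a fallback).
    'django.contrib.sites.middleware.CurrentSiteMiddleware',
]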
|
272a168b29a8ec4538d6eb25bb07c98e18701428b632bf08c56302657962e904 | from django.core.exceptions import FieldError
from django.test import TestCase
from .models import Article, Author
class CustomColumnsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(first_name="John", last_name="Smith")
cls.a2 = Author.objects.create(first_name="Peter", last_name="Jones")
cls.authors = [cls.a1, cls.a2]
cls.article = Article.objects.create(headline="Django lets you build web apps easily", primary_author=cls.a1)
cls.article.authors.set(cls.authors)
def test_query_all_available_authors(self):
self.assertQuerysetEqual(
Author.objects.all(), [
"Peter Jones", "John Smith",
],
str
)
def test_get_first_name(self):
self.assertEqual(
Author.objects.get(first_name__exact="John"),
self.a1,
)
def test_filter_first_name(self):
self.assertQuerysetEqual(
Author.objects.filter(first_name__exact="John"), [
"John Smith",
],
str
)
def test_field_error(self):
msg = (
"Cannot resolve keyword 'firstname' into field. Choices are: "
"Author_ID, article, first_name, last_name, primary_set"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.filter(firstname__exact="John")
def test_attribute_error(self):
with self.assertRaises(AttributeError):
self.a1.firstname
with self.assertRaises(AttributeError):
self.a1.last
def test_get_all_authors_for_an_article(self):
self.assertQuerysetEqual(
self.article.authors.all(), [
"Peter Jones",
"John Smith",
],
str
)
def test_get_all_articles_for_an_author(self):
self.assertQuerysetEqual(
self.a1.article_set.all(), [
"Django lets you build web apps easily",
],
lambda a: a.headline
)
def test_get_author_m2m_relation(self):
self.assertQuerysetEqual(
self.article.authors.filter(last_name='Jones'), [
"Peter Jones"
],
str
)
def test_author_querying(self):
self.assertSequenceEqual(
Author.objects.all().order_by('last_name'),
[self.a2, self.a1],
)
def test_author_filtering(self):
self.assertSequenceEqual(
Author.objects.filter(first_name__exact='John'),
[self.a1],
)
def test_author_get(self):
self.assertEqual(self.a1, Author.objects.get(first_name__exact='John'))
def test_filter_on_nonexistent_field(self):
msg = (
"Cannot resolve keyword 'firstname' into field. Choices are: "
"Author_ID, article, first_name, last_name, primary_set"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.filter(firstname__exact='John')
def test_author_get_attributes(self):
a = Author.objects.get(last_name__exact='Smith')
self.assertEqual('John', a.first_name)
self.assertEqual('Smith', a.last_name)
with self.assertRaisesMessage(AttributeError, "'Author' object has no attribute 'firstname'"):
getattr(a, 'firstname')
with self.assertRaisesMessage(AttributeError, "'Author' object has no attribute 'last'"):
getattr(a, 'last')
def test_m2m_table(self):
self.assertSequenceEqual(
self.article.authors.all().order_by('last_name'),
[self.a2, self.a1],
)
self.assertSequenceEqual(self.a1.article_set.all(), [self.article])
self.assertSequenceEqual(
self.article.authors.filter(last_name='Jones'),
[self.a2],
)
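# Illustrative sketch (kept as a comment so no extra model is registered):
# the behaviour above relies on mapping Python field names onto legacy
# column/table names with db_column and db_table. A hypothetical model:
#
#     class LegacyAuthor(models.Model):
#         Author_ID = models.AutoField(primary_key=True, db_column='Author ID')
#         first_name = models.CharField(max_length=30, db_column='firstname')
#         last_name = models.CharField(max_length=30, db_column='lastname')
#
#         class Meta:
#             db_table = 'legacy_author_table'
#
# db_column/db_table only change the SQL identifiers; lookups, error messages,
# and attribute access keep using the Python names, as the tests assert.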
|
526fe993ef6a759e442bb55ae659aa90a0dc660bad0159be054b8c2bb1773fc9 | import logging
from contextlib import contextmanager
from io import StringIO
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.core import mail
from django.core.exceptions import PermissionDenied
from django.core.files.temp import NamedTemporaryFile
from django.core.management import color
from django.http.multipartparser import MultiPartParserError
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.utils.log import (
DEFAULT_LOGGING, AdminEmailHandler, CallbackFilter, RequireDebugFalse,
RequireDebugTrue, ServerFormatter,
)
from django.views.debug import ExceptionReporter
from . import views
from .logconfig import MyEmailBackend
class LoggingFiltersTest(SimpleTestCase):
def test_require_debug_false_filter(self):
"""
Test the RequireDebugFalse filter class.
"""
filter_ = RequireDebugFalse()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), False)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), True)
def test_require_debug_true_filter(self):
"""
Test the RequireDebugTrue filter class.
"""
filter_ = RequireDebugTrue()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), True)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), False)
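# Illustrative sketch (an example dict, not loaded by these tests): the two
# filters above are normally attached to handlers in a LOGGING config so that
# a handler only fires for the matching DEBUG state.
EXAMPLE_DEBUG_FILTER_LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {'()': 'django.utils.log.RequireDebugFalse'},
        'require_debug_true': {'()': 'django.utils.log.RequireDebugTrue'},
    },
    'handlers': {
        # Emails admins only when DEBUG=False.
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler',
        },
        # Prints to the console only when DEBUG=True.
        'console': {
            'level': 'INFO',
            'filters': ['require_debug_true'],
            'class': 'logging.StreamHandler',
        },
    },
}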
class SetupDefaultLoggingMixin:
@classmethod
def setUpClass(cls):
super().setUpClass()
logging.config.dictConfig(DEFAULT_LOGGING)
cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING)
class DefaultLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_django_logger(self):
"""
        The 'django' base logger only outputs messages when DEBUG=True.
"""
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), '')
with self.settings(DEBUG=True):
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), 'Hey, this is an error.\n')
@override_settings(DEBUG=True)
def test_django_logger_warning(self):
self.logger.warning('warning')
self.assertEqual(self.logger_output.getvalue(), 'warning\n')
@override_settings(DEBUG=True)
def test_django_logger_info(self):
self.logger.info('info')
self.assertEqual(self.logger_output.getvalue(), 'info\n')
@override_settings(DEBUG=True)
def test_django_logger_debug(self):
self.logger.debug('debug')
self.assertEqual(self.logger_output.getvalue(), '')
class LoggingAssertionMixin:
def assertLogsRequest(self, url, level, msg, status_code, logger='django.request', exc_class=None):
with self.assertLogs(logger, level) as cm:
try:
self.client.get(url)
except views.UncaughtException:
pass
self.assertEqual(
len(cm.records), 1,
"Wrong number of calls for logger %r in %r level." % (logger, level)
)
record = cm.records[0]
self.assertEqual(record.getMessage(), msg)
self.assertEqual(record.status_code, status_code)
if exc_class:
self.assertIsNotNone(record.exc_info)
self.assertEqual(record.exc_info[0], exc_class)
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class HandlerLoggingTests(SetupDefaultLoggingMixin, LoggingAssertionMixin, LoggingCaptureMixin, SimpleTestCase):
def test_page_found_no_warning(self):
self.client.get('/innocent/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_redirect_no_warning(self):
self.client.get('/redirect/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_page_not_found_warning(self):
self.assertLogsRequest(
url='/does_not_exist/',
level='WARNING',
status_code=404,
msg='Not Found: /does_not_exist/',
)
def test_page_not_found_raised(self):
self.assertLogsRequest(
url='/does_not_exist_raised/',
level='WARNING',
status_code=404,
msg='Not Found: /does_not_exist_raised/',
)
def test_uncaught_exception(self):
self.assertLogsRequest(
url='/uncaught_exception/',
level='ERROR',
status_code=500,
msg='Internal Server Error: /uncaught_exception/',
exc_class=views.UncaughtException,
)
def test_internal_server_error(self):
self.assertLogsRequest(
url='/internal_server_error/',
level='ERROR',
status_code=500,
msg='Internal Server Error: /internal_server_error/',
)
def test_internal_server_error_599(self):
self.assertLogsRequest(
url='/internal_server_error/?status=599',
level='ERROR',
status_code=599,
msg='Unknown Status Code: /internal_server_error/',
)
def test_permission_denied(self):
self.assertLogsRequest(
url='/permission_denied/',
level='WARNING',
status_code=403,
msg='Forbidden (Permission denied): /permission_denied/',
exc_class=PermissionDenied,
)
def test_multi_part_parser_error(self):
self.assertLogsRequest(
url='/multi_part_parser_error/',
level='WARNING',
status_code=400,
msg='Bad request (Unable to parse request body): /multi_part_parser_error/',
exc_class=MultiPartParserError,
)
@override_settings(
DEBUG=True,
USE_I18N=True,
LANGUAGES=[('en', 'English')],
MIDDLEWARE=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
],
ROOT_URLCONF='logging_tests.urls_i18n',
)
class I18nLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_i18n_page_found_no_warning(self):
self.client.get('/exists/')
self.client.get('/en/exists/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_i18n_page_not_found_warning(self):
self.client.get('/this_does_not/')
self.client.get('/en/nor_this/')
self.assertEqual(self.logger_output.getvalue(), 'Not Found: /this_does_not/\nNot Found: /en/nor_this/\n')
class CallbackFilterTest(SimpleTestCase):
def test_sense(self):
f_false = CallbackFilter(lambda r: False)
f_true = CallbackFilter(lambda r: True)
self.assertFalse(f_false.filter('record'))
self.assertTrue(f_true.filter('record'))
def test_passes_on_record(self):
collector = []
def _callback(record):
collector.append(record)
return True
f = CallbackFilter(_callback)
f.filter("a record")
self.assertEqual(collector, ["a record"])
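# Illustrative sketch (example names, not used by these tests): CallbackFilter
# wraps any callable that takes a log record and returns a boolean, so ad-hoc
# filtering logic can be declared directly in a LOGGING dict.
def _example_skip_noisy_records(record):
    # Drop records whose rendered message mentions 'Invalid HTTP_HOST'.
    return 'Invalid HTTP_HOST' not in record.getMessage()
EXAMPLE_CALLBACK_FILTER_LOGGING = {
    'version': 1,
    'filters': {
        'skip_noisy': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': _example_skip_noisy_records,
        },
    },
}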
class AdminEmailHandlerTest(SimpleTestCase):
logger = logging.getLogger('django')
request_factory = RequestFactory()
def get_admin_email_handler(self, logger):
# AdminEmailHandler does not get filtered out
# even with DEBUG=True.
return [
h for h in logger.handlers
if h.__class__.__name__ == "AdminEmailHandler"
][0]
def test_fail_silently(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertTrue(admin_email_handler.connection().fail_silently)
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
User-supplied arguments and the EMAIL_SUBJECT_PREFIX setting are used
to compose the email subject (#16736).
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
self.logger.error(message, token1, token2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
INTERNAL_IPS=['127.0.0.1'],
)
def test_accepts_args_and_request(self):
"""
        The subject is also handled when a request object is passed.
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
request = self.request_factory.get('/')
self.logger.error(
message, token1, token2,
extra={
'status_code': 403,
'request': request,
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR (internal IP): Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
        Newlines in email reports' subjects are escaped to prevent
        AdminEmailHandler from failing (#17281).
"""
message = 'Message \r\n with newlines'
expected_subject = 'ERROR: Message \\r\\n with newlines'
self.assertEqual(len(mail.outbox), 0)
self.logger.error(message)
self.assertEqual(len(mail.outbox), 1)
self.assertNotIn('\n', mail.outbox[0].subject)
self.assertNotIn('\r', mail.outbox[0].subject)
self.assertEqual(mail.outbox[0].subject, expected_subject)
@override_settings(
ADMINS=[('admin', '[email protected]')],
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
Refs #19325
"""
message = 'All work and no play makes Jack a dull boy'
admin_email_handler = self.get_admin_email_handler(self.logger)
mail_admins_called = {'called': False}
def my_mail_admins(*args, **kwargs):
connection = kwargs['connection']
self.assertIsInstance(connection, MyEmailBackend)
mail_admins_called['called'] = True
# Monkeypatches
orig_mail_admins = mail.mail_admins
orig_email_backend = admin_email_handler.email_backend
mail.mail_admins = my_mail_admins
admin_email_handler.email_backend = (
'logging_tests.logconfig.MyEmailBackend')
try:
self.logger.error(message)
self.assertTrue(mail_admins_called['called'])
finally:
# Revert Monkeypatches
mail.mail_admins = orig_mail_admins
admin_email_handler.email_backend = orig_email_backend
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
)
def test_emit_non_ascii(self):
"""
#23593 - AdminEmailHandler should allow Unicode characters in the
request.
"""
handler = self.get_admin_email_handler(self.logger)
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
url_path = '/º'
record.request = self.request_factory.get(url_path)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.to, ['[email protected]'])
self.assertEqual(msg.subject, "[Django] ERROR (EXTERNAL IP): message")
self.assertIn("Report at %s" % url_path, msg.body)
@override_settings(
MANAGERS=[('manager', '[email protected]')],
DEBUG=False,
)
def test_customize_send_mail_method(self):
class ManagerEmailHandler(AdminEmailHandler):
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_managers(subject, message, *args, connection=self.connection(), **kwargs)
handler = ManagerEmailHandler()
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
self.assertEqual(len(mail.outbox), 0)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host_doesnt_crash(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
old_include_html = admin_email_handler.include_html
# Text email
admin_email_handler.include_html = False
try:
self.client.get('/', HTTP_HOST='evil.com')
finally:
admin_email_handler.include_html = old_include_html
# HTML email
admin_email_handler.include_html = True
try:
self.client.get('/', HTTP_HOST='evil.com')
finally:
admin_email_handler.include_html = old_include_html
def test_default_exception_reporter_class(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertEqual(admin_email_handler.reporter_class, ExceptionReporter)
@override_settings(ADMINS=[('A.N.Admin', '[email protected]')])
def test_custom_exception_reporter_is_used(self):
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
record.request = self.request_factory.get('/')
handler = AdminEmailHandler(reporter_class='logging_tests.logconfig.CustomExceptionReporter')
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.body, 'message\n\ncustom traceback text')
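# Illustrative sketch (an example dict; the reporter path is a placeholder):
# the AdminEmailHandler options exercised above -- include_html, email_backend
# and reporter_class -- are usually supplied through the LOGGING config.
EXAMPLE_ADMIN_EMAIL_LOGGING = {
    'version': 1,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'include_html': True,
            # A custom ExceptionReporter subclass; this dotted path is made up.
            'reporter_class': 'myproject.error_reporting.CustomExceptionReporter',
        },
    },
    'loggers': {
        'django.request': {'handlers': ['mail_admins'], 'level': 'ERROR'},
    },
}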
class SettingsConfigTest(AdminScriptTestCase):
"""
Accessing settings in a custom logging handler does not trigger
a circular import error.
"""
def setUp(self):
super().setUp()
log_config = """{
'version': 1,
'handlers': {
'custom_handler': {
'level': 'INFO',
'class': 'logging_tests.logconfig.MyHandler',
}
}
}"""
self.write_settings('settings.py', sdict={'LOGGING': log_config})
def test_circular_dependency(self):
        # 'check' is just an example command to trigger settings configuration.
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
def dictConfig(config):
dictConfig.called = True
dictConfig.called = False
class SetupConfigureLogging(SimpleTestCase):
"""
Calling django.setup() initializes the logging configuration.
"""
def test_configure_initializes_logging(self):
from django import setup
try:
with override_settings(
LOGGING_CONFIG='logging_tests.tests.dictConfig',
):
setup()
finally:
# Restore logging from settings.
setup()
self.assertTrue(dictConfig.called)
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class SecurityLoggerTest(LoggingAssertionMixin, SimpleTestCase):
def test_suspicious_operation_creates_log_message(self):
self.assertLogsRequest(
url='/suspicious/',
level='ERROR',
msg='dubious',
status_code=400,
logger='django.security.SuspiciousOperation',
)
def test_suspicious_operation_uses_sublogger(self):
self.assertLogsRequest(
url='/suspicious_spec/',
level='ERROR',
msg='dubious',
status_code=400,
logger='django.security.DisallowedHost',
)
@override_settings(
ADMINS=[('admin', '[email protected]')],
DEBUG=False,
)
def test_suspicious_email_admins(self):
self.client.get('/suspicious/')
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Report at /suspicious/', mail.outbox[0].body)
class SettingsCustomLoggingTest(AdminScriptTestCase):
"""
    Logging defaults are still applied when using a custom callable in
    LOGGING_CONFIG (i.e., logging.config.fileConfig).
"""
def setUp(self):
super().setUp()
logging_conf = """
[loggers]
keys=root
[handlers]
keys=stream
[formatters]
keys=simple
[logger_root]
handlers=stream
[handler_stream]
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[formatter_simple]
format=%(message)s
"""
self.temp_file = NamedTemporaryFile()
self.temp_file.write(logging_conf.encode())
self.temp_file.flush()
self.write_settings('settings.py', sdict={
'LOGGING_CONFIG': '"logging.config.fileConfig"',
'LOGGING': 'r"%s"' % self.temp_file.name,
})
def tearDown(self):
self.temp_file.close()
def test_custom_logging(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
class LogFormattersTests(SimpleTestCase):
def test_server_formatter_styles(self):
color_style = color.make_style('')
formatter = ServerFormatter()
formatter.style = color_style
log_msg = 'log message'
status_code_styles = [
(200, 'HTTP_SUCCESS'),
(100, 'HTTP_INFO'),
(304, 'HTTP_NOT_MODIFIED'),
(300, 'HTTP_REDIRECT'),
(404, 'HTTP_NOT_FOUND'),
(400, 'HTTP_BAD_REQUEST'),
(500, 'HTTP_SERVER_ERROR'),
]
for status_code, style in status_code_styles:
record = logging.makeLogRecord({'msg': log_msg, 'status_code': status_code})
self.assertEqual(formatter.format(record), getattr(color_style, style)(log_msg))
record = logging.makeLogRecord({'msg': log_msg})
self.assertEqual(formatter.format(record), log_msg)
def test_server_formatter_default_format(self):
server_time = '2016-09-25 10:20:30'
log_msg = 'log message'
logger = logging.getLogger('django.server')
@contextmanager
def patch_django_server_logger():
old_stream = logger.handlers[0].stream
new_stream = StringIO()
logger.handlers[0].stream = new_stream
yield new_stream
logger.handlers[0].stream = old_stream
with patch_django_server_logger() as logger_output:
logger.info(log_msg, extra={'server_time': server_time})
self.assertEqual('[%s] %s\n' % (server_time, log_msg), logger_output.getvalue())
with patch_django_server_logger() as logger_output:
logger.info(log_msg)
self.assertRegex(logger_output.getvalue(), r'^\[[/:,\w\s\d]+\] %s\n' % log_msg)
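# Illustrative sketch mirroring the shape of Django's DEFAULT_LOGGING (shown
# here only as an example): ServerFormatter is wired to the 'django.server'
# logger, which is what the formatter tests above exercise.
EXAMPLE_SERVER_LOGGING = {
    'version': 1,
    'formatters': {
        'django.server': {
            '()': 'django.utils.log.ServerFormatter',
            'format': '[{server_time}] {message}',
            'style': '{',
        },
    },
    'handlers': {
        'django.server': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'django.server',
        },
    },
    'loggers': {
        'django.server': {
            'handlers': ['django.server'],
            'level': 'INFO',
            'propagate': False,
        },
    },
}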
|
80da95b4dd779e3aeb295da25269cc1d7817002dc742c2dc9fc8a99f816e01c4 | import datetime
import pickle
from decimal import Decimal
from operator import attrgetter
from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Aggregate, Avg, Case, Count, DecimalField, F, IntegerField, Max, Q, StdDev,
Sum, Value, Variance, When,
)
from django.test import TestCase, skipUnlessAnyDBFeature, skipUnlessDBFeature
from django.test.utils import Approximate
from .models import (
Alfa, Author, Book, Bravo, Charlie, Clues, Entries, HardbackBook, ItemTag,
Publisher, SelfRefFK, Store, WithManualPK,
)
class AggregationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3)
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1)
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = HardbackBook.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15), weight=4.5)
cls.b6 = HardbackBook.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15), weight=3.7)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def assertObjectAttrs(self, obj, **kwargs):
for attr, value in kwargs.items():
self.assertEqual(getattr(obj, attr), value)
def test_annotation_with_value(self):
values = Book.objects.filter(
name='Practical Django Projects',
).annotate(
discount_price=F('price') * 2,
).values(
'discount_price',
).annotate(sum_discount=Sum('discount_price'))
self.assertSequenceEqual(
values,
[{'discount_price': Decimal('59.38'), 'sum_discount': Decimal('59.38')}]
)
def test_aggregates_in_where_clause(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
The subselect works and returns results equivalent to a
query with the IDs listed.
Before the corresponding fix for this bug, this test passed in 1.1 and
failed in 1.2-beta (trunk).
"""
qs = Book.objects.values('contact').annotate(Max('id'))
qs = qs.order_by('contact').values_list('id__max', flat=True)
# don't do anything with the queryset (qs) before including it as a
# subquery
books = Book.objects.order_by('id')
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
def test_aggregates_in_where_clause_pre_eval(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
Same as the above test, but evaluates the queryset for the subquery
before it's used as a subquery.
Before the corresponding fix for this bug, this test failed in both
1.1 and 1.2-beta (trunk).
"""
qs = Book.objects.values('contact').annotate(Max('id'))
qs = qs.order_by('contact').values_list('id__max', flat=True)
# force the queryset (qs) for the subquery to be evaluated in its
# current state
list(qs)
books = Book.objects.order_by('id')
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
@skipUnlessDBFeature('supports_subqueries_in_group_by')
def test_annotate_with_extra(self):
"""
Regression test for #11916: Extra params + aggregation creates
incorrect SQL.
"""
# Oracle doesn't support subqueries in group by clause
shortest_book_sql = """
SELECT name
FROM aggregation_regress_book b
WHERE b.publisher_id = aggregation_regress_publisher.id
ORDER BY b.pages
LIMIT 1
"""
# tests that this query does not raise a DatabaseError due to the full
# subselect being (erroneously) added to the GROUP BY parameters
qs = Publisher.objects.extra(select={
'name_of_shortest_book': shortest_book_sql,
}).annotate(total_books=Count('book'))
# force execution of the query
list(qs)
def test_aggregate(self):
# Ordering requests are ignored
self.assertEqual(
Author.objects.order_by("name").aggregate(Avg("age")),
{"age__avg": Approximate(37.444, places=1)}
)
# Implicit ordering is also ignored
self.assertEqual(
Book.objects.aggregate(Sum("pages")),
{"pages__sum": 3703},
)
# Baseline results
self.assertEqual(
Book.objects.aggregate(Sum('pages'), Avg('pages')),
{'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
)
# Empty values query doesn't affect grouping or results
self.assertEqual(
Book.objects.values().aggregate(Sum('pages'), Avg('pages')),
{'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
)
# Aggregate overrides extra selected column
self.assertEqual(
Book.objects.extra(select={'price_per_page': 'price / pages'}).aggregate(Sum('pages')),
{'pages__sum': 3703}
)
def test_annotation(self):
# Annotations get combined with extra select clauses
obj = Book.objects.annotate(mean_auth_age=Avg("authors__age")).extra(
select={"manufacture_cost": "price * .5"}).get(pk=self.b2.pk)
self.assertObjectAttrs(
obj,
contact_id=self.a3.id,
isbn='067232959',
mean_auth_age=45.0,
name='Sams Teach Yourself Django in 24 Hours',
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=self.p2.id,
rating=3.0
)
# Different DB backends return different types for the extra select computation
self.assertIn(obj.manufacture_cost, (11.545, Decimal('11.545')))
# Order of the annotate/extra in the query doesn't matter
obj = Book.objects.extra(select={'manufacture_cost': 'price * .5'}).annotate(
mean_auth_age=Avg('authors__age')).get(pk=self.b2.pk)
self.assertObjectAttrs(
obj,
contact_id=self.a3.id,
isbn='067232959',
mean_auth_age=45.0,
name='Sams Teach Yourself Django in 24 Hours',
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=self.p2.id,
rating=3.0
)
# Different DB backends return different types for the extra select computation
self.assertIn(obj.manufacture_cost, (11.545, Decimal('11.545')))
# Values queries can be combined with annotate and extra
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(
select={'manufacture_cost': 'price * .5'}).values().get(pk=self.b2.pk)
manufacture_cost = obj['manufacture_cost']
self.assertIn(manufacture_cost, (11.545, Decimal('11.545')))
del obj['manufacture_cost']
self.assertEqual(obj, {
'id': self.b2.id,
'contact_id': self.a3.id,
'isbn': '067232959',
'mean_auth_age': 45.0,
'name': 'Sams Teach Yourself Django in 24 Hours',
'pages': 528,
'price': Decimal('23.09'),
'pubdate': datetime.date(2008, 3, 3),
'publisher_id': self.p2.id,
'rating': 3.0,
})
# The order of the (empty) values, annotate and extra clauses doesn't
# matter
obj = Book.objects.values().annotate(mean_auth_age=Avg('authors__age')).extra(
select={'manufacture_cost': 'price * .5'}).get(pk=self.b2.pk)
manufacture_cost = obj['manufacture_cost']
self.assertIn(manufacture_cost, (11.545, Decimal('11.545')))
del obj['manufacture_cost']
self.assertEqual(obj, {
'id': self.b2.id,
'contact_id': self.a3.id,
'isbn': '067232959',
'mean_auth_age': 45.0,
'name': 'Sams Teach Yourself Django in 24 Hours',
'pages': 528,
'price': Decimal('23.09'),
'pubdate': datetime.date(2008, 3, 3),
'publisher_id': self.p2.id,
'rating': 3.0
})
# If the annotation precedes the values clause, it won't be included
# unless it is explicitly named
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(
select={'price_per_page': 'price / pages'}).values('name').get(pk=self.b1.pk)
self.assertEqual(obj, {
"name": 'The Definitive Guide to Django: Web Development Done Right',
})
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(
select={'price_per_page': 'price / pages'}).values('name', 'mean_auth_age').get(pk=self.b1.pk)
self.assertEqual(obj, {
'mean_auth_age': 34.5,
'name': 'The Definitive Guide to Django: Web Development Done Right',
})
# If an annotation isn't included in the values, it can still be used
# in a filter
qs = Book.objects.annotate(n_authors=Count('authors')).values('name').filter(n_authors__gt=2)
self.assertSequenceEqual(
qs, [
{"name": 'Python Web Development with Django'}
],
)
# The annotations are added to values output if values() precedes
# annotate()
obj = Book.objects.values('name').annotate(mean_auth_age=Avg('authors__age')).extra(
select={'price_per_page': 'price / pages'}).get(pk=self.b1.pk)
self.assertEqual(obj, {
'mean_auth_age': 34.5,
'name': 'The Definitive Guide to Django: Web Development Done Right',
})
        # All of the objects are counted (nulls are allowed) and values()
        # respects the number of objects.
self.assertEqual(
len(Author.objects.annotate(Avg('friends__age')).values()),
9
)
# Consecutive calls to annotate accumulate in the query
qs = (
Book.objects
.values('price')
.annotate(oldest=Max('authors__age'))
.order_by('oldest', 'price')
.annotate(Max('publisher__num_awards'))
)
self.assertSequenceEqual(
qs, [
{'price': Decimal("30"), 'oldest': 35, 'publisher__num_awards__max': 3},
{'price': Decimal("29.69"), 'oldest': 37, 'publisher__num_awards__max': 7},
{'price': Decimal("23.09"), 'oldest': 45, 'publisher__num_awards__max': 1},
{'price': Decimal("75"), 'oldest': 57, 'publisher__num_awards__max': 9},
{'price': Decimal("82.8"), 'oldest': 57, 'publisher__num_awards__max': 7}
],
)
def test_aggregate_annotation(self):
# Aggregates can be composed over annotations.
# The return type is derived from the composed aggregate
vals = (
Book.objects
.all()
.annotate(num_authors=Count('authors__id'))
.aggregate(Max('pages'), Max('price'), Sum('num_authors'), Avg('num_authors'))
)
self.assertEqual(vals, {
'num_authors__sum': 10,
'num_authors__avg': Approximate(1.666, places=2),
'pages__max': 1132,
'price__max': Decimal("82.80")
})
# Regression for #15624 - Missing SELECT columns when using values, annotate
# and aggregate in a single query
self.assertEqual(
Book.objects.annotate(c=Count('authors')).values('c').aggregate(Max('c')),
{'c__max': 3}
)
def test_conditional_aggregate(self):
# Conditional aggregation of a grouped queryset.
self.assertEqual(
Book.objects.annotate(c=Count('authors')).values('pk').aggregate(test=Sum(
Case(When(c__gt=1, then=1))
))['test'],
3
)
def test_sliced_conditional_aggregate(self):
self.assertEqual(
Author.objects.all()[:5].aggregate(test=Sum(Case(
When(age__lte=35, then=1)
)))['test'],
3
)
def test_annotated_conditional_aggregate(self):
annotated_qs = Book.objects.annotate(discount_price=F('price') * Decimal('0.75'))
self.assertAlmostEqual(
annotated_qs.aggregate(test=Avg(Case(
When(pages__lt=400, then='discount_price'),
output_field=DecimalField()
)))['test'],
Decimal('22.27'), places=2
)
def test_distinct_conditional_aggregate(self):
self.assertEqual(
Book.objects.distinct().aggregate(test=Avg(Case(
When(price=Decimal('29.69'), then='pages'),
output_field=IntegerField()
)))['test'],
325
)
def test_conditional_aggregate_on_complex_condition(self):
self.assertEqual(
Book.objects.distinct().aggregate(test=Avg(Case(
When(Q(price__gte=Decimal('29')) & Q(price__lt=Decimal('30')), then='pages'),
output_field=IntegerField()
)))['test'],
325
)
def test_decimal_aggregate_annotation_filter(self):
"""
Filtering on an aggregate annotation with Decimal values should work.
Requires special handling on SQLite (#18247).
"""
self.assertEqual(
len(Author.objects.annotate(sum=Sum('book_contact_set__price')).filter(sum__gt=Decimal(40))),
1
)
self.assertEqual(
len(Author.objects.annotate(sum=Sum('book_contact_set__price')).filter(sum__lte=Decimal(40))),
4
)
def test_field_error(self):
# Bad field requests in aggregates are caught and reported
msg = (
"Cannot resolve keyword 'foo' into field. Choices are: authors, "
"contact, contact_id, hardbackbook, id, isbn, name, pages, price, "
"pubdate, publisher, publisher_id, rating, store, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Book.objects.all().aggregate(num_authors=Count('foo'))
with self.assertRaisesMessage(FieldError, msg):
Book.objects.all().annotate(num_authors=Count('foo'))
msg = (
"Cannot resolve keyword 'foo' into field. Choices are: authors, "
"contact, contact_id, hardbackbook, id, isbn, name, num_authors, "
"pages, price, pubdate, publisher, publisher_id, rating, store, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Max('foo'))
def test_more(self):
# Old-style count aggregations can be mixed with new-style
self.assertEqual(
Book.objects.annotate(num_authors=Count('authors')).count(),
6
)
# Non-ordinal, non-computed Aggregates over annotations correctly
# inherit the annotation's internal type if the annotation is ordinal
# or computed
vals = Book.objects.annotate(num_authors=Count('authors')).aggregate(Max('num_authors'))
self.assertEqual(
vals,
{'num_authors__max': 3}
)
vals = Publisher.objects.annotate(avg_price=Avg('book__price')).aggregate(Max('avg_price'))
self.assertEqual(
vals,
{'avg_price__max': 75.0}
)
        # Aliases are quoted to protect aliases that might be reserved names.
vals = Book.objects.aggregate(number=Max('pages'), select=Max('pages'))
self.assertEqual(
vals,
{'number': 1132, 'select': 1132}
)
# Regression for #10064: select_related() plays nice with aggregates
obj = Book.objects.select_related('publisher').annotate(
num_authors=Count('authors')).values().get(isbn='013790395')
self.assertEqual(obj, {
'contact_id': self.a8.id,
'id': self.b5.id,
'isbn': '013790395',
'name': 'Artificial Intelligence: A Modern Approach',
'num_authors': 2,
'pages': 1132,
'price': Decimal("82.8"),
'pubdate': datetime.date(1995, 1, 15),
'publisher_id': self.p3.id,
'rating': 4.0,
})
# Regression for #10010: exclude on an aggregate field is correctly
# negated
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors'))),
6
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).filter(num_authors__gt=2)),
1
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).exclude(num_authors__gt=2)),
5
)
self.assertEqual(
len(
Book.objects
.annotate(num_authors=Count('authors'))
.filter(num_authors__lt=3)
.exclude(num_authors__lt=2)
),
2
)
self.assertEqual(
len(
Book.objects
.annotate(num_authors=Count('authors'))
.exclude(num_authors__lt=2)
.filter(num_authors__lt=3)
),
2
)
def test_aggregate_fexpr(self):
# Aggregates can be used with F() expressions
# ... where the F() is pushed into the HAVING clause
qs = (
Publisher.objects
.annotate(num_books=Count('book'))
.filter(num_books__lt=F('num_awards') / 2)
.order_by('name')
.values('name', 'num_books', 'num_awards')
)
self.assertSequenceEqual(
qs, [
{'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},
{'num_books': 2, 'name': 'Prentice Hall', 'num_awards': 7}
],
)
qs = (
Publisher.objects
.annotate(num_books=Count('book'))
.exclude(num_books__lt=F('num_awards') / 2)
.order_by('name')
.values('name', 'num_books', 'num_awards')
)
self.assertSequenceEqual(
qs, [
{'num_books': 2, 'name': 'Apress', 'num_awards': 3},
{'num_books': 0, 'name': "Jonno's House of Books", 'num_awards': 0},
{'num_books': 1, 'name': 'Sams', 'num_awards': 1}
],
)
# ... and where the F() references an aggregate
qs = (
Publisher.objects
.annotate(num_books=Count('book'))
.filter(num_awards__gt=2 * F('num_books'))
.order_by('name')
.values('name', 'num_books', 'num_awards')
)
self.assertSequenceEqual(
qs, [
{'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},
{'num_books': 2, 'name': 'Prentice Hall', 'num_awards': 7}
],
)
qs = (
Publisher.objects
.annotate(num_books=Count('book'))
.exclude(num_books__lt=F('num_awards') / 2)
.order_by('name')
.values('name', 'num_books', 'num_awards')
)
self.assertSequenceEqual(
qs, [
{'num_books': 2, 'name': 'Apress', 'num_awards': 3},
{'num_books': 0, 'name': "Jonno's House of Books", 'num_awards': 0},
{'num_books': 1, 'name': 'Sams', 'num_awards': 1}
],
)
def test_db_col_table(self):
# Tests on fields with non-default table and column names.
qs = (
Clues.objects
.values('EntryID__Entry')
.annotate(Appearances=Count('EntryID'), Distinct_Clues=Count('Clue', distinct=True))
)
self.assertQuerysetEqual(qs, [])
qs = Entries.objects.annotate(clue_count=Count('clues__ID'))
self.assertQuerysetEqual(qs, [])
def test_boolean_conversion(self):
        # Aggregates used to mix up the ordering of columns for the backend's
        # convert_values() method. Refs #21126.
e = Entries.objects.create(Entry='foo')
c = Clues.objects.create(EntryID=e, Clue='bar')
qs = Clues.objects.select_related('EntryID').annotate(Count('ID'))
self.assertSequenceEqual(qs, [c])
self.assertEqual(qs[0].EntryID, e)
self.assertIs(qs[0].EntryID.Exclude, False)
def test_empty(self):
# Regression for #10089: Check handling of empty result sets with
# aggregates
self.assertEqual(
Book.objects.filter(id__in=[]).count(),
0
)
vals = (
Book.objects
.filter(id__in=[])
.aggregate(
num_authors=Count('authors'),
avg_authors=Avg('authors'),
max_authors=Max('authors'),
max_price=Max('price'),
max_rating=Max('rating'),
)
)
self.assertEqual(
vals,
{'max_authors': None, 'max_rating': None, 'num_authors': 0, 'avg_authors': None, 'max_price': None}
)
qs = (
Publisher.objects
.filter(name="Jonno's House of Books")
.annotate(
num_authors=Count('book__authors'),
avg_authors=Avg('book__authors'),
max_authors=Max('book__authors'),
max_price=Max('book__price'),
max_rating=Max('book__rating'),
).values()
)
self.assertSequenceEqual(
qs,
[{
'max_authors': None,
'name': "Jonno's House of Books",
'num_awards': 0,
'max_price': None,
'num_authors': 0,
'max_rating': None,
'id': self.p5.id,
'avg_authors': None,
}],
)
def test_more_more(self):
# Regression for #10113 - Fields mentioned in order_by() must be
# included in the GROUP BY. This only becomes a problem when the
# order_by introduces a new join.
self.assertQuerysetEqual(
Book.objects.annotate(num_authors=Count('authors')).order_by('publisher__name', 'name'), [
"Practical Django Projects",
"The Definitive Guide to Django: Web Development Done Right",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp",
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"Sams Teach Yourself Django in 24 Hours",
],
lambda b: b.name
)
# Regression for #10127 - Empty select_related() works with annotate
qs = Book.objects.filter(rating__lt=4.5).select_related().annotate(Avg('authors__age')).order_by('name')
self.assertQuerysetEqual(
qs,
[
('Artificial Intelligence: A Modern Approach', 51.5, 'Prentice Hall', 'Peter Norvig'),
('Practical Django Projects', 29.0, 'Apress', 'James Bennett'),
(
'Python Web Development with Django',
Approximate(30.333, places=2),
'Prentice Hall',
'Jeffrey Forcier',
),
('Sams Teach Yourself Django in 24 Hours', 45.0, 'Sams', 'Brad Dayley')
],
lambda b: (b.name, b.authors__age__avg, b.publisher.name, b.contact.name)
)
# Regression for #10132 - If the values() clause only mentioned extra
# (select=) columns, those columns are used for grouping
qs = Book.objects.extra(select={'pub': 'publisher_id'}).values('pub').annotate(Count('id')).order_by('pub')
self.assertSequenceEqual(
qs, [
{'pub': self.p1.id, 'id__count': 2},
{'pub': self.p2.id, 'id__count': 1},
{'pub': self.p3.id, 'id__count': 2},
{'pub': self.p4.id, 'id__count': 1},
],
)
qs = (
Book.objects
.extra(select={'pub': 'publisher_id', 'foo': 'pages'})
.values('pub')
.annotate(Count('id'))
.order_by('pub')
)
self.assertSequenceEqual(
qs, [
{'pub': self.p1.id, 'id__count': 2},
{'pub': self.p2.id, 'id__count': 1},
{'pub': self.p3.id, 'id__count': 2},
{'pub': self.p4.id, 'id__count': 1}
],
)
        # Regression for #10182 - Queries with aggregate calls are correctly
        # re-aliased when used in a subquery
ids = (
Book.objects
.filter(pages__gt=100)
.annotate(n_authors=Count('authors'))
.filter(n_authors__gt=2)
.order_by('n_authors')
)
self.assertQuerysetEqual(
Book.objects.filter(id__in=ids), [
"Python Web Development with Django",
],
lambda b: b.name
)
# Regression for #15709 - Ensure each group_by field only exists once
# per query
qstr = str(Book.objects.values('publisher').annotate(max_pages=Max('pages')).order_by().query)
# There is just one GROUP BY clause (zero commas means at most one clause).
self.assertEqual(qstr[qstr.index('GROUP BY'):].count(', '), 0)
def test_duplicate_alias(self):
# Regression for #11256 - duplicating a default alias raises ValueError.
msg = (
"The named annotation 'authors__age__avg' conflicts with "
"the default name for another annotation."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.all().annotate(Avg('authors__age'), authors__age__avg=Avg('authors__age'))
def test_field_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with a field name on the model raises ValueError
msg = "The annotation 'age' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(age=Avg('friends__age'))
def test_m2m_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with an m2m name on the model raises ValueError
msg = "The annotation 'friends' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(friends=Count('friends'))
def test_fk_attname_conflict(self):
msg = "The annotation 'contact_id' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(contact_id=F('publisher_id'))
def test_values_queryset_non_conflict(self):
# Regression for #14707 -- If you're using a values query set, some potential conflicts are avoided.
# age is a field on Author, so it shouldn't be allowed as an aggregate.
# But age isn't included in values(), so it is.
results = Author.objects.values('name').annotate(age=Count('book_contact_set')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['age'], 1)
# Same problem, but aggregating over m2m fields
results = Author.objects.values('name').annotate(age=Avg('friends__age')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['age'], 32.0)
# Same problem, but colliding with an m2m field
results = Author.objects.values('name').annotate(friends=Count('friends')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['friends'], 2)
def test_reverse_relation_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with a reverse-related name on the model raises ValueError
msg = "The annotation 'book_contact_set' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(book_contact_set=Avg('friends__age'))
def test_pickle(self):
# Regression for #10197 -- Queries with aggregates can be pickled.
# First check that pickling is possible at all. No crash = success
qs = Book.objects.annotate(num_authors=Count('authors'))
pickle.dumps(qs)
# Then check that the round trip works.
query = qs.query.get_compiler(qs.db).as_sql()[0]
qs2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(
qs2.query.get_compiler(qs2.db).as_sql()[0],
query,
)
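    # Illustrative sketch (hypothetical usage, kept as a comment so nothing
    # hits the database at import time): the documented way to persist a
    # query like the one above is to pickle its .query attribute and attach
    # it to a fresh QuerySet later.
    #
    #     payload = pickle.dumps(
    #         Book.objects.annotate(num_authors=Count('authors')).query
    #     )
    #     restored = Book.objects.all()
    #     restored.query = pickle.loads(payload)  # re-runs the annotated query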
def test_more_more_more(self):
# Regression for #10199 - Aggregate calls clone the original query so
# the original query can still be used
books = Book.objects.all()
books.aggregate(Avg("authors__age"))
self.assertQuerysetEqual(
books.all(), [
'Artificial Intelligence: A Modern Approach',
'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
'Practical Django Projects',
'Python Web Development with Django',
'Sams Teach Yourself Django in 24 Hours',
'The Definitive Guide to Django: Web Development Done Right'
],
lambda b: b.name
)
# Regression for #10248 - Annotations work with dates()
qs = Book.objects.annotate(num_authors=Count('authors')).filter(num_authors=2).dates('pubdate', 'day')
self.assertSequenceEqual(
qs, [
datetime.date(1995, 1, 15),
datetime.date(2007, 12, 6),
],
)
# Regression for #10290 - extra selects with parameters can be used for
# grouping.
qs = (
Book.objects
.annotate(mean_auth_age=Avg('authors__age'))
.extra(select={'sheets': '(pages + %s) / %s'}, select_params=[1, 2])
.order_by('sheets')
.values('sheets')
)
self.assertQuerysetEqual(
qs, [
150,
175,
224,
264,
473,
566
],
lambda b: int(b["sheets"])
)
# Regression for 10425 - annotations don't get in the way of a count()
# clause
self.assertEqual(
Book.objects.values('publisher').annotate(Count('publisher')).count(),
4
)
self.assertEqual(
Book.objects.annotate(Count('publisher')).values('publisher').count(),
6
)
# Note: intentionally no order_by(), that case needs tests, too.
publishers = Publisher.objects.filter(id__in=[self.p1.id, self.p2.id])
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
publishers = publishers.annotate(n_books=Count("book"))
sorted_publishers = sorted(publishers, key=lambda x: x.name)
self.assertEqual(
sorted_publishers[0].n_books,
2
)
self.assertEqual(
sorted_publishers[1].n_books,
1
)
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
books = Book.objects.filter(publisher__in=publishers)
self.assertQuerysetEqual(
books, [
"Practical Django Projects",
"Sams Teach Yourself Django in 24 Hours",
"The Definitive Guide to Django: Web Development Done Right",
],
lambda b: b.name
)
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
# Regression for 10666 - inherited fields work with annotations and
# aggregations
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum('book_ptr__pages')),
{'n_pages': 2078}
)
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum('pages')),
{'n_pages': 2078},
)
qs = HardbackBook.objects.annotate(
n_authors=Count('book_ptr__authors'),
).values('name', 'n_authors').order_by('name')
self.assertSequenceEqual(
qs,
[
{'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
{
'n_authors': 1,
'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'
}
],
)
qs = HardbackBook.objects.annotate(n_authors=Count('authors')).values('name', 'n_authors').order_by('name')
self.assertSequenceEqual(
qs,
[
{'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
{
'n_authors': 1,
'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'
}
],
)
        # Regression for #10766 - Shouldn't be able to reference an aggregate
        # field in an aggregate() call.
msg = "Cannot compute Avg('mean_age'): 'mean_age' is an aggregate"
with self.assertRaisesMessage(FieldError, msg):
Book.objects.annotate(mean_age=Avg('authors__age')).annotate(Avg('mean_age'))
def test_empty_filter_count(self):
self.assertEqual(
Author.objects.filter(id__in=[]).annotate(Count("friends")).count(),
0
)
def test_empty_filter_aggregate(self):
self.assertEqual(
Author.objects.filter(id__in=[]).annotate(Count("friends")).aggregate(Count("pk")),
{"pk__count": 0}
)
def test_none_call_before_aggregate(self):
# Regression for #11789
self.assertEqual(
Author.objects.none().aggregate(Avg('age')),
{'age__avg': None}
)
def test_annotate_and_join(self):
self.assertEqual(
Author.objects.annotate(c=Count("friends__name")).exclude(friends__name="Joe").count(),
Author.objects.count()
)
def test_f_expression_annotation(self):
# Books with less than 200 pages per author.
qs = Book.objects.values("name").annotate(
n_authors=Count("authors")
).filter(
pages__lt=F("n_authors") * 200
).values_list("pk")
self.assertQuerysetEqual(
Book.objects.filter(pk__in=qs), [
"Python Web Development with Django"
],
attrgetter("name")
)
def test_values_annotate_values(self):
qs = Book.objects.values("name").annotate(
n_authors=Count("authors")
).values_list("pk", flat=True).order_by('name')
self.assertEqual(list(qs), list(Book.objects.values_list("pk", flat=True)))
def test_having_group_by(self):
        # When a field occurs on the LHS of a HAVING clause, it appears
        # correctly in the GROUP BY clause.
qs = Book.objects.values_list("name").annotate(
n_authors=Count("authors")
).filter(
pages__gt=F("n_authors")
).values_list("name", flat=True).order_by('name')
# Results should be the same, all Books have more pages than authors
self.assertEqual(
list(qs), list(Book.objects.values_list("name", flat=True))
)
def test_values_list_annotation_args_ordering(self):
"""
Annotate *args ordering should be preserved in values_list results.
**kwargs comes after *args.
Regression test for #23659.
"""
books = Book.objects.values_list("publisher__name").annotate(
Count("id"), Avg("price"), Avg("authors__age"), avg_pgs=Avg("pages")
).order_by("-publisher__name")
self.assertEqual(books[0], ('Sams', 1, Decimal('23.09'), 45.0, 528.0))
def test_annotation_disjunction(self):
qs = Book.objects.annotate(n_authors=Count("authors")).filter(
Q(n_authors=2) | Q(name="Python Web Development with Django")
).order_by('name')
self.assertQuerysetEqual(
qs, [
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name")
)
qs = (
Book.objects
.annotate(n_authors=Count("authors"))
.filter(
Q(name="The Definitive Guide to Django: Web Development Done Right") |
(Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
)
).order_by('name')
self.assertQuerysetEqual(
qs,
[
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name")
)
qs = Publisher.objects.annotate(
rating_sum=Sum("book__rating"),
book_count=Count("book")
).filter(
Q(rating_sum__gt=5.5) | Q(rating_sum__isnull=True)
).order_by('pk')
self.assertQuerysetEqual(
qs, [
"Apress",
"Prentice Hall",
"Jonno's House of Books",
],
attrgetter("name")
)
qs = Publisher.objects.annotate(
rating_sum=Sum("book__rating"),
book_count=Count("book")
).filter(
Q(rating_sum__gt=F("book_count")) | Q(rating_sum=None)
).order_by("num_awards")
self.assertQuerysetEqual(
qs, [
"Jonno's House of Books",
"Sams",
"Apress",
"Prentice Hall",
"Morgan Kaufmann"
],
attrgetter("name")
)
def test_quoting_aggregate_order_by(self):
qs = Book.objects.filter(
name="Python Web Development with Django"
).annotate(
authorCount=Count("authors")
).order_by("authorCount")
self.assertQuerysetEqual(
qs, [
("Python Web Development with Django", 3),
],
lambda b: (b.name, b.authorCount)
)
def test_stddev(self):
self.assertEqual(
Book.objects.aggregate(StdDev('pages')),
{'pages__stddev': Approximate(311.46, 1)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('rating')),
{'rating__stddev': Approximate(0.60, 1)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('price')),
{'price__stddev': Approximate(Decimal('24.16'), 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('pages', sample=True)),
{'pages__stddev': Approximate(341.19, 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('rating', sample=True)),
{'rating__stddev': Approximate(0.66, 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('price', sample=True)),
{'price__stddev': Approximate(Decimal('26.46'), 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('pages')),
{'pages__variance': Approximate(97010.80, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('rating')),
{'rating__variance': Approximate(0.36, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('price')),
{'price__variance': Approximate(Decimal('583.77'), 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('pages', sample=True)),
{'pages__variance': Approximate(116412.96, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('rating', sample=True)),
{'rating__variance': Approximate(0.44, 2)}
)
self.assertEqual(
Book.objects.aggregate(Variance('price', sample=True)),
{'price__variance': Approximate(Decimal('700.53'), 2)}
)
def test_filtering_by_annotation_name(self):
# Regression test for #14476
        # The explicitly provided annotation name in this case poses no
        # problem.
qs = Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
# Neither in this case
qs = Author.objects.annotate(book_count=Count('book')).filter(book_count=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
# This case used to fail because the ORM couldn't resolve the
# automatically generated annotation name `book__count`
qs = Author.objects.annotate(Count('book')).filter(book__count=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
# Referencing the auto-generated name in an aggregate() also works.
self.assertEqual(
Author.objects.annotate(Count('book')).aggregate(Max('book__count')),
{'book__count__max': 2}
)
def test_annotate_joins(self):
"""
        The base table's join isn't promoted to LOUTER. This could cause the
        query generation to fail if there is also an exclude() on an FK field
        in the query. Refs #19087.
"""
qs = Book.objects.annotate(n=Count('pk'))
self.assertIs(qs.query.alias_map['aggregation_regress_book'].join_type, None)
# The query executes without problems.
self.assertEqual(len(qs.exclude(publisher=-1)), 6)
@skipUnlessAnyDBFeature('allows_group_by_pk', 'allows_group_by_selected_pks')
def test_aggregate_duplicate_columns(self):
# Regression test for #17144
results = Author.objects.annotate(num_contacts=Count('book_contact_set'))
# There should only be one GROUP BY clause, for the `id` column.
# `name` and `age` should not be grouped on.
_, _, group_by = results.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertIn('id', group_by[0][0])
self.assertNotIn('name', group_by[0][0])
self.assertNotIn('age', group_by[0][0])
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by('name')],
[
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 0),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 0),
('Peter Norvig', 2),
('Stuart Russell', 0),
('Wesley J. Chun', 0),
]
)
@skipUnlessAnyDBFeature('allows_group_by_pk', 'allows_group_by_selected_pks')
def test_aggregate_duplicate_columns_only(self):
# Works with only() too.
results = Author.objects.only('id', 'name').annotate(num_contacts=Count('book_contact_set'))
_, _, grouping = results.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(grouping), 1)
self.assertIn('id', grouping[0][0])
self.assertNotIn('name', grouping[0][0])
self.assertNotIn('age', grouping[0][0])
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by('name')],
[
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 0),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 0),
('Peter Norvig', 2),
('Stuart Russell', 0),
('Wesley J. Chun', 0),
]
)
@skipUnlessAnyDBFeature('allows_group_by_pk', 'allows_group_by_selected_pks')
def test_aggregate_duplicate_columns_select_related(self):
# And select_related()
results = Book.objects.select_related('contact').annotate(
num_authors=Count('authors'))
_, _, grouping = results.query.get_compiler(using='default').pre_sql_setup()
# In the case of `group_by_selected_pks` we also group by contact.id because of the select_related.
self.assertEqual(len(grouping), 1 if connection.features.allows_group_by_pk else 2)
self.assertIn('id', grouping[0][0])
self.assertNotIn('name', grouping[0][0])
self.assertNotIn('contact', grouping[0][0])
self.assertEqual(
[(b.name, b.num_authors) for b in results.order_by('name')],
[
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
]
)
@skipUnlessDBFeature('allows_group_by_selected_pks')
def test_aggregate_unmanaged_model_columns(self):
"""
Unmanaged models are sometimes used to represent database views which
may not allow grouping by selected primary key.
"""
def assertQuerysetResults(queryset):
self.assertEqual(
[(b.name, b.num_authors) for b in queryset.order_by('name')],
[
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2),
]
)
queryset = Book.objects.select_related('contact').annotate(num_authors=Count('authors'))
# Unmanaged origin model.
with mock.patch.object(Book._meta, 'managed', False):
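            # Since the origin model is unmanaged, grouping by its selected
            # primary key alone isn't allowed; the compiler should instead
            # group by every Book column plus the related Author pk.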
_, _, grouping = queryset.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(grouping), len(Book._meta.fields) + 1)
for index, field in enumerate(Book._meta.fields):
self.assertIn(field.name, grouping[index][0])
self.assertIn(Author._meta.pk.name, grouping[-1][0])
assertQuerysetResults(queryset)
# Unmanaged related model.
with mock.patch.object(Author._meta, 'managed', False):
_, _, grouping = queryset.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(grouping), len(Author._meta.fields) + 1)
self.assertIn(Book._meta.pk.name, grouping[0][0])
for index, field in enumerate(Author._meta.fields):
self.assertIn(field.name, grouping[index + 1][0])
assertQuerysetResults(queryset)
@skipUnlessDBFeature('allows_group_by_selected_pks')
def test_aggregate_unmanaged_model_as_tables(self):
qs = Book.objects.select_related('contact').annotate(num_authors=Count('authors'))
# Force treating unmanaged models as tables.
with mock.patch(
'django.db.connection.features.allows_group_by_selected_pks_on_model',
return_value=True,
):
with mock.patch.object(Book._meta, 'managed', False), \
mock.patch.object(Author._meta, 'managed', False):
_, _, grouping = qs.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(grouping), 2)
self.assertIn('id', grouping[0][0])
self.assertIn('id', grouping[1][0])
self.assertQuerysetEqual(
qs.order_by('name'),
[
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2),
],
attrgetter('name', 'num_authors'),
)
def test_reverse_join_trimming(self):
qs = Author.objects.annotate(Count('book_contact_set__contact'))
self.assertIn(' JOIN ', str(qs.query))
def test_aggregation_with_generic_reverse_relation(self):
"""
        Regression test for #10870: aggregates with joins ignore extra
        filters provided by setup_joins().
        Tests aggregations with generic reverse relations.
"""
django_book = Book.objects.get(name='Practical Django Projects')
ItemTag.objects.create(
object_id=django_book.id, tag='intermediate',
content_type=ContentType.objects.get_for_model(django_book),
)
ItemTag.objects.create(
object_id=django_book.id, tag='django',
content_type=ContentType.objects.get_for_model(django_book),
)
        # Assign a tag to a model with the same PK as the book above. If the
        # JOIN used in the aggregation doesn't include the content type in
        # its condition, the annotation will also count the 'hi mom' tag for
        # django_book.
wmpk = WithManualPK.objects.create(id=django_book.pk)
ItemTag.objects.create(
object_id=wmpk.id, tag='hi mom',
content_type=ContentType.objects.get_for_model(wmpk),
)
ai_book = Book.objects.get(name__startswith='Paradigms of Artificial Intelligence')
ItemTag.objects.create(
object_id=ai_book.id, tag='intermediate',
content_type=ContentType.objects.get_for_model(ai_book),
)
self.assertEqual(Book.objects.aggregate(Count('tags')), {'tags__count': 3})
results = Book.objects.annotate(Count('tags')).order_by('-tags__count', 'name')
self.assertEqual(
[(b.name, b.tags__count) for b in results],
[
('Practical Django Projects', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Artificial Intelligence: A Modern Approach', 0),
('Python Web Development with Django', 0),
('Sams Teach Yourself Django in 24 Hours', 0),
('The Definitive Guide to Django: Web Development Done Right', 0)
]
)
def test_negated_aggregation(self):
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2)
).order_by('name')
expected_results = [a.name for a in expected_results]
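        # Excluding two identical Q conditions (implicitly ANDed) should
        # produce the same authors as the subquery-based exclusion computed
        # above.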
qs = Author.objects.annotate(book_cnt=Count('book')).exclude(
Q(book_cnt=2), Q(book_cnt=2)).order_by('name')
self.assertQuerysetEqual(
qs,
expected_results,
lambda b: b.name
)
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2)
).order_by('name')
expected_results = [a.name for a in expected_results]
qs = Author.objects.annotate(book_cnt=Count('book')).exclude(Q(book_cnt=2) | Q(book_cnt=2)).order_by('name')
self.assertQuerysetEqual(
qs,
expected_results,
lambda b: b.name
)
def test_name_filters(self):
qs = Author.objects.annotate(Count('book')).filter(
Q(book__count__exact=2) | Q(name='Adrian Holovaty')
).order_by('name')
self.assertQuerysetEqual(
qs,
['Adrian Holovaty', 'Peter Norvig'],
lambda b: b.name
)
def test_name_expressions(self):
# Aggregates are spotted correctly from F objects.
# Note that Adrian's age is 34 in the fixtures, and he has one book
# so both conditions match one author.
qs = Author.objects.annotate(Count('book')).filter(
Q(name='Peter Norvig') | Q(age=F('book__count') + 33)
).order_by('name')
self.assertQuerysetEqual(
qs,
['Adrian Holovaty', 'Peter Norvig'],
lambda b: b.name
)
def test_ticket_11293(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count('authors')).filter(
q1 | q2).order_by('pk')
self.assertQuerysetEqual(
query,
[self.b1.pk, self.b4.pk, self.b5.pk, self.b6.pk],
attrgetter('pk'),
)
def test_ticket_11293_q_immutable(self):
"""
        Splitting a Q object into where/having parts doesn't alter the
        original Q object.
"""
q1 = Q(isbn='')
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count('authors'))
query.filter(q1 | q2)
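        # q2 should still hold only its single authors__count__gt child;
        # splitting the filter into WHERE/HAVING parts must not mutate the
        # original Q object.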
self.assertEqual(len(q2.children), 1)
def test_fobj_group_by(self):
"""
An F() object referring to related column works correctly in group by.
"""
qs = Book.objects.annotate(
account=Count('authors')
).filter(
account=F('publisher__num_awards')
)
self.assertQuerysetEqual(
qs, ['Sams Teach Yourself Django in 24 Hours'],
lambda b: b.name)
def test_annotate_reserved_word(self):
"""
Regression #18333 - Ensure annotated column name is properly quoted.
"""
vals = Book.objects.annotate(select=Count('authors__id')).aggregate(Sum('select'), Avg('select'))
self.assertEqual(vals, {
'select__sum': 10,
'select__avg': Approximate(1.666, places=2),
})
def test_annotate_on_relation(self):
book = Book.objects.annotate(avg_price=Avg('price'), publisher_name=F('publisher__name')).get(pk=self.b1.pk)
self.assertEqual(book.avg_price, 30.00)
self.assertEqual(book.publisher_name, "Apress")
def test_aggregate_on_relation(self):
# A query with an existing annotation aggregation on a relation should
# succeed.
qs = Book.objects.annotate(avg_price=Avg('price')).aggregate(
publisher_awards=Sum('publisher__num_awards')
)
self.assertEqual(qs['publisher_awards'], 30)
def test_annotate_distinct_aggregate(self):
        # There are three books with a rating of 4.0, and two of them share
        # the same price. Hence, distinct() removes one rating of 4.0 from
        # the results.
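        # The sum over distinct (rating, price) pairs therefore equals the
        # full rating sum minus a single 4.0, which is what vals2 computes.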
vals1 = Book.objects.values('rating', 'price').distinct().aggregate(result=Sum('rating'))
vals2 = Book.objects.aggregate(result=Sum('rating') - Value(4.0))
self.assertEqual(vals1, vals2)
def test_annotate_values_list_flat(self):
"""Find ages that are shared by at least two authors."""
qs = Author.objects.values_list('age', flat=True).annotate(age_count=Count('age')).filter(age_count__gt=1)
self.assertSequenceEqual(qs, [29])
def test_allow_distinct(self):
class MyAggregate(Aggregate):
pass
with self.assertRaisesMessage(TypeError, 'MyAggregate does not allow distinct'):
MyAggregate('foo', distinct=True)
class DistinctAggregate(Aggregate):
allow_distinct = True
DistinctAggregate('foo', distinct=True)
@skipUnlessDBFeature('supports_subqueries_in_group_by')
def test_having_subquery_select(self):
authors = Author.objects.filter(pk=self.a1.pk)
books = Book.objects.annotate(Count('authors')).filter(
Q(authors__in=authors) | Q(authors__count__gt=2)
)
self.assertEqual(set(books), {self.b1, self.b4})
class JoinPromotionTests(TestCase):
def test_ticket_21150(self):
b = Bravo.objects.create()
c = Charlie.objects.create(bravo=b)
qs = Charlie.objects.select_related('alfa').annotate(Count('bravo__charlie'))
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].alfa, None)
a = Alfa.objects.create()
c.alfa = a
c.save()
# Force re-evaluation
qs = qs.all()
self.assertSequenceEqual(qs, [c])
self.assertEqual(qs[0].alfa, a)
def test_existing_join_not_promoted(self):
# No promotion for existing joins
qs = Charlie.objects.filter(alfa__name__isnull=False).annotate(Count('alfa__name'))
self.assertIn(' INNER JOIN ', str(qs.query))
        # The existing join is also un-promoted when filtering on an already
        # promoted join.
qs = Charlie.objects.annotate(Count('alfa__name')).filter(alfa__name__isnull=False)
self.assertIn(' INNER JOIN ', str(qs.query))
        # But, as the join is nullable, its first use by annotate() will be
        # LOUTER.
qs = Charlie.objects.annotate(Count('alfa__name'))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = Book.objects.annotate(Count('contact__name'))
self.assertIn(' INNER JOIN ', str(qs.query))
class SelfReferentialFKTests(TestCase):
def test_ticket_24748(self):
t1 = SelfRefFK.objects.create(name='t1')
SelfRefFK.objects.create(name='t2', parent=t1)
SelfRefFK.objects.create(name='t3', parent=t1)
self.assertQuerysetEqual(
SelfRefFK.objects.annotate(num_children=Count('children')).order_by('name'),
[('t1', 2), ('t2', 0), ('t3', 0)],
lambda x: (x.name, x.num_children)
)
|
afea726b89324a3832ee981800233e5ca3194005dc138e9a67107415c2f9ca62 | import datetime
from django.core import signing
from django.test import SimpleTestCase
from django.test.utils import freeze_time
from django.utils.crypto import InvalidAlgorithm
class TestSigner(SimpleTestCase):
def test_signature(self):
"signature() method should generate a signature"
signer = signing.Signer('predictable-secret')
signer2 = signing.Signer('predictable-secret2')
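        # The expected signature is the base64-encoded HMAC of the value,
        # keyed from the secret and the salt with 'signer' appended, exactly
        # as base64_hmac() is called below.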
for s in (
b'hello',
b'3098247:529:087:',
'\u2019'.encode(),
):
self.assertEqual(
signer.signature(s),
signing.base64_hmac(
signer.salt + 'signer',
s,
'predictable-secret',
algorithm=signer.algorithm,
)
)
self.assertNotEqual(signer.signature(s), signer2.signature(s))
def test_signature_with_salt(self):
"signature(value, salt=...) should work"
signer = signing.Signer('predictable-secret', salt='extra-salt')
self.assertEqual(
signer.signature('hello'),
signing.base64_hmac(
'extra-salt' + 'signer',
'hello',
'predictable-secret',
algorithm=signer.algorithm,
)
)
self.assertNotEqual(
signing.Signer('predictable-secret', salt='one').signature('hello'),
signing.Signer('predictable-secret', salt='two').signature('hello'))
def test_custom_algorithm(self):
signer = signing.Signer('predictable-secret', algorithm='sha512')
self.assertEqual(
signer.signature('hello'),
'Usf3uVQOZ9m6uPfVonKR-EBXjPe7bjMbp3_Fq8MfsptgkkM1ojidN0BxYaT5HAEN1'
'VzO9_jVu7R-VkqknHYNvw',
)
def test_invalid_algorithm(self):
signer = signing.Signer('predictable-secret', algorithm='whatever')
msg = "'whatever' is not an algorithm accepted by the hashlib module."
with self.assertRaisesMessage(InvalidAlgorithm, msg):
signer.sign('hello')
def test_sign_unsign(self):
"sign/unsign should be reversible"
signer = signing.Signer('predictable-secret')
examples = [
'q;wjmbk;wkmb',
'3098247529087',
'3098247:529:087:',
'jkw osanteuh ,rcuh nthu aou oauh ,ud du',
'\u2019',
]
for example in examples:
signed = signer.sign(example)
self.assertIsInstance(signed, str)
self.assertNotEqual(example, signed)
self.assertEqual(example, signer.unsign(signed))
def test_sign_unsign_non_string(self):
signer = signing.Signer('predictable-secret')
values = [
123,
1.23,
True,
datetime.date.today(),
]
for value in values:
with self.subTest(value):
signed = signer.sign(value)
self.assertIsInstance(signed, str)
self.assertNotEqual(signed, value)
self.assertEqual(signer.unsign(signed), str(value))
def test_unsign_detects_tampering(self):
"unsign should raise an exception if the value has been tampered with"
signer = signing.Signer('predictable-secret')
value = 'Another string'
signed_value = signer.sign(value)
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
self.assertEqual(value, signer.unsign(signed_value))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signer.unsign(transform(signed_value))
def test_sign_unsign_object(self):
signer = signing.Signer('predictable-secret')
tests = [
['a', 'list'],
'a string \u2019',
{'a': 'dictionary'},
]
for obj in tests:
with self.subTest(obj=obj):
signed_obj = signer.sign_object(obj)
self.assertNotEqual(obj, signed_obj)
self.assertEqual(obj, signer.unsign_object(signed_obj))
signed_obj = signer.sign_object(obj, compress=True)
self.assertNotEqual(obj, signed_obj)
self.assertEqual(obj, signer.unsign_object(signed_obj))
def test_dumps_loads(self):
"dumps and loads be reversible for any JSON serializable object"
objects = [
['a', 'list'],
'a string \u2019',
{'a': 'dictionary'},
]
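        # dumps(compress=True) zlib-compresses the payload (only when that
        # actually makes it shorter); loads() must round-trip the object
        # either way.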
for o in objects:
self.assertNotEqual(o, signing.dumps(o))
self.assertEqual(o, signing.loads(signing.dumps(o)))
self.assertNotEqual(o, signing.dumps(o, compress=True))
self.assertEqual(o, signing.loads(signing.dumps(o, compress=True)))
def test_decode_detects_tampering(self):
"loads should raise exception for tampered objects"
transforms = (
lambda s: s.upper(),
lambda s: s + 'a',
lambda s: 'a' + s[1:],
lambda s: s.replace(':', ''),
)
value = {
'foo': 'bar',
'baz': 1,
}
encoded = signing.dumps(value)
self.assertEqual(value, signing.loads(encoded))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signing.loads(transform(encoded))
def test_works_with_non_ascii_keys(self):
        binary_key = b'\xe7'  # Set some binary (non-ASCII) key.
s = signing.Signer(binary_key)
self.assertEqual(
'foo:EE4qGC5MEKyQG5msxYA0sBohAxLC0BJf8uRhemh0BGU',
s.sign('foo'),
)
def test_valid_sep(self):
separators = ['/', '*sep*', ',']
for sep in separators:
signer = signing.Signer('predictable-secret', sep=sep)
self.assertEqual(
'foo%sjZQoX_FtSO70jX9HLRGg2A_2s4kdDBxz1QoO_OpEQb0' % sep,
signer.sign('foo'),
)
def test_invalid_sep(self):
"""should warn on invalid separator"""
msg = 'Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)'
separators = ['', '-', 'abc']
for sep in separators:
with self.assertRaisesMessage(ValueError, msg % sep):
signing.Signer(sep=sep)
class TestTimestampSigner(SimpleTestCase):
def test_timestamp_signer(self):
value = 'hello'
with freeze_time(123456789):
signer = signing.TimestampSigner('predictable-key')
ts = signer.sign(value)
self.assertNotEqual(ts, signing.Signer('predictable-key').sign(value))
self.assertEqual(signer.unsign(ts), value)
with freeze_time(123456800):
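            # 11 seconds have elapsed since signing (123456800 - 123456789),
            # so a max_age of 12 or 11 seconds is still valid while 10 has
            # expired.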
self.assertEqual(signer.unsign(ts, max_age=12), value)
# max_age parameter can also accept a datetime.timedelta object
self.assertEqual(signer.unsign(ts, max_age=datetime.timedelta(seconds=11)), value)
with self.assertRaises(signing.SignatureExpired):
signer.unsign(ts, max_age=10)
class TestBase62(SimpleTestCase):
def test_base62(self):
tests = [-10 ** 10, 10 ** 10, 1620378259, *range(-100, 100)]
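        # b62_encode()/b62_decode() should round-trip any integer, including
        # zero and negative values.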
for i in tests:
self.assertEqual(i, signing.b62_decode(signing.b62_encode(i)))
|
22cec0afd258414b379eb719d5004253f4edfcbae9b5d88db78b0fed72ff1a68 | import os
from io import StringIO
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django.apps import apps
from django.core import management
from django.core.checks import Tags
from django.core.management import BaseCommand, CommandError, find_commands
from django.core.management.utils import (
find_command, get_random_secret_key, is_ignored_path,
normalize_path_patterns, popen_wrapper,
)
from django.db import connection
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, extend_sys_path, ignore_warnings
from django.utils import translation
from django.utils.deprecation import RemovedInDjango41Warning
from django.utils.version import PY310
from .management.commands import dance
# A minimal set of apps to avoid system checks running on all apps.
@override_settings(
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'user_commands',
],
)
class CommandTests(SimpleTestCase):
def test_command(self):
out = StringIO()
management.call_command('dance', stdout=out)
self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())
def test_command_style(self):
out = StringIO()
management.call_command('dance', style='Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
# Passing options as arguments also works (thanks argparse)
management.call_command('dance', '--style', 'Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
def test_language_preserved(self):
with translation.override('fr'):
management.call_command('dance', verbosity=0)
self.assertEqual(translation.get_language(), 'fr')
def test_explode(self):
""" An unknown command raises CommandError """
with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"):
management.call_command(('explode',))
def test_system_exit(self):
""" Exception raised in a command should raise CommandError with
call_command, but SystemExit when run from command line
"""
with self.assertRaises(CommandError) as cm:
management.call_command('dance', example="raise")
self.assertEqual(cm.exception.returncode, 3)
dance.Command.requires_system_checks = []
try:
with captured_stderr() as stderr, self.assertRaises(SystemExit) as cm:
management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute()
self.assertEqual(cm.exception.code, 3)
finally:
dance.Command.requires_system_checks = '__all__'
self.assertIn("CommandError", stderr.getvalue())
def test_no_translations_deactivate_translations(self):
"""
When the Command handle method is decorated with @no_translations,
translations are deactivated inside the command.
"""
current_locale = translation.get_language()
with translation.override('pl'):
result = management.call_command('no_translations')
self.assertIsNone(result)
self.assertEqual(translation.get_language(), current_locale)
def test_find_command_without_PATH(self):
"""
find_command should still work when the PATH environment variable
doesn't exist (#22256).
"""
current_path = os.environ.pop('PATH', None)
try:
self.assertIsNone(find_command('_missing_'))
finally:
if current_path is not None:
os.environ['PATH'] = current_path
def test_discover_commands_in_eggs(self):
"""
Management commands can also be loaded from Python eggs.
"""
egg_dir = '%s/eggs' % os.path.dirname(__file__)
egg_name = '%s/basic.egg' % egg_dir
with extend_sys_path(egg_name):
with self.settings(INSTALLED_APPS=['commandegg']):
cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management'))
self.assertEqual(cmds, ['eggcommand'])
def test_call_command_option_parsing(self):
"""
When passing the long option name to call_command, the available option
key is the option dest name (#22985).
"""
out = StringIO()
management.call_command('dance', stdout=out, opt_3=True)
self.assertIn("option3", out.getvalue())
self.assertNotIn("opt_3", out.getvalue())
self.assertNotIn("opt-3", out.getvalue())
def test_call_command_option_parsing_non_string_arg(self):
"""
It should be possible to pass non-string arguments to call_command.
"""
out = StringIO()
management.call_command('dance', 1, verbosity=0, stdout=out)
self.assertIn("You passed 1 as a positional argument.", out.getvalue())
def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self):
out = StringIO()
management.call_command('hal', "--empty", stdout=out)
self.assertEqual(out.getvalue(), "\nDave, I can't do that.\n")
def test_calling_command_with_app_labels_and_parameters_should_be_ok(self):
out = StringIO()
management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
out = StringIO()
management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
with self.assertRaises(CommandError):
management.call_command('hal')
def test_output_transaction(self):
output = management.call_command('transaction', stdout=StringIO(), no_color=True)
self.assertTrue(output.strip().startswith(connection.ops.start_transaction_sql()))
self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql()))
def test_call_command_no_checks(self):
"""
By default, call_command should not trigger the check framework, unless
specifically asked.
"""
self.counter = 0
def patched_check(self_, **kwargs):
self.counter += 1
self.kwargs = kwargs
saved_check = BaseCommand.check
BaseCommand.check = patched_check
try:
management.call_command("dance", verbosity=0)
self.assertEqual(self.counter, 0)
management.call_command("dance", verbosity=0, skip_checks=False)
self.assertEqual(self.counter, 1)
self.assertEqual(self.kwargs, {})
finally:
BaseCommand.check = saved_check
def test_requires_system_checks_empty(self):
with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check:
management.call_command('no_system_checks')
self.assertIs(mocked_check.called, False)
def test_requires_system_checks_specific(self):
with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check:
management.call_command('specific_system_checks')
            mocked_check.assert_called_once_with(tags=[Tags.staticfiles, Tags.models])
def test_requires_system_checks_invalid(self):
class Command(BaseCommand):
requires_system_checks = 'x'
msg = 'requires_system_checks must be a list or tuple.'
with self.assertRaisesMessage(TypeError, msg):
Command()
def test_check_migrations(self):
requires_migrations_checks = dance.Command.requires_migrations_checks
self.assertIs(requires_migrations_checks, False)
try:
with mock.patch.object(BaseCommand, 'check_migrations') as check_migrations:
management.call_command('dance', verbosity=0)
self.assertFalse(check_migrations.called)
dance.Command.requires_migrations_checks = True
management.call_command('dance', verbosity=0)
self.assertTrue(check_migrations.called)
finally:
dance.Command.requires_migrations_checks = requires_migrations_checks
def test_call_command_unrecognized_option(self):
msg = (
'Unknown option(s) for dance command: unrecognized. Valid options '
'are: example, force_color, help, integer, no_color, opt_3, '
'option3, pythonpath, settings, skip_checks, stderr, stdout, '
'style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1)
msg = (
'Unknown option(s) for dance command: unrecognized, unrecognized2. '
'Valid options are: example, force_color, help, integer, no_color, '
'opt_3, option3, pythonpath, settings, skip_checks, stderr, '
'stdout, style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1, unrecognized2=1)
def test_call_command_with_required_parameters_in_options(self):
out = StringIO()
management.call_command('required_option', need_me='foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_call_command_with_required_parameters_in_mixed_options(self):
out = StringIO()
management.call_command('required_option', '--need-me=foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_command_add_arguments_after_common_arguments(self):
out = StringIO()
management.call_command('common_args', stdout=out)
self.assertIn('Detected that --version already exists', out.getvalue())
def test_mutually_exclusive_group_required_options(self):
out = StringIO()
management.call_command('mutually_exclusive_required', foo_id=1, stdout=out)
self.assertIn('foo_id', out.getvalue())
management.call_command('mutually_exclusive_required', foo_name='foo', stdout=out)
self.assertIn('foo_name', out.getvalue())
msg = (
'Error: one of the arguments --foo-id --foo-name --foo-list '
'--append_const --const --count --flag_false --flag_true is '
'required'
)
with self.assertRaisesMessage(CommandError, msg):
management.call_command('mutually_exclusive_required', stdout=out)
def test_mutually_exclusive_group_required_const_options(self):
tests = [
('append_const', [42]),
('const', 31),
('count', 1),
('flag_false', False),
('flag_true', True),
]
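        # Each const-style option is exercised both as a command-line flag
        # and as the equivalent call_command() keyword argument; both should
        # produce the same output.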
for arg, value in tests:
out = StringIO()
expected_output = '%s=%s' % (arg, value)
with self.subTest(arg=arg):
management.call_command(
'mutually_exclusive_required',
'--%s' % arg,
stdout=out,
)
self.assertIn(expected_output, out.getvalue())
out.truncate(0)
management.call_command(
'mutually_exclusive_required',
**{arg: value, 'stdout': out},
)
self.assertIn(expected_output, out.getvalue())
def test_required_list_option(self):
tests = [
(('--foo-list', [1, 2]), {}),
((), {'foo_list': [1, 2]}),
]
for command in ['mutually_exclusive_required', 'required_list_option']:
for args, kwargs in tests:
with self.subTest(command=command, args=args, kwargs=kwargs):
out = StringIO()
management.call_command(
command,
*args,
**{**kwargs, 'stdout': out},
)
self.assertIn('foo_list=[1, 2]', out.getvalue())
def test_required_const_options(self):
args = {
'append_const': [42],
'const': 31,
'count': 1,
'flag_false': False,
'flag_true': True,
}
expected_output = '\n'.join(
'%s=%s' % (arg, value) for arg, value in args.items()
)
out = StringIO()
management.call_command(
'required_constant_option',
'--append_const',
'--const',
'--count',
'--flag_false',
'--flag_true',
stdout=out,
)
self.assertIn(expected_output, out.getvalue())
out.truncate(0)
management.call_command('required_constant_option', **{**args, 'stdout': out})
self.assertIn(expected_output, out.getvalue())
def test_subparser(self):
out = StringIO()
management.call_command('subparser', 'foo', 12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_dest_args(self):
out = StringIO()
management.call_command('subparser_dest', 'foo', bar=12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_dest_required_args(self):
out = StringIO()
management.call_command('subparser_required', 'foo_1', 'foo_2', bar=12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_invalid_option(self):
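        # Python 3.10+ argparse includes the argument name in 'invalid
        # choice' errors, hence the PY310-conditional message below.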
msg = "Error:%s invalid choice: 'test' (choose from 'foo')" % (
' argument {foo}:' if PY310 else ''
)
with self.assertRaisesMessage(CommandError, msg):
management.call_command('subparser', 'test', 12)
msg = 'Error: the following arguments are required: subcommand'
with self.assertRaisesMessage(CommandError, msg):
management.call_command('subparser_dest', subcommand='foo', bar=12)
def test_create_parser_kwargs(self):
"""BaseCommand.create_parser() passes kwargs to CommandParser."""
epilog = 'some epilog text'
parser = BaseCommand().create_parser('prog_name', 'subcommand', epilog=epilog)
self.assertEqual(parser.epilog, epilog)
def test_outputwrapper_flush(self):
out = StringIO()
with mock.patch.object(out, 'flush') as mocked_flush:
management.call_command('outputwrapper', stdout=out)
self.assertIn('Working...', out.getvalue())
self.assertIs(mocked_flush.called, True)
class CommandRunTests(AdminScriptTestCase):
"""
Tests that need to run by simulating the command line, not by call_command.
"""
def test_script_prefix_set_in_commands(self):
self.write_settings('settings.py', apps=['user_commands'], sdict={
'ROOT_URLCONF': '"user_commands.urls"',
'FORCE_SCRIPT_NAME': '"/PREFIX/"',
})
out, err = self.run_manage(['reverse_url'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), '/PREFIX/some/url/')
def test_disallowed_abbreviated_options(self):
"""
To avoid conflicts with custom options, commands don't allow
        abbreviated forms of the --settings and --pythonpath options.
"""
self.write_settings('settings.py', apps=['user_commands'])
out, err = self.run_manage(['set_option', '--set', 'foo'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), 'Set foo')
def test_skip_checks(self):
self.write_settings('settings.py', apps=['django.contrib.staticfiles', 'user_commands'], sdict={
# (staticfiles.E001) The STATICFILES_DIRS setting is not a tuple or
# list.
'STATICFILES_DIRS': '"foo"',
})
out, err = self.run_manage(['set_option', '--skip-checks', '--set', 'foo'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), 'Set foo')
class UtilsTests(SimpleTestCase):
def test_no_existent_external_program(self):
msg = 'Error executing a_42_command_that_doesnt_exist_42'
with self.assertRaisesMessage(CommandError, msg):
popen_wrapper(['a_42_command_that_doesnt_exist_42'])
def test_get_random_secret_key(self):
key = get_random_secret_key()
self.assertEqual(len(key), 50)
for char in key:
self.assertIn(char, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
def test_is_ignored_path_true(self):
patterns = (
['foo/bar/baz'],
['baz'],
['foo/bar/baz'],
['*/baz'],
['*'],
['b?z'],
['[abc]az'],
['*/ba[!z]/baz'],
)
for ignore_patterns in patterns:
with self.subTest(ignore_patterns=ignore_patterns):
self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=ignore_patterns), True)
def test_is_ignored_path_false(self):
self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=['foo/bar/bat', 'bar', 'flub/blub']), False)
def test_normalize_path_patterns_truncates_wildcard_base(self):
expected = [os.path.normcase(p) for p in ['foo/bar', 'bar/*/']]
self.assertEqual(normalize_path_patterns(['foo/bar/*', 'bar/*/']), expected)
class DeprecationTests(SimpleTestCase):
def test_requires_system_checks_warning(self):
class Command(BaseCommand):
pass
msg = (
"Using a boolean value for requires_system_checks is deprecated. "
"Use '__all__' instead of True, and [] (an empty list) instead of "
"False."
)
for value in [False, True]:
Command.requires_system_checks = value
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
Command()
@ignore_warnings(category=RemovedInDjango41Warning)
def test_requires_system_checks_true(self):
class Command(BaseCommand):
requires_system_checks = True
def handle(self, *args, **options):
pass
command = Command()
with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check:
management.call_command(command, skip_checks=False)
mocked_check.assert_called_once_with()
@ignore_warnings(category=RemovedInDjango41Warning)
def test_requires_system_checks_false(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
pass
command = Command()
with mock.patch('django.core.management.base.BaseCommand.check') as mocked_check:
management.call_command(command)
self.assertIs(mocked_check.called, False)
|
fec657e53a77627abb7c0ef3d6f4ae2c1e1150b8b2d929ef8e6ead0272ef1f20 | import sys
import unittest
from django.conf import settings
from django.contrib import admin
from django.contrib.admindocs import utils, views
from django.contrib.admindocs.views import get_return_data_type, simplify_regex
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import fields
from django.test import SimpleTestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from django.urls import include, path, reverse
from django.utils.functional import SimpleLazyObject
from .models import Company, Person
from .tests import AdminDocsTestCase, TestDataMixin
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_index(self):
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(response, '<h1>Documentation</h1>', html=True)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
self.client.logout()
response = self.client.get(reverse('django-admindocs-docroot'), follow=True)
# Should display the login screen
self.assertContains(response, '<input type="hidden" name="next" value="/admindocs/">', html=True)
def test_bookmarklets(self):
response = self.client.get(reverse('django-admindocs-bookmarklets'))
self.assertContains(response, '/admindocs/views/')
def test_templatetag_index(self):
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True)
def test_templatefilter_index(self):
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
def test_view_index(self):
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>',
html=True
)
self.assertContains(response, 'Views by namespace test')
self.assertContains(response, 'Name: <code>test:func</code>.')
self.assertContains(
response,
'<h3><a href="/admindocs/views/admin_docs.views.XViewCallableObject/">'
'/xview/callable_object_without_xview/</a></h3>',
html=True,
)
def test_view_index_with_method(self):
"""
Views that are methods are listed correctly.
"""
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">/admin/</a></h3>',
html=True
)
def test_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['django.contrib.admindocs.views.BaseAdminDocsView'])
response = self.client.get(url)
# View docstring
self.assertContains(response, 'Base view for admindocs views.')
@override_settings(ROOT_URLCONF='admin_docs.namespace_urls')
def test_namespaced_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['admin_docs.views.XViewClass'])
response = self.client.get(url)
self.assertContains(response, '<h1>admin_docs.views.XViewClass</h1>')
def test_view_detail_illegal_import(self):
url = reverse('django-admindocs-views-detail', args=['urlpatterns_reverse.nonimported_module.view'])
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_view_detail_as_method(self):
"""
Views that are methods can be displayed.
"""
url = reverse('django-admindocs-views-detail', args=['django.contrib.admin.sites.AdminSite.index'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_model_index(self):
response = self.client.get(reverse('django-admindocs-models-index'))
self.assertContains(
response,
'<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>',
html=True
)
def test_template_detail(self):
response = self.client.get(reverse('django-admindocs-templates', args=['admin_doc/template_detail.html']))
self.assertContains(response, '<h1>Template: <q>admin_doc/template_detail.html</q></h1>', html=True)
def test_missing_docutils(self):
utils.docutils_is_available = False
try:
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(
response,
'<h3>The admin documentation system requires Python’s '
'<a href="https://docutils.sourceforge.io/">docutils</a> '
'library.</h3>'
'<p>Please ask your administrators to install '
'<a href="https://docutils.sourceforge.io/">docutils</a>.</p>',
html=True
)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
finally:
utils.docutils_is_available = True
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
@override_settings(SITE_ID=None) # will restore SITE_ID after the test
def test_no_sites_framework(self):
"""
        Without the sites framework, the views should not access SITE_ID or
        Site objects. Deleting settings is fine here as UserSettingsHolder is
        used.
"""
Site.objects.all().delete()
del settings.SITE_ID
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(response, 'View documentation')
def test_callable_urlconf(self):
"""
Index view should correctly resolve view patterns when ROOT_URLCONF is
not a string.
"""
def urlpatterns():
return (
path('admin/doc/', include('django.contrib.admindocs.urls')),
path('admin/', admin.site.urls),
)
with self.settings(ROOT_URLCONF=SimpleLazyObject(urlpatterns)):
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertEqual(response.status_code, 200)
@unittest.skipUnless(utils.docutils_is_available, 'no docutils installed.')
class AdminDocViewDefaultEngineOnly(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_template_detail_path_traversal(self):
cases = ['/etc/passwd', '../passwd']
for fpath in cases:
with self.subTest(path=fpath):
response = self.client.get(
reverse('django-admindocs-templates', args=[fpath]),
)
self.assertEqual(response.status_code, 400)
@override_settings(TEMPLATES=[{
'NAME': 'ONE',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}, {
'NAME': 'TWO',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}])
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
def test_templatefilter_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<title>Template filters</title>', html=True)
def test_templatetag_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<title>Template tags</title>', html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
with captured_stderr() as self.docutils_stderr:
self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person']))
def test_method_excludes(self):
"""
Methods that begin with strings defined in
``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
shouldn't be displayed in the admin docs.
"""
self.assertContains(self.response, "<td>get_full_name</td>")
self.assertNotContains(self.response, "<td>_get_full_name</td>")
self.assertNotContains(self.response, "<td>add_image</td>")
self.assertNotContains(self.response, "<td>delete_image</td>")
self.assertNotContains(self.response, "<td>set_status</td>")
self.assertNotContains(self.response, "<td>save_changes</td>")
def test_methods_with_arguments(self):
"""
        Methods that take arguments should also be displayed.
"""
self.assertContains(self.response, "<h3>Methods with arguments</h3>")
self.assertContains(self.response, "<td>rename_company</td>")
self.assertContains(self.response, "<td>dummy_function</td>")
self.assertContains(self.response, "<td>suffix_company_name</td>")
def test_methods_with_arguments_display_arguments(self):
"""
Methods with arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>new_name</td>")
def test_methods_with_arguments_display_arguments_default_value(self):
"""
Methods with keyword arguments should have their arguments displayed.
"""
        self.assertContains(self.response, "<td>suffix='ltd'</td>")
def test_methods_with_multiple_arguments_display_arguments(self):
"""
Methods with multiple arguments should have all their arguments
displayed, but omitting 'self'.
"""
self.assertContains(self.response, "<td>baz, rox, *some_args, **some_kwargs</td>")
def test_instance_of_property_methods_are_displayed(self):
"""Model properties are displayed as fields."""
self.assertContains(self.response, '<td>a_property</td>')
def test_instance_of_cached_property_methods_are_displayed(self):
"""Model cached properties are displayed as fields."""
self.assertContains(self.response, '<td>a_cached_property</td>')
def test_method_data_types(self):
company = Company.objects.create(name="Django")
person = Person.objects.create(first_name="Human", last_name="User", company=company)
self.assertEqual(get_return_data_type(person.get_status_count.__name__), 'Integer')
self.assertEqual(get_return_data_type(person.get_groups_list.__name__), 'List')
def test_descriptions_render_correctly(self):
"""
The ``description`` field should render correctly for each field type.
"""
# help text in fields
self.assertContains(self.response, "<td>first name - The person's first name</td>")
self.assertContains(self.response, "<td>last name - The person's last name</td>")
# method docstrings
self.assertContains(self.response, "<p>Get the full name of the person</p>")
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
markup = '<p>the related %s object</p>'
company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company"))
# foreign keys
self.assertContains(self.response, company_markup)
# foreign keys with help text
self.assertContains(self.response, "%s\n - place of work" % company_markup)
# many to many fields
self.assertContains(
self.response,
"number of related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
)
self.assertContains(
self.response,
"all related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
)
# "raw" and "include" directives are disabled
self.assertContains(self.response, '<p>"raw" directive disabled.</p>',)
self.assertContains(self.response, '.. raw:: html\n :file: admin_docs/evilfile.txt')
self.assertContains(self.response, '<p>"include" directive disabled.</p>',)
self.assertContains(self.response, '.. include:: admin_docs/evilfile.txt')
out = self.docutils_stderr.getvalue()
self.assertIn('"raw" directive disabled', out)
self.assertIn('"include" directive disabled', out)
def test_model_with_many_to_one(self):
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
response = self.client.get(
reverse('django-admindocs-models-detail', args=['admin_docs', 'company'])
)
self.assertContains(
response,
"number of related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
)
self.assertContains(
response,
"all related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
)
def test_model_with_no_backward_relations_render_only_relevant_fields(self):
"""
A model with ``related_name`` of `+` shouldn't show backward
relationship links.
"""
response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'family']))
fields = response.context_data.get('fields')
self.assertEqual(len(fields), 2)
def test_model_docstring_renders_correctly(self):
summary = (
'<h2 class="subhead"><p>Stores information about a person, related to <a class="reference external" '
'href="/admindocs/models/myapp.company/">myapp.Company</a>.</p></h2>'
)
subheading = '<p><strong>Notes</strong></p>'
body = '<p>Use <tt class="docutils literal">save_changes()</tt> when saving this object.</p>'
model_body = (
'<dl class="docutils"><dt><tt class="'
'docutils literal">company</tt></dt><dd>Field storing <a class="'
'reference external" href="/admindocs/models/myapp.company/">'
'myapp.Company</a> where the person works.</dd></dl>'
)
self.assertContains(self.response, 'DESCRIPTION')
self.assertContains(self.response, summary, html=True)
self.assertContains(self.response, subheading, html=True)
self.assertContains(self.response, body, html=True)
self.assertContains(self.response, model_body, html=True)
def test_model_detail_title(self):
self.assertContains(self.response, '<h1>admin_docs.Person</h1>', html=True)
def test_app_not_found(self):
response = self.client.get(reverse('django-admindocs-models-detail', args=['doesnotexist', 'Person']))
self.assertEqual(response.context['exception'], "App 'doesnotexist' not found")
self.assertEqual(response.status_code, 404)
def test_model_not_found(self):
response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'doesnotexist']))
self.assertEqual(response.context['exception'], "Model 'doesnotexist' not found in app 'admin_docs'")
self.assertEqual(response.status_code, 404)
class CustomField(models.Field):
description = "A custom field type"
class DescriptionLackingField(models.Field):
pass
class TestFieldType(unittest.TestCase):
def test_field_name(self):
with self.assertRaises(AttributeError):
views.get_readable_field_data_type("NotAField")
def test_builtin_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.BooleanField()),
'Boolean (Either True or False)'
)
def test_custom_fields(self):
self.assertEqual(views.get_readable_field_data_type(CustomField()), 'A custom field type')
self.assertEqual(
views.get_readable_field_data_type(DescriptionLackingField()),
'Field of type: DescriptionLackingField'
)
class AdminDocViewFunctionsTests(SimpleTestCase):
def test_simplify_regex(self):
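        # simplify_regex() replaces named groups with <name>, unnamed groups
        # with <var>, strips regex anchors, and ensures a leading '/', as the
        # cases below demonstrate.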
tests = (
(r'^a', '/a'),
(r'^(?P<a>\w+)/b/(?P<c>\w+)/$', '/<a>/b/<c>/'),
(r'^(?P<a>\w+)/b/(?P<c>\w+)$', '/<a>/b/<c>'),
(r'^(?P<a>\w+)/b/(?P<c>\w+)', '/<a>/b/<c>'),
(r'^(?P<a>\w+)/b/(\w+)$', '/<a>/b/<var>'),
(r'^(?P<a>\w+)/b/(\w+)', '/<a>/b/<var>'),
(r'^(?P<a>\w+)/b/((x|y)\w+)$', '/<a>/b/<var>'),
(r'^(?P<a>\w+)/b/((x|y)\w+)', '/<a>/b/<var>'),
(r'^(?P<a>(x|y))/b/(?P<c>\w+)$', '/<a>/b/<c>'),
(r'^(?P<a>(x|y))/b/(?P<c>\w+)', '/<a>/b/<c>'),
(r'^(?P<a>(x|y))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'),
(r'^(?P<a>(x|y)(\(|\)))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'),
(r'^a/?$', '/a/'),
)
for pattern, output in tests:
with self.subTest(pattern=pattern):
self.assertEqual(simplify_regex(pattern), output)
|
9f564f1f1a147c43f93e8557623b38ecaf3290a51fef4f3f5bd8171011de9a07 | from django.db import IntegrityError
from django.db.models import ProtectedError, Q, Sum
from django.forms.models import modelform_factory
from django.test import TestCase, skipIfDBFeature
from .models import (
A, Address, B, Board, C, Cafe, CharLink, Company, Contact, Content, D,
Developer, Guild, HasLinkThing, Link, Node, Note, OddRelation1,
OddRelation2, Organization, Person, Place, Related, Restaurant, Tag, Team,
TextLink,
)
class GenericRelationTests(TestCase):
def test_inherited_models_content_type(self):
"""
GenericRelations on inherited classes use the correct content type.
"""
p = Place.objects.create(name="South Park")
r = Restaurant.objects.create(name="Chubby's")
l1 = Link.objects.create(content_object=p)
l2 = Link.objects.create(content_object=r)
self.assertEqual(list(p.links.all()), [l1])
self.assertEqual(list(r.links.all()), [l2])
def test_reverse_relation_pk(self):
"""
The correct column name is used for the primary key on the
originating model of a query. See #12664.
"""
p = Person.objects.create(account=23, name='Chef')
Address.objects.create(street='123 Anywhere Place',
city='Conifer', state='CO',
zipcode='80433', content_object=p)
qs = Person.objects.filter(addresses__zipcode='80433')
self.assertEqual(1, qs.count())
self.assertEqual('Chef', qs[0].name)
def test_charlink_delete(self):
oddrel = OddRelation1.objects.create(name='clink')
CharLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_textlink_delete(self):
oddrel = OddRelation2.objects.create(name='tlink')
TextLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_coerce_object_id_remote_field_cache_persistence(self):
restaurant = Restaurant.objects.create()
CharLink.objects.create(content_object=restaurant)
charlink = CharLink.objects.latest('pk')
self.assertIs(charlink.content_object, charlink.content_object)
        # The same holds if the model (Cafe) uses more than one level of
        # multi-table inheritance.
cafe = Cafe.objects.create()
CharLink.objects.create(content_object=cafe)
charlink = CharLink.objects.latest('pk')
self.assertIs(charlink.content_object, charlink.content_object)
def test_q_object_or(self):
"""
SQL query parameters for generic relations are properly
grouped when OR is used (#11535).
In this bug the first query (below) works while the second, with the
query parameters the same but in reverse order, does not.
The issue is that the generic relation conditions do not get properly
grouped in parentheses.
"""
note_contact = Contact.objects.create()
org_contact = Contact.objects.create()
Note.objects.create(note='note', content_object=note_contact)
org = Organization.objects.create(name='org name')
org.contacts.add(org_contact)
# search with a non-matching note and a matching org name
qs = Contact.objects.filter(Q(notes__note__icontains=r'other note') |
Q(organizations__name__icontains=r'org name'))
self.assertIn(org_contact, qs)
# search again, with the same query parameters, in reverse order
qs = Contact.objects.filter(
Q(organizations__name__icontains=r'org name') |
Q(notes__note__icontains=r'other note'))
self.assertIn(org_contact, qs)
def test_join_reuse(self):
qs = Person.objects.filter(
addresses__street='foo'
).filter(
addresses__street='bar'
)
self.assertEqual(str(qs.query).count('JOIN'), 2)
def test_generic_relation_ordering(self):
"""
        Ordering over a generic relation does not include extraneous
        duplicate results, nor does it exclude rows not participating in the
        relation.
"""
p1 = Place.objects.create(name="South Park")
p2 = Place.objects.create(name="The City")
c = Company.objects.create(name="Chubby's Intl.")
Link.objects.create(content_object=p1)
Link.objects.create(content_object=c)
places = list(Place.objects.order_by('links__id'))
def count_places(place):
return len([p for p in places if p.id == place.id])
self.assertEqual(len(places), 2)
self.assertEqual(count_places(p1), 1)
self.assertEqual(count_places(p2), 1)
def test_target_model_is_unsaved(self):
"""Test related to #13085"""
# Fails with another, ORM-level error
dev1 = Developer(name='Joe')
note = Note(note='Deserves promotion', content_object=dev1)
with self.assertRaises(IntegrityError):
note.save()
def test_target_model_len_zero(self):
"""
Saving a model with a GenericForeignKey to a model instance whose
__len__ method returns 0 (Team.__len__() here) shouldn't fail (#13085).
"""
team1 = Team.objects.create(name='Backend devs')
note = Note(note='Deserve a bonus', content_object=team1)
note.save()
def test_target_model_bool_false(self):
"""
Saving a model with a GenericForeignKey to a model instance whose
__bool__ method returns False (Guild.__bool__() here) shouldn't fail
(#13085).
"""
g1 = Guild.objects.create(name='First guild')
note = Note(note='Note for guild', content_object=g1)
note.save()
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_gfk_to_model_with_empty_pk(self):
"""Test related to #13085"""
# Saving model with GenericForeignKey to model instance with an
# empty CharField PK
b1 = Board.objects.create(name='')
tag = Tag(label='VP', content_object=b1)
tag.save()
def test_ticket_20378(self):
# Create a couple of extra HasLinkThing so that the autopk value
# isn't the same for Link and HasLinkThing.
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
hs3 = HasLinkThing.objects.create()
hs4 = HasLinkThing.objects.create()
l1 = Link.objects.create(content_object=hs3)
l2 = Link.objects.create(content_object=hs4)
self.assertSequenceEqual(HasLinkThing.objects.filter(links=l1), [hs3])
self.assertSequenceEqual(HasLinkThing.objects.filter(links=l2), [hs4])
self.assertSequenceEqual(HasLinkThing.objects.exclude(links=l2), [hs1, hs2, hs3])
self.assertSequenceEqual(HasLinkThing.objects.exclude(links=l1), [hs1, hs2, hs4])
def test_ticket_20564(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
c1 = C.objects.create(b=b1)
c2 = C.objects.create(b=b2)
c3 = C.objects.create(b=b3)
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertSequenceEqual(C.objects.filter(b__a__flag=None), [c1, c3])
self.assertSequenceEqual(C.objects.exclude(b__a__flag=None), [c2])
def test_ticket_20564_nullable_fk(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
d1 = D.objects.create(b=b1)
d2 = D.objects.create(b=b2)
d3 = D.objects.create(b=b3)
d4 = D.objects.create()
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertSequenceEqual(D.objects.exclude(b__a__flag=None), [d2])
self.assertSequenceEqual(D.objects.filter(b__a__flag=None), [d1, d3, d4])
self.assertSequenceEqual(B.objects.filter(a__flag=None), [b1, b3])
self.assertSequenceEqual(B.objects.exclude(a__flag=None), [b2])
def test_extra_join_condition(self):
        # A crude check that content_type_id is taken into account in the
        # join/subquery condition.
self.assertIn("content_type_id", str(B.objects.exclude(a__flag=None).query).lower())
        # No need for any joins - the join from the inner query can be
        # trimmed in this case (but not in the above case, as B objects with
        # no related A objects at all would then fail).
self.assertNotIn(" join ", str(B.objects.exclude(a__flag=True).query).lower())
self.assertIn("content_type_id", str(B.objects.exclude(a__flag=True).query).lower())
def test_annotate(self):
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
HasLinkThing.objects.create()
b = Board.objects.create(name=str(hs1.pk))
Link.objects.create(content_object=hs2)
link = Link.objects.create(content_object=hs1)
Link.objects.create(content_object=b)
qs = HasLinkThing.objects.annotate(Sum('links')).filter(pk=hs1.pk)
# If content_type restriction isn't in the query's join condition,
# then wrong results are produced here as the link to b will also match
# (b and hs1 have equal pks).
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].links__sum, link.id)
link.delete()
        # Now, without a proper LEFT JOIN, no results at all would be
        # produced here.
        # Clear cached results.
qs = qs.all()
self.assertEqual(qs.count(), 1)
# Note - 0 here would be a nicer result...
self.assertIs(qs[0].links__sum, None)
# Finally test that filtering works.
self.assertEqual(qs.filter(links__sum__isnull=True).count(), 1)
self.assertEqual(qs.filter(links__sum__isnull=False).count(), 0)
def test_filter_targets_related_pk(self):
# Use hardcoded PKs to ensure different PKs for "link" and "hs2"
# objects.
HasLinkThing.objects.create(pk=1)
hs2 = HasLinkThing.objects.create(pk=2)
link = Link.objects.create(content_object=hs2, pk=1)
self.assertNotEqual(link.object_id, link.pk)
self.assertSequenceEqual(HasLinkThing.objects.filter(links=link.pk), [hs2])
def test_editable_generic_rel(self):
GenericRelationForm = modelform_factory(HasLinkThing, fields='__all__')
form = GenericRelationForm()
self.assertIn('links', form.fields)
form = GenericRelationForm({'links': None})
self.assertTrue(form.is_valid())
form.save()
links = HasLinkThing._meta.get_field('links')
self.assertEqual(links.save_form_data_calls, 1)
def test_ticket_22998(self):
related = Related.objects.create()
content = Content.objects.create(related_obj=related)
Node.objects.create(content=content)
        # Deleting the Related cascades to the Content, which cascades to the
        # Node, where the pre_delete signal should fire and prevent deletion.
with self.assertRaises(ProtectedError):
related.delete()
def test_ticket_22982(self):
place = Place.objects.create(name='My Place')
self.assertIn('GenericRelatedObjectManager', str(place.links))
def test_filter_on_related_proxy_model(self):
place = Place.objects.create()
Link.objects.create(content_object=place)
self.assertEqual(Place.objects.get(link_proxy__object_id=place.id), place)
def test_generic_reverse_relation_with_mti(self):
"""
Filtering with a reverse generic relation, where the GenericRelation
comes from multi-table inheritance.
"""
place = Place.objects.create(name='Test Place')
link = Link.objects.create(content_object=place)
result = Link.objects.filter(places=place)
self.assertCountEqual(result, [link])
def test_generic_reverse_relation_with_abc(self):
"""
The reverse generic relation accessor (targets) is created if the
GenericRelation comes from an abstract base model (HasLinks).
"""
thing = HasLinkThing.objects.create()
link = Link.objects.create(content_object=thing)
self.assertCountEqual(link.targets.all(), [thing])
|
1e0373a8a26c6c1e70a94de8f75198f692e33e38bb81318f453b0ff5e73a1c56 | import decimal
import enum
import json
import unittest
import uuid
from django import forms
from django.core import checks, exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.db.models.expressions import Exists, OuterRef, RawSQL, Value
from django.db.models.functions import Cast, JSONObject, Upper
from django.test import TransactionTestCase, modify_settings, override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import (
PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase,
)
from .models import (
ArrayEnumModel, ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel,
IntegerArrayModel, NestedIntegerArrayModel, NullableIntegerArrayModel,
OtherTypesArrayModel, PostgreSQLModel, Tag,
)
try:
from psycopg2.extras import NumericRange
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.expressions import ArraySubquery
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.fields.array import (
IndexTransform, SliceTransform,
)
from django.contrib.postgres.forms import (
SimpleArrayField, SplitArrayField, SplitArrayWidget,
)
from django.db.backends.postgresql.base import PSYCOPG2_VERSION
except ImportError:
    pass  # psycopg2 isn't installed.
@isolate_apps('postgres_tests')
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
['Media', [(['vinyl', 'cd'], 'Audio')]],
(('mp3', 'mp4'), 'Digital'),
],
)
tests = (
(['vinyl', 'cd'], 'Audio'),
(('mp3', 'mp4'), 'Digital'),
(('a', 'b'), "('a', 'b')"),
(['c', 'd'], "['c', 'd']"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_get_field_display_nested_array(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
ArrayField(models.CharField(max_length=16)),
choices=[
[
'Media',
[([['vinyl', 'cd'], ('x',)], 'Audio')],
],
((['mp3'], ('mp4',)), 'Digital'),
],
)
tests = (
([['vinyl', 'cd'], ('x',)], 'Audio'),
((['mp3'], ('mp4',)), 'Digital'),
((('a', 'b'), ('c',)), "(('a', 'b'), ('c',))"),
([['a', 'b'], ['c']], "[['a', 'b'], ['c']]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=['hello', 'goodbye'])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=['1'])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertIsNone(loaded.field)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=[Tag(1), Tag(2), Tag(3)],
json=[{'a': 1}, {'b': 2}],
int_ranges=[NumericRange(10, 20), NumericRange(30, 40)],
bigint_ranges=[
NumericRange(7000000000, 10000000000),
NumericRange(50000000000, 70000000000),
]
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.tags, loaded.tags)
self.assertEqual(instance.json, loaded.json)
self.assertEqual(instance.int_ranges, loaded.int_ranges)
self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges)
def test_null_from_db_value_handling(self):
instance = OtherTypesArrayModel.objects.create(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=None,
)
instance.refresh_from_db()
self.assertIsNone(instance.tags)
self.assertEqual(instance.json, [])
self.assertIsNone(instance.int_ranges)
self.assertIsNone(instance.bigint_ranges)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field('field')
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
def test_nested_nullable_base_field(self):
if PSYCOPG2_VERSION < (2, 7, 5):
self.skipTest('See https://github.com/psycopg/psycopg2/issues/325')
instance = NullableIntegerArrayModel.objects.create(
field_nested=[[None, None], [None, None]],
)
self.assertEqual(instance.field_nested, [[None, None], [None, None]])
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = NullableIntegerArrayModel.objects.bulk_create([
NullableIntegerArrayModel(order=1, field=[1]),
NullableIntegerArrayModel(order=2, field=[2]),
NullableIntegerArrayModel(order=3, field=[2, 3]),
NullableIntegerArrayModel(order=4, field=[20, 30, 40]),
NullableIntegerArrayModel(order=5, field=None),
])
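        # The index-based assertions below rely on this fixture order:
        # objs[0].field == [1], objs[1].field == [2], objs[2].field == [2, 3],
        # objs[3].field == [20, 30, 40], objs[4].field is None.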
def test_empty_list(self):
NullableIntegerArrayModel.objects.create(field=[])
obj = NullableIntegerArrayModel.objects.annotate(
empty_array=models.Value([], output_field=ArrayField(models.IntegerField())),
).filter(field=models.F('empty_array')).get()
self.assertEqual(obj.field, [])
self.assertEqual(obj.empty_array, [])
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]),
self.objs[:1]
)
def test_exact_with_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[Value(1)]),
self.objs[:1],
)
def test_exact_charfield(self):
instance = CharArrayModel.objects.create(field=['text'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field=['text']),
[instance]
)
def test_exact_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]),
[instance]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True),
self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]),
self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]),
self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2]
)
def test_in_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__in=IntegerArrayModel.objects.all().values_list('field', flat=True)
),
self.objs[2:3]
)
@unittest.expectedFailure
def test_in_including_F_object(self):
        # This test asserts that arrays passed to filters can contain F
        # objects. It currently doesn't work because the psycopg2 mogrify
        # method that generates the ARRAY() syntax expects literals, not
        # column references (#27095).
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]),
self.objs[:2]
)
def test_in_as_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]),
self.objs[:4]
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2]
)
def test_contained_by_including_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('order'), 2]),
self.objs[:3],
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3]
)
def test_contains_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
inner_qs = IntegerArrayModel.objects.values_list('field', flat=True)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=inner_qs[:1]),
self.objs[2:3],
)
inner_qs = IntegerArrayModel.objects.filter(field__contains=OuterRef('field'))
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(Exists(inner_qs)),
self.objs[1:3],
)
def test_contains_including_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__contains=[2, Value(6) / Value(2)],
),
self.objs[2:3],
)
def test_icontains(self):
# Using the __icontains lookup with ArrayField is inefficient.
instance = CharArrayModel.objects.create(field=['FoO'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__icontains='foo'),
[instance]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=['text']),
[]
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=['text']),
[]
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=['text']),
[]
)
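    # These lookups translate to PostgreSQL array operators: contains -> @>,
    # contained_by -> <@, overlap -> &&. With a CharField base field they
    # compare whole array elements, not substrings (contrast __icontains above).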
def test_overlap_charfield_including_expression(self):
obj_1 = CharArrayModel.objects.create(field=['TEXT', 'lower text'])
obj_2 = CharArrayModel.objects.create(field=['lower text', 'TEXT'])
CharArrayModel.objects.create(field=['lower text', 'text'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=[
Upper(Value('text')),
'other',
]),
[obj_1, obj_2],
)
def test_lookups_autofield_array(self):
qs = NullableIntegerArrayModel.objects.filter(
field__0__isnull=False,
).values('field__0').annotate(
arrayagg=ArrayAgg('id'),
).order_by('field__0')
tests = (
('contained_by', [self.objs[1].pk, self.objs[2].pk, 0], [2]),
('contains', [self.objs[2].pk], [2]),
('exact', [self.objs[3].pk], [20]),
('overlap', [self.objs[1].pk, self.objs[3].pk], [2, 20]),
)
for lookup, value, expected in tests:
with self.subTest(lookup=lookup):
self.assertSequenceEqual(
qs.filter(
**{'arrayagg__' + lookup: value},
).values_list('field__0', flat=True),
expected,
)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2),
self.objs[1:3]
)
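    # Index transforms are 0-based at the ORM level (field__0 is the first
    # item) even though PostgreSQL arrays are 1-based; the transform adjusts
    # the index when building SQL, which is why test_index_transform_expression
    # below uses IndexTransform(1, ...) to mean the same thing as field__0.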
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3),
self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1),
[instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
[instance]
)
def test_index_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ['1;2'])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0=Cast(
IndexTransform(1, models.IntegerField, expr),
output_field=models.IntegerField(),
),
),
self.objs[:1],
)
def test_index_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(second=models.F('field__1'))
self.assertCountEqual(
qs.values_list('second', flat=True),
[None, None, None, 3, 30],
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3]
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2),
self.objs[0:3]
)
def test_len_empty_array(self):
obj = NullableIntegerArrayModel.objects.create(field=[])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len=0),
[obj]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
self.objs[2:3]
)
def test_order_by_slice(self):
more_objs = (
NullableIntegerArrayModel.objects.create(field=[1, 637]),
NullableIntegerArrayModel.objects.create(field=[2, 1]),
NullableIntegerArrayModel.objects.create(field=[3, -98123]),
NullableIntegerArrayModel.objects.create(field=[4, 2]),
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.order_by('field__1'),
[
more_objs[2], more_objs[1], more_objs[3], self.objs[2],
self.objs[3], more_objs[0], self.objs[4], self.objs[1],
self.objs[0],
]
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
[instance]
)
def test_slice_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ['9;2;3'])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=SliceTransform(2, 3, expr)),
self.objs[2:3],
)
def test_slice_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(
first_two=models.F('field__0_2'),
)
self.assertCountEqual(
qs.values_list('first_two', flat=True),
[None, [1], [2], [2, 3], [20, 30]],
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]]
)
def test_enum_lookup(self):
class TestEnum(enum.Enum):
VALUE_1 = 'value_1'
instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1])
self.assertSequenceEqual(
ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]),
[instance]
)
def test_unsupported_lookup(self):
msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted."
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))
msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted."
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))
def test_grouping_by_annotations_with_array_field_param(self):
value = models.Value([1], output_field=ArrayField(models.IntegerField()))
self.assertEqual(
NullableIntegerArrayModel.objects.annotate(
array_length=models.Func(
value, 1, function='ARRAY_LENGTH', output_field=models.IntegerField(),
),
).values('array_length').annotate(
count=models.Count('pk'),
).get()['array_length'],
1,
)
def test_filter_by_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.filter(
field__len=models.OuterRef('field__len'),
).values('field')
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.alias(
same_sized_fields=ArraySubquery(inner_qs),
).filter(same_sized_fields__len__gt=1),
self.objs[0:2],
)
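    # ArraySubquery wraps the subquery in the PostgreSQL ARRAY(...) constructor,
    # collecting the inner rows into a single array value per outer row.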
def test_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef('pk')
).values('order')
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
).get(order=1).sibling_ids,
[2, 3, 4, 5],
)
def test_group_by_with_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef('pk')
).values('order')
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
sibling_count=models.Max('sibling_ids__len'),
).values_list('sibling_count', flat=True),
[len(self.objs) - 1] * len(self.objs),
)
def test_annotated_ordered_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.order_by('-order').values('order')
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
ids=ArraySubquery(inner_qs),
).first().ids,
[5, 4, 3, 2, 1],
)
def test_annotated_array_subquery_with_json_objects(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef('pk')
).values(json=JSONObject(order='order', field='field'))
siblings_json = NullableIntegerArrayModel.objects.annotate(
siblings_json=ArraySubquery(inner_qs),
).values_list('siblings_json', flat=True).get(order=1)
self.assertSequenceEqual(
siblings_json,
[
{'field': [2], 'order': 2},
{'field': [2, 3], 'order': 3},
{'field': [20, 30, 40], 'order': 4},
{'field': None, 'order': 5},
],
)
class TestDateTimeExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
now = timezone.now()
cls.datetimes = [now]
cls.dates = [now.date()]
cls.times = [now.time()]
cls.objs = [
DateTimeArrayModel.objects.create(datetimes=cls.datetimes, dates=cls.dates, times=cls.times),
]
def test_exact_datetimes(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(datetimes=self.datetimes),
self.objs
)
def test_exact_dates(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(dates=self.dates),
self.objs
)
def test_exact_times(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(times=self.times),
self.objs
)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.ips = ['192.168.0.1', '::1']
cls.uuids = [uuid.uuid4()]
cls.decimals = [decimal.Decimal(1.25), 1.75]
cls.tags = [Tag(1), Tag(2), Tag(3)]
cls.objs = [
OtherTypesArrayModel.objects.create(
ips=cls.ips,
uuids=cls.uuids,
decimals=cls.decimals,
tags=cls.tags,
)
]
def test_exact_ip_addresses(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(ips=self.ips),
self.objs
)
def test_exact_uuids(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(uuids=self.uuids),
self.objs
)
def test_exact_decimals(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(decimals=self.decimals),
self.objs
)
def test_exact_tags(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(tags=self.tags),
self.objs
)
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLSimpleTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField())
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, 'postgres.E002')
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=[])
model = MyModel()
self.assertEqual(model.check(), [
checks.Warning(
msg=(
"ArrayField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint='Use a callable instead, e.g., use `list` instead of `[]`.',
obj=MyModel._meta.get_field('field'),
id='fields.E010',
)
])
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=None)
model = MyModel()
self.assertEqual(model.check(), [])
def test_nested_field_checks(self):
"""
Nested ArrayFields are permitted.
"""
class MyModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.CharField()))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
def test_choices_tuple_list(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
[
'Media',
[(['vinyl', 'cd'], 'Audio'), (('vhs', 'dvd'), 'Video')],
],
(['mp3', 'mp4'], 'Digital'),
],
)
self.assertEqual(MyModel._meta.get_field('field').check(), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
available_apps = ['postgres_tests']
def test_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(type(new.base_field), type(field.base_field))
self.assertIsNot(new.base_field, field.base_field)
def test_deconstruct_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.size, field.size)
def test_deconstruct_args(self):
field = ArrayField(models.CharField(max_length=20))
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.base_field.max_length, field.base_field.max_length)
def test_subclass_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')
field = ArrayFieldSubclass()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')
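    # For reference, deconstruct() on a plain ArrayField returns roughly
    # (name, 'django.contrib.postgres.fields.ArrayField', [],
    # {'base_field': <cloned base field>, 'size': None}); illustrative only,
    # the exact kwargs depend on how the field was declared.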
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_default_migrations",
})
def test_adding_field_with_default(self):
# See #22962
table_name = 'postgres_tests_integerarraydefaultmodel'
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
self.assertIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_index_migrations",
})
def test_adding_arrayfield_with_index(self):
"""
ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
"""
table_name = 'postgres_tests_chartextarrayindexmodel'
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
like_constraint_columns_list = [
v['columns']
for k, v in list(connection.introspection.get_constraints(cursor, table_name).items())
if k.endswith('_like')
]
# Only the CharField should have a LIKE index.
self.assertEqual(like_constraint_columns_list, [['char2']])
# All fields should have regular indexes.
with connection.cursor() as cursor:
indexes = [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table_name).values()
if c['index'] and len(c['columns']) == 1
]
self.assertIn('char', indexes)
self.assertIn('char2', indexes)
self.assertIn('text', indexes)
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
)
def test_dumping(self):
instance = IntegerArrayModel(field=[1, 2, None])
data = serializers.serialize('json', [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, [1, 2, None])
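    # As test_data above shows, an ArrayField value is serialized as a JSON
    # string where each item is rendered by the base field's value_to_string()
    # and None is kept as null; deserialization reverses this.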
class TestValidation(PostgreSQLSimpleTestCase):
def test_unbounded(self):
field = ArrayField(models.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, None], None)
self.assertEqual(cm.exception.code, 'item_invalid')
self.assertEqual(
cm.exception.message % cm.exception.params,
'Item 2 in the array did not validate: This field cannot be null.'
)
def test_blank_true(self):
field = ArrayField(models.IntegerField(blank=True, null=True))
# This should not raise a validation error
field.clean([1, None], None)
def test_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
field.clean([1, 2, 3], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, 2, 3, 4], None)
self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')
def test_nested_array_mismatch(self):
field = ArrayField(ArrayField(models.IntegerField()))
field.clean([[1, 2], [3, 4]], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([[1, 2], [3, 4, 5]], None)
self.assertEqual(cm.exception.code, 'nested_array_mismatch')
self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')
def test_with_base_field_error_params(self):
field = ArrayField(models.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc'], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
def test_with_validators(self):
field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
field.clean([1, 2], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([0], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 1 in the array did not validate: Ensure this value is greater than or equal to 1.'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 1, 'value': 0, 'limit_value': 1, 'show_value': 0})
class TestSimpleFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
field = SimpleArrayField(forms.CharField())
value = field.clean('a,b,c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_to_python_fail(self):
field = SimpleArrayField(forms.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,9')
self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a whole number.')
def test_validate_fail(self):
field = SimpleArrayField(forms.CharField(required=True))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,')
self.assertEqual(cm.exception.messages[0], 'Item 3 in the array did not validate: This field is required.')
def test_validate_fail_base_field_error_params(self):
field = SimpleArrayField(forms.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('abc,c,defg')
errors = cm.exception.error_list
self.assertEqual(len(errors), 2)
first_error = errors[0]
self.assertEqual(
first_error.message,
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(first_error.code, 'item_invalid')
self.assertEqual(first_error.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
second_error = errors[1]
self.assertEqual(
second_error.message,
'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).'
)
self.assertEqual(second_error.code, 'item_invalid')
self.assertEqual(second_error.params, {'nth': 3, 'value': 'defg', 'limit_value': 2, 'show_value': 4})
def test_validators_fail(self):
field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,bc,de')
self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a valid value.')
def test_delimiter(self):
field = SimpleArrayField(forms.CharField(), delimiter='|')
value = field.clean('a|b|c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_delimiter_with_nesting(self):
field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
value = field.clean('a,b|c,d')
self.assertEqual(value, [['a', 'b'], ['c', 'd']])
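    # Nested SimpleArrayFields need distinct delimiters: the outer field splits
    # on '|' and the inner one on the default ',', so 'a,b|c,d' becomes
    # [['a', 'b'], ['c', 'd']].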
def test_prepare_value(self):
field = SimpleArrayField(forms.CharField())
value = field.prepare_value(['a', 'b', 'c'])
self.assertEqual(value, 'a,b,c')
def test_max_length(self):
field = SimpleArrayField(forms.CharField(), max_length=2)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')
def test_min_length(self):
field = SimpleArrayField(forms.CharField(), min_length=4)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')
def test_required(self):
field = SimpleArrayField(forms.CharField(), required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('')
self.assertEqual(cm.exception.messages[0], 'This field is required.')
def test_model_field_formfield(self):
model_field = ArrayField(models.CharField(max_length=27))
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertIsInstance(form_field.base_field, forms.CharField)
self.assertEqual(form_field.base_field.max_length, 27)
def test_model_field_formfield_size(self):
model_field = ArrayField(models.CharField(max_length=27), size=4)
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertEqual(form_field.max_length, 4)
def test_model_field_choices(self):
model_field = ArrayField(models.IntegerField(choices=((1, 'A'), (2, 'B'))))
form_field = model_field.formfield()
self.assertEqual(form_field.clean('1,2'), [1, 2])
def test_already_converted_value(self):
field = SimpleArrayField(forms.CharField())
vals = ['a', 'b', 'c']
self.assertEqual(field.clean(vals), vals)
def test_has_changed(self):
field = SimpleArrayField(forms.IntegerField())
self.assertIs(field.has_changed([1, 2], [1, 2]), False)
self.assertIs(field.has_changed([1, 2], '1,2'), False)
self.assertIs(field.has_changed([1, 2], '1,2,3'), True)
self.assertIs(field.has_changed([1, 2], 'a,b'), True)
def test_has_changed_empty(self):
field = SimpleArrayField(forms.CharField())
self.assertIs(field.has_changed(None, None), False)
self.assertIs(field.has_changed(None, ''), False)
self.assertIs(field.has_changed(None, []), False)
self.assertIs(field.has_changed([], None), False)
self.assertIs(field.has_changed([], ''), False)
class TestSplitFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})
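    # A SplitArrayField of size=3 is rendered and submitted as three subwidgets
    # named <name>_0 through <name>_2, which clean() reassembles into one list.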
def test_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), required=True, size=3)
data = {'array_0': '', 'array_1': '', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['This field is required.']})
def test_remove_trailing_nulls(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)
data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid(), form.errors)
self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})
def test_remove_trailing_nulls_not_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(
forms.CharField(required=False),
size=2,
remove_trailing_nulls=True,
required=False,
)
data = {'array_0': '', 'array_1': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': []})
def test_required_field(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['Item 3 in the array did not validate: This field is required.']})
def test_invalid_integer(self):
msg = 'Item 2 in the array did not validate: Ensure this value is less than or equal to 100.'
with self.assertRaisesMessage(exceptions.ValidationError, msg):
SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])
# To locate the widget's template.
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
def test_rendering(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
self.assertHTMLEqual(str(SplitForm()), '''
<tr>
<th><label for="id_array_0">Array:</label></th>
<td>
<input id="id_array_0" name="array_0" type="text" required>
<input id="id_array_1" name="array_1" type="text" required>
<input id="id_array_2" name="array_2" type="text" required>
</td>
</tr>
''')
def test_invalid_char_length(self):
field = SplitArrayField(forms.CharField(max_length=2), size=3)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc', 'c', 'defg'])
self.assertEqual(cm.exception.messages, [
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).',
'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).',
])
def test_splitarraywidget_value_omitted_from_data(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2)
class Meta:
model = IntegerArrayModel
fields = ('field',)
form = Form({'field_0': '1', 'field_1': '2'})
self.assertEqual(form.errors, {})
obj = form.save(commit=False)
self.assertEqual(obj.field, [1, 2])
def test_splitarrayfield_has_changed(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2)
class Meta:
model = IntegerArrayModel
fields = ('field',)
tests = [
({}, {'field_0': '', 'field_1': ''}, True),
({'field': None}, {'field_0': '', 'field_1': ''}, True),
({'field': [1]}, {'field_0': '', 'field_1': ''}, True),
({'field': [1]}, {'field_0': '1', 'field_1': '0'}, True),
({'field': [1, 2]}, {'field_0': '1', 'field_1': '2'}, False),
({'field': [1, 2]}, {'field_0': 'a', 'field_1': 'b'}, True),
]
for initial, data, expected_result in tests:
with self.subTest(initial=initial, data=data):
obj = IntegerArrayModel(**initial)
form = Form(data, instance=obj)
self.assertIs(form.has_changed(), expected_result)
def test_splitarrayfield_remove_trailing_nulls_has_changed(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2, remove_trailing_nulls=True)
class Meta:
model = IntegerArrayModel
fields = ('field',)
tests = [
({}, {'field_0': '', 'field_1': ''}, False),
({'field': None}, {'field_0': '', 'field_1': ''}, False),
({'field': []}, {'field_0': '', 'field_1': ''}, False),
({'field': [1]}, {'field_0': '1', 'field_1': ''}, False),
]
for initial, data, expected_result in tests:
with self.subTest(initial=initial, data=data):
obj = IntegerArrayModel(**initial)
form = Form(data, instance=obj)
self.assertIs(form.has_changed(), expected_result)
class TestSplitFormWidget(PostgreSQLWidgetTestCase):
def test_get_context(self):
self.assertEqual(
SplitArrayWidget(forms.TextInput(), size=2).get_context('name', ['val1', 'val2']),
{
'widget': {
'name': 'name',
'is_hidden': False,
'required': False,
'value': "['val1', 'val2']",
'attrs': {},
'template_name': 'postgres/widgets/split_array.html',
'subwidgets': [
{
'name': 'name_0',
'is_hidden': False,
'required': False,
'value': 'val1',
'attrs': {},
'template_name': 'django/forms/widgets/text.html',
'type': 'text',
},
{
'name': 'name_1',
'is_hidden': False,
'required': False,
'value': 'val2',
'attrs': {},
'template_name': 'django/forms/widgets/text.html',
'type': 'text',
},
]
}
}
)
def test_checkbox_get_context_attrs(self):
context = SplitArrayWidget(
forms.CheckboxInput(),
size=2,
).get_context('name', [True, False])
self.assertEqual(context['widget']['value'], '[True, False]')
self.assertEqual(
[subwidget['attrs'] for subwidget in context['widget']['subwidgets']],
[{'checked': True}, {}]
)
def test_render(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2), 'array', None,
"""
<input name="array_0" type="text">
<input name="array_1" type="text">
"""
)
def test_render_attrs(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2),
'array', ['val1', 'val2'], attrs={'id': 'foo'},
html=(
"""
<input id="foo_0" name="array_0" type="text" value="val1">
<input id="foo_1" name="array_1" type="text" value="val2">
"""
)
)
def test_value_omitted_from_data(self):
widget = SplitArrayWidget(forms.TextInput(), size=2)
self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True)
self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
|
d1495db0a573f36c44d46cc08fd4801cf6920ba6b4348e4a988a9be68d13d2f7 | from django.db.models import (
CharField, F, Func, IntegerField, OuterRef, Q, Subquery, Value,
)
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import Cast, Concat, Substr
from django.test.utils import Approximate, ignore_warnings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from . import PostgreSQLTestCase
from .models import AggregateTestModel, HotelReservation, Room, StatTestModel
try:
from django.contrib.postgres.aggregates import (
ArrayAgg, BitAnd, BitOr, BoolAnd, BoolOr, Corr, CovarPop, JSONBAgg,
RegrAvgX, RegrAvgY, RegrCount, RegrIntercept, RegrR2, RegrSlope,
RegrSXX, RegrSXY, RegrSYY, StatAggregate, StringAgg,
)
from django.contrib.postgres.fields import ArrayField
except ImportError:
pass # psycopg2 is not installed
class TestGeneralAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.aggs = AggregateTestModel.objects.bulk_create([
AggregateTestModel(boolean_field=True, char_field='Foo1', integer_field=0),
AggregateTestModel(
boolean_field=False,
char_field='Foo2',
integer_field=1,
json_field={'lang': 'pl'},
),
AggregateTestModel(
boolean_field=False,
char_field='Foo4',
integer_field=2,
json_field={'lang': 'en'},
),
AggregateTestModel(
boolean_field=True,
char_field='Foo3',
integer_field=0,
json_field={'breed': 'collie'},
),
])
@ignore_warnings(category=RemovedInDjango50Warning)
def test_empty_result_set(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg('char_field'), []),
(ArrayAgg('integer_field'), []),
(ArrayAgg('boolean_field'), []),
(BitAnd('integer_field'), None),
(BitOr('integer_field'), None),
(BoolAnd('boolean_field'), None),
(BoolOr('boolean_field'), None),
(JSONBAgg('integer_field'), []),
(StringAgg('char_field', delimiter=';'), ''),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
def test_default_argument(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg('char_field', default=['<empty>']), ['<empty>']),
(ArrayAgg('integer_field', default=[0]), [0]),
(ArrayAgg('boolean_field', default=[False]), [False]),
(BitAnd('integer_field', default=0), 0),
(BitOr('integer_field', default=0), 0),
(BoolAnd('boolean_field', default=False), False),
(BoolOr('boolean_field', default=False), False),
(JSONBAgg('integer_field', default=Value('["<empty>"]')), ['<empty>']),
(StringAgg('char_field', delimiter=';', default=Value('<empty>')), '<empty>'),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
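    # The default argument (available since aggregates gained `default` in
    # Django 4.0) is returned instead of the legacy empty values when there are
    # no rows; returning []/'' without a default is what the
    # RemovedInDjango50Warning handling above and below refers to.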
def test_convert_value_deprecation(self):
AggregateTestModel.objects.all().delete()
queryset = AggregateTestModel.objects.all()
with self.assertWarnsMessage(RemovedInDjango50Warning, ArrayAgg.deprecation_msg):
queryset.aggregate(aggregation=ArrayAgg('boolean_field'))
with self.assertWarnsMessage(RemovedInDjango50Warning, JSONBAgg.deprecation_msg):
queryset.aggregate(aggregation=JSONBAgg('integer_field'))
with self.assertWarnsMessage(RemovedInDjango50Warning, StringAgg.deprecation_msg):
queryset.aggregate(aggregation=StringAgg('char_field', delimiter=';'))
        # No warnings are raised if a default argument is provided.
self.assertEqual(
queryset.aggregate(aggregation=ArrayAgg('boolean_field', default=None)),
{'aggregation': None},
)
self.assertEqual(
queryset.aggregate(aggregation=JSONBAgg('integer_field', default=None)),
{'aggregation': None},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg('char_field', delimiter=';', default=None),
),
{'aggregation': None},
)
self.assertEqual(
queryset.aggregate(aggregation=ArrayAgg('boolean_field', default=Value([]))),
{'aggregation': []},
)
self.assertEqual(
queryset.aggregate(aggregation=JSONBAgg('integer_field', default=Value('[]'))),
{'aggregation': []},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg('char_field', delimiter=';', default=Value('')),
),
{'aggregation': ''},
)
def test_array_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field'))
self.assertEqual(values, {'arrayagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']})
def test_array_agg_charfield_ordering(self):
ordering_test_cases = (
(F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
(F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
([F('boolean_field'), F('char_field').desc()], ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
((F('boolean_field'), F('char_field').desc()), ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(Concat('char_field', Value('@')), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
(Concat('char_field', Value('@')).desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(
(Substr('char_field', 1, 1), F('integer_field'), Substr('char_field', 4, 1).desc()),
['Foo3', 'Foo1', 'Foo2', 'Foo4'],
),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('char_field', ordering=ordering)
)
self.assertEqual(values, {'arrayagg': expected_output})
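    # The ordering argument becomes an ORDER BY clause inside the aggregate,
    # roughly ARRAY_AGG(char_field ORDER BY ...) in the generated SQL.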
def test_array_agg_integerfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('integer_field'))
self.assertEqual(values, {'arrayagg': [0, 1, 2, 0]})
def test_array_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('integer_field', ordering=F('integer_field').desc())
)
self.assertEqual(values, {'arrayagg': [2, 1, 0, 0]})
def test_array_agg_booleanfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('boolean_field'))
self.assertEqual(values, {'arrayagg': [True, False, False, True]})
def test_array_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F('boolean_field').asc(), [False, False, True, True]),
(F('boolean_field').desc(), [True, True, False, False]),
(F('boolean_field'), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('boolean_field', ordering=ordering)
)
self.assertEqual(values, {'arrayagg': expected_output})
def test_array_agg_jsonfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform('lang', 'json_field'),
filter=Q(json_field__lang__isnull=False),
),
)
self.assertEqual(values, {'arrayagg': ['pl', 'en']})
def test_array_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform('lang', 'json_field'),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform('lang', 'json_field'),
),
)
self.assertEqual(values, {'arrayagg': ['en', 'pl']})
def test_array_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('integer_field', filter=Q(integer_field__gt=0)),
)
self.assertEqual(values, {'arrayagg': [1, 2]})
def test_array_agg_lookups(self):
aggr1 = AggregateTestModel.objects.create()
aggr2 = AggregateTestModel.objects.create()
StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0)
StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0)
qs = StatTestModel.objects.values('related_field').annotate(
array=ArrayAgg('int1')
).filter(array__overlap=[2]).values_list('array', flat=True)
self.assertCountEqual(qs.get(), [1, 2])
def test_bit_and_general(self):
values = AggregateTestModel.objects.filter(
integer_field__in=[0, 1]).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 0})
def test_bit_and_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 1})
def test_bit_and_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 0})
def test_bit_or_general(self):
values = AggregateTestModel.objects.filter(
integer_field__in=[0, 1]).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 1})
def test_bit_or_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 1})
def test_bit_or_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 0})
def test_bool_and_general(self):
values = AggregateTestModel.objects.aggregate(booland=BoolAnd('boolean_field'))
self.assertEqual(values, {'booland': False})
def test_bool_and_q_object(self):
values = AggregateTestModel.objects.aggregate(
booland=BoolAnd(Q(integer_field__gt=2)),
)
self.assertEqual(values, {'booland': False})
def test_bool_or_general(self):
values = AggregateTestModel.objects.aggregate(boolor=BoolOr('boolean_field'))
self.assertEqual(values, {'boolor': True})
def test_bool_or_q_object(self):
values = AggregateTestModel.objects.aggregate(
boolor=BoolOr(Q(integer_field__gt=2)),
)
self.assertEqual(values, {'boolor': False})
def test_string_agg_requires_delimiter(self):
with self.assertRaises(TypeError):
AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field'))
def test_string_agg_delimiter_escaping(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter="'"))
self.assertEqual(values, {'stringagg': "Foo1'Foo2'Foo4'Foo3"})
def test_string_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=';'))
self.assertEqual(values, {'stringagg': 'Foo1;Foo2;Foo4;Foo3'})
def test_string_agg_charfield_ordering(self):
ordering_test_cases = (
(F('char_field').desc(), 'Foo4;Foo3;Foo2;Foo1'),
(F('char_field').asc(), 'Foo1;Foo2;Foo3;Foo4'),
(F('char_field'), 'Foo1;Foo2;Foo3;Foo4'),
('char_field', 'Foo1;Foo2;Foo3;Foo4'),
('-char_field', 'Foo4;Foo3;Foo2;Foo1'),
(Concat('char_field', Value('@')), 'Foo1;Foo2;Foo3;Foo4'),
(Concat('char_field', Value('@')).desc(), 'Foo4;Foo3;Foo2;Foo1'),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg('char_field', delimiter=';', ordering=ordering)
)
self.assertEqual(values, {'stringagg': expected_output})
def test_string_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
KeyTextTransform('lang', 'json_field'),
delimiter=';',
ordering=KeyTextTransform('lang', 'json_field'),
output_field=CharField(),
),
)
self.assertEqual(values, {'stringagg': 'en;pl'})
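    # KeyTransform compiles to the jsonb -> operator (the result stays jsonb),
    # while KeyTextTransform compiles to ->> (the result is text), which is why
    # StringAgg uses the text variant plus an explicit CharField output_field.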
def test_string_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
'char_field',
delimiter=';',
filter=Q(char_field__endswith='3') | Q(char_field__endswith='1'),
)
)
self.assertEqual(values, {'stringagg': 'Foo1;Foo3'})
def test_orderable_agg_alternative_fields(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('integer_field', ordering=F('char_field').asc())
)
self.assertEqual(values, {'arrayagg': [0, 1, 0, 2]})
def test_jsonb_agg(self):
values = AggregateTestModel.objects.aggregate(jsonbagg=JSONBAgg('char_field'))
self.assertEqual(values, {'jsonbagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']})
def test_jsonb_agg_charfield_ordering(self):
ordering_test_cases = (
(F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
(F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(Concat('char_field', Value('@')), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
(Concat('char_field', Value('@')).desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg('char_field', ordering=ordering),
)
self.assertEqual(values, {'jsonbagg': expected_output})
def test_jsonb_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg('integer_field', ordering=F('integer_field').desc()),
)
self.assertEqual(values, {'jsonbagg': [2, 1, 0, 0]})
def test_jsonb_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F('boolean_field').asc(), [False, False, True, True]),
(F('boolean_field').desc(), [True, True, False, False]),
(F('boolean_field'), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg('boolean_field', ordering=ordering),
)
self.assertEqual(values, {'jsonbagg': expected_output})
def test_jsonb_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg(
KeyTransform('lang', 'json_field'),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform('lang', 'json_field'),
),
)
self.assertEqual(values, {'jsonbagg': ['en', 'pl']})
def test_jsonb_agg_key_index_transforms(self):
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 28),
]
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
requirements={'double_bed': True, 'parking': True},
)
HotelReservation.objects.create(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
requirements={'double_bed': False, 'sea_view': True, 'parking': False},
)
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[2].date()),
start=datetimes[0],
end=datetimes[2],
room=room101,
requirements={'sea_view': False},
)
values = Room.objects.annotate(
requirements=JSONBAgg(
'hotelreservation__requirements',
ordering='-hotelreservation__start',
)
).filter(requirements__0__sea_view=True).values('number', 'requirements')
self.assertSequenceEqual(values, [
{'number': 102, 'requirements': [
{'double_bed': False, 'sea_view': True, 'parking': False},
{'double_bed': True, 'parking': True},
]},
])
def test_string_agg_array_agg_ordering_in_subquery(self):
stats = []
for i, agg in enumerate(AggregateTestModel.objects.order_by('char_field')):
stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1))
stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i))
StatTestModel.objects.bulk_create(stats)
for aggregate, expected_result in (
(
ArrayAgg('stattestmodel__int1', ordering='-stattestmodel__int2'),
[('Foo1', [0, 1]), ('Foo2', [1, 2]), ('Foo3', [2, 3]), ('Foo4', [3, 4])],
),
(
StringAgg(
Cast('stattestmodel__int1', CharField()),
delimiter=';',
ordering='-stattestmodel__int2',
),
[('Foo1', '0;1'), ('Foo2', '1;2'), ('Foo3', '2;3'), ('Foo4', '3;4')],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = AggregateTestModel.objects.filter(
pk=OuterRef('pk'),
).annotate(agg=aggregate).values('agg')
values = AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
).order_by('char_field').values_list('char_field', 'agg')
self.assertEqual(list(values), expected_result)
def test_string_agg_array_agg_filter_in_subquery(self):
StatTestModel.objects.bulk_create([
StatTestModel(related_field=self.aggs[0], int1=0, int2=5),
StatTestModel(related_field=self.aggs[0], int1=1, int2=4),
StatTestModel(related_field=self.aggs[0], int1=2, int2=3),
])
for aggregate, expected_result in (
(
ArrayAgg('stattestmodel__int1', filter=Q(stattestmodel__int2__gt=3)),
[('Foo1', [0, 1]), ('Foo2', None)],
),
(
StringAgg(
Cast('stattestmodel__int2', CharField()),
delimiter=';',
filter=Q(stattestmodel__int1__lt=2),
),
[('Foo1', '5;4'), ('Foo2', None)],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = AggregateTestModel.objects.filter(
pk=OuterRef('pk'),
).annotate(agg=aggregate).values('agg')
values = AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
).filter(
char_field__in=['Foo1', 'Foo2'],
).order_by('char_field').values_list('char_field', 'agg')
self.assertEqual(list(values), expected_result)
def test_string_agg_filter_in_subquery_with_exclude(self):
subquery = AggregateTestModel.objects.annotate(
stringagg=StringAgg(
'char_field',
delimiter=';',
filter=Q(char_field__endswith='1'),
)
).exclude(stringagg='').values('id')
self.assertSequenceEqual(
AggregateTestModel.objects.filter(id__in=Subquery(subquery)),
[self.aggs[0]],
)
def test_ordering_isnt_cleared_for_array_subquery(self):
inner_qs = AggregateTestModel.objects.order_by('-integer_field')
qs = AggregateTestModel.objects.annotate(
integers=Func(
Subquery(inner_qs.values('integer_field')),
function='ARRAY',
output_field=ArrayField(base_field=IntegerField()),
),
)
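        # Wrapping the subquery in ARRAY(...) should keep its ORDER BY, so the
        # annotated array comes back in descending integer_field order.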
self.assertSequenceEqual(
qs.first().integers,
inner_qs.values_list('integer_field', flat=True),
)
class TestAggregateDistinct(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
AggregateTestModel.objects.create(char_field='Foo')
AggregateTestModel.objects.create(char_field='Foo')
AggregateTestModel.objects.create(char_field='Bar')
def test_string_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=False))
self.assertEqual(values['stringagg'].count('Foo'), 2)
self.assertEqual(values['stringagg'].count('Bar'), 1)
def test_string_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=True))
self.assertEqual(values['stringagg'].count('Foo'), 1)
self.assertEqual(values['stringagg'].count('Bar'), 1)
def test_array_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=False))
self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo', 'Foo'])
def test_array_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=True))
self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo'])
def test_jsonb_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg('char_field', distinct=False),
)
self.assertEqual(sorted(values['jsonbagg']), ['Bar', 'Foo', 'Foo'])
def test_jsonb_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg('char_field', distinct=True),
)
self.assertEqual(sorted(values['jsonbagg']), ['Bar', 'Foo'])
class TestStatisticsAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
StatTestModel.objects.create(
int1=1,
int2=3,
related_field=AggregateTestModel.objects.create(integer_field=0),
)
StatTestModel.objects.create(
int1=2,
int2=2,
related_field=AggregateTestModel.objects.create(integer_field=1),
)
StatTestModel.objects.create(
int1=3,
int2=1,
related_field=AggregateTestModel.objects.create(integer_field=2),
)
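        # The three rows satisfy int2 = 4 - int1, a perfect negative linear
        # relationship, which the assertions below rely on (corr = -1,
        # slope = -1, intercept = 4, r2 = 1).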
# Tests for base class (StatAggregate)
def test_missing_arguments_raises_exception(self):
with self.assertRaisesMessage(ValueError, 'Both y and x must be provided.'):
StatAggregate(x=None, y=None)
def test_correct_source_expressions(self):
func = StatAggregate(x='test', y=13)
self.assertIsInstance(func.source_expressions[0], Value)
self.assertIsInstance(func.source_expressions[1], F)
def test_alias_is_required(self):
class SomeFunc(StatAggregate):
function = 'TEST'
with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
StatTestModel.objects.aggregate(SomeFunc(y='int2', x='int1'))
# Test aggregates
def test_empty_result_set(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y='int2', x='int1'), None),
(CovarPop(y='int2', x='int1'), None),
(CovarPop(y='int2', x='int1', sample=True), None),
(RegrAvgX(y='int2', x='int1'), None),
(RegrAvgY(y='int2', x='int1'), None),
(RegrCount(y='int2', x='int1'), 0),
(RegrIntercept(y='int2', x='int1'), None),
(RegrR2(y='int2', x='int1'), None),
(RegrSlope(y='int2', x='int1'), None),
(RegrSXX(y='int2', x='int1'), None),
(RegrSXY(y='int2', x='int1'), None),
(RegrSYY(y='int2', x='int1'), None),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
def test_default_argument(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y='int2', x='int1', default=0), 0),
(CovarPop(y='int2', x='int1', default=0), 0),
(CovarPop(y='int2', x='int1', sample=True, default=0), 0),
(RegrAvgX(y='int2', x='int1', default=0), 0),
(RegrAvgY(y='int2', x='int1', default=0), 0),
# RegrCount() doesn't support the default argument.
(RegrIntercept(y='int2', x='int1', default=0), 0),
(RegrR2(y='int2', x='int1', default=0), 0),
(RegrSlope(y='int2', x='int1', default=0), 0),
(RegrSXX(y='int2', x='int1', default=0), 0),
(RegrSXY(y='int2', x='int1', default=0), 0),
(RegrSYY(y='int2', x='int1', default=0), 0),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {'aggregation': expected_result})
def test_corr_general(self):
values = StatTestModel.objects.aggregate(corr=Corr(y='int2', x='int1'))
self.assertEqual(values, {'corr': -1.0})
def test_covar_pop_general(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1'))
self.assertEqual(values, {'covarpop': Approximate(-0.66, places=1)})
def test_covar_pop_sample(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1', sample=True))
self.assertEqual(values, {'covarpop': -1.0})
def test_regr_avgx_general(self):
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y='int2', x='int1'))
self.assertEqual(values, {'regravgx': 2.0})
def test_regr_avgy_general(self):
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y='int2', x='int1'))
self.assertEqual(values, {'regravgy': 2.0})
def test_regr_count_general(self):
values = StatTestModel.objects.aggregate(regrcount=RegrCount(y='int2', x='int1'))
self.assertEqual(values, {'regrcount': 3})
def test_regr_count_default(self):
msg = 'RegrCount does not allow default.'
with self.assertRaisesMessage(TypeError, msg):
RegrCount(y='int2', x='int1', default=0)
def test_regr_intercept_general(self):
values = StatTestModel.objects.aggregate(regrintercept=RegrIntercept(y='int2', x='int1'))
self.assertEqual(values, {'regrintercept': 4})
def test_regr_r2_general(self):
values = StatTestModel.objects.aggregate(regrr2=RegrR2(y='int2', x='int1'))
self.assertEqual(values, {'regrr2': 1})
def test_regr_slope_general(self):
values = StatTestModel.objects.aggregate(regrslope=RegrSlope(y='int2', x='int1'))
self.assertEqual(values, {'regrslope': -1})
def test_regr_sxx_general(self):
values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y='int2', x='int1'))
self.assertEqual(values, {'regrsxx': 2.0})
def test_regr_sxy_general(self):
values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y='int2', x='int1'))
self.assertEqual(values, {'regrsxy': -2.0})
def test_regr_syy_general(self):
values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y='int2', x='int1'))
self.assertEqual(values, {'regrsyy': 2.0})
def test_regr_avgx_with_related_obj_and_number_as_argument(self):
"""
This is more complex test to check if JOIN on field and
number as argument works as expected.
"""
values = StatTestModel.objects.aggregate(complex_regravgx=RegrAvgX(y=5, x='related_field__integer_field'))
self.assertEqual(values, {'complex_regravgx': 1.0})
"""
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.db import connection
from django.db.models import F, Value
from django.test import modify_settings, skipUnlessDBFeature
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import Character, Line, LineSavedSearch, Scene
try:
from django.contrib.postgres.search import (
SearchConfig, SearchHeadline, SearchQuery, SearchRank, SearchVector,
)
except ImportError:
pass
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(scene='Scene 10', setting='The dark forest of Ewing')
cls.minstrel = Character.objects.create(name='Minstrel')
verses = [
(
'Bravely bold Sir Robin, rode forth from Camelot. '
'He was not afraid to die, o Brave Sir Robin. '
'He was not at all afraid to be killed in nasty ways. '
'Brave, brave, brave, brave Sir Robin'
),
(
'He was not in the least bit scared to be mashed into a pulp, '
'Or to have his eyes gouged out, and his elbows broken. '
'To have his kneecaps split, and his body burned away, '
'And his limbs all hacked and mangled, brave Sir Robin!'
),
(
'His head smashed in and his heart cut out, '
'And his liver removed and his bowels unplugged, '
'And his nostrils ripped and his bottom burned off,'
'And his --'
),
]
cls.verses = [Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
) for verse in verses]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(scene='Scene 5', setting="Sir Bedemir's Castle")
bedemir = Character.objects.create(name='Bedemir')
crowd = Character.objects.create(name='Crowd')
witch = Character.objects.create(name='Witch')
duck = Character.objects.create(name='Duck')
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='We shall use my larger scales!',
dialogue_config='english',
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='Right, remove the supports!',
dialogue_config='english',
)
cls.duck = Line.objects.create(scene=cls.witch_scene, character=duck, dialogue=None)
cls.crowd = Line.objects.create(scene=cls.witch_scene, character=crowd, dialogue='A witch! A witch!')
cls.witch = Line.objects.create(scene=cls.witch_scene, character=witch, dialogue="It's a fair cop.")
trojan_rabbit = Scene.objects.create(scene='Scene 8', setting="The castle of Our Master Ruiz' de lu la Ramper")
guards = Character.objects.create(name='French Guards')
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue='Oh. Un beau cadeau. Oui oui.',
dialogue_config='french',
)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
def test_non_exact_match(self):
searched = Line.objects.filter(dialogue__search='hearts')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.filter(dialogue__search='heart bowel')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search='Robin killed')
self.assertSequenceEqual(searched, [self.verse0])
def test_search_query_config(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery('nostrils', config='simple'),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_search_with_F_expression(self):
# Non-matching query.
LineSavedSearch.objects.create(line=self.verse1, query='hearts')
# Matching query.
match = LineSavedSearch.objects.create(line=self.verse1, query='elbows')
for query_expression in [F('query'), SearchQuery(F('query'))]:
with self.subTest(query_expression):
searched = LineSavedSearch.objects.filter(
line__dialogue__search=query_expression,
)
self.assertSequenceEqual(searched, [match])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('Robin killed'))
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
def test_single_coalesce_expression(self):
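        # SearchVector is expected to wrap each column in a single COALESCE()
        # for NULL handling; nested COALESCE(COALESCE(...)) would indicate
        # redundant double wrapping.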
searched = Line.objects.annotate(search=SearchVector('dialogue')).filter(search='cadeaux')
self.assertNotIn('COALESCE(COALESCE', str(searched.query))
class SearchConfigTests(PostgreSQLSimpleTestCase):
def test_from_parameter(self):
self.assertIsNone(SearchConfig.from_parameter(None))
self.assertEqual(SearchConfig.from_parameter('foo'), SearchConfig('foo'))
self.assertEqual(SearchConfig.from_parameter(SearchConfig('bar')), SearchConfig('bar'))
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='Forest')
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart forest')
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector('character__name', 'dialogue'),
).filter(search='minstrel')
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='minstrelbravely')
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector('id'),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
def test_phrase_search(self):
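        # A 'phrase' query (phraseto_tsquery) only matches when the terms
        # appear adjacent and in the given order.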
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('burned body his away', search_type='phrase'))
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(search=SearchQuery('his body burned away', search_type='phrase'))
self.assertSequenceEqual(searched, [self.verse1])
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
)
searched = line_qs.filter(
search=SearchQuery('cadeau beau un', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery('un beau cadeau', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
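        # A 'raw' query is passed to to_tsquery(), so tsquery operators such
        # as & (AND) and ! (NOT) can be used directly.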
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('Robin', search_type='raw'))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(search=SearchQuery("Robin & !'Camelot'", search_type='raw'))
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue', config='french'))
searched = line_qs.filter(
search=SearchQuery("'cadeaux' & 'beaux'", search_type='raw', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
@skipUnlessDBFeature('has_websearch_to_tsquery')
def test_web_search(self):
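        # websearch_to_tsquery() understands quoted phrases, OR, and -term
        # exclusion; it is only available on PostgreSQL 11+, hence the
        # feature flag above.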
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(
search=SearchQuery(
'"burned body" "split kneecaps"',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery(
'"body burned" "kneecaps split" -"nostrils"',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [self.verse1])
searched = line_qs.filter(
search=SearchQuery(
'"Sir Robin" ("kneecaps" OR "Camelot")',
search_type='websearch',
),
)
self.assertSequenceEqual(searched, [self.verse0, self.verse1])
@skipUnlessDBFeature('has_websearch_to_tsquery')
def test_web_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
)
searched = line_qs.filter(
search=SearchQuery('cadeau -beau', search_type='websearch', config='french'),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery('beau cadeau', search_type='websearch', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(ValueError, "Unknown search_type argument 'foo'."):
SearchQuery('kneecaps', search_type='foo')
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search=SearchQuery('cadeaux', config=F('dialogue_config')))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting') + SearchVector('character__name'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector('scene__setting') +
SearchVector('character__name') +
SearchVector('dialogue')
),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_vector_combined_mismatch(self):
msg = (
'SearchVector can only be combined with other SearchVector '
'instances, got NoneType.'
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None + SearchVector('character__name'))
def test_combine_different_vector_configs(self):
searched = Line.objects.annotate(
search=(
SearchVector('dialogue', config='english') +
SearchVector('dialogue', config='french')
),
).filter(
search=SearchQuery('cadeaux', config='french') | SearchQuery('nostrils')
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales') & SearchQuery('nostrils'))
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('shall') & SearchQuery('use') & SearchQuery('larger'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils'))
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils') | SearchQuery('Sir Robin')
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(character=self.minstrel, dialogue__search=~SearchQuery('kneecaps'))
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('cadeau', config='french') |
SearchQuery('nostrils', config='english')
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_combined_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('nostrils', config='simple') &
SearchQuery('bowels', config='simple')
),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_combine_raw_phrase(self):
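        # 'burn:*' is a raw tsquery prefix match (it matches 'burned' in
        # verses 1 and 2), while the phrase query picks up verse 0.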
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('burn:*', search_type='raw', config='simple') |
SearchQuery('rode forth from Camelot', search_type='phrase')
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = (
'SearchQuery can only be combined with other SearchQuery '
'instances, got NoneType.'
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery('kneecaps'))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery('kneecaps'))
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank('dialogue', 'brave sir robin'),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector('dialogue', weight='A') + SearchVector('character__name', weight='D')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
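        # The weights list appears to map to the labels [D, C, B, A]; raising
        # the D (dialogue) weight above A (character name) favours the crowd's
        # dialogue match over the Witch character-name match.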
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch'), weights=[1, 0, 0, 0.5]),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
def test_ranking_chaining(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).filter(rank__gt=0.3)
self.assertSequenceEqual(searched, [self.verse0])
def test_cover_density_ranking(self):
not_dense_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue=(
'Bravely taking to his feet, he beat a very brave retreat. '
'A brave retreat brave Sir Robin.'
)
)
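        # cover_density=True uses ts_rank_cd(), which ranks by "cover
        # density", i.e. how close together the matched query terms appear.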
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(
SearchVector('dialogue'),
SearchQuery('brave robin'),
cover_density=True,
),
).order_by('rank', '-pk')
self.assertSequenceEqual(
searched,
[self.verse2, not_dense_verse, self.verse1, self.verse0],
)
def test_ranking_with_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue='A brave retreat brave Sir Robin.',
)
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(
SearchVector('dialogue'),
SearchQuery('brave sir robin'),
# Divide the rank by the document length.
normalization=2,
),
).order_by('rank')
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
def test_ranking_with_masked_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue='A brave retreat brave Sir Robin.',
)
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(
SearchVector('dialogue'),
SearchQuery('brave sir robin'),
# Divide the rank by the document length and by the number of
# unique words in document.
normalization=Value(2).bitor(Value(8)),
),
).order_by('rank')
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
class SearchVectorIndexTests(PostgreSQLTestCase):
def test_search_vector_index(self):
"""SearchVector generates IMMUTABLE SQL in order to be indexable."""
# This test should be moved to test_indexes and use a functional
# index instead once support lands (see #26167).
query = Line.objects.all().query
resolved = SearchVector('id', 'dialogue', config='english').resolve_expression(query)
compiler = query.get_compiler(connection.alias)
sql, params = resolved.as_sql(compiler, connection)
# Indexed function must be IMMUTABLE.
with connection.cursor() as cursor:
cursor.execute(
'CREATE INDEX search_vector_index ON %s USING GIN (%s)' % (Line._meta.db_table, sql),
params,
)
class SearchQueryTests(PostgreSQLSimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery('a'), "~SearchQuery(Value('a'))"),
(
(SearchQuery('a') | SearchQuery('b')) & (SearchQuery('c') | SearchQuery('d')),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d'))))",
),
(
SearchQuery('a') & (SearchQuery('b') | SearchQuery('c')),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) || "
"SearchQuery(Value('c'))))",
),
(
(SearchQuery('a') | SearchQuery('b')) & SearchQuery('c'),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"SearchQuery(Value('c')))"
),
(
SearchQuery('a') & (SearchQuery('b') & (SearchQuery('c') | SearchQuery('d'))),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d')))))",
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase):
def test_headline(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
F('dialogue'),
SearchQuery('brave sir robin'),
config=SearchConfig('english'),
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<b>Robin</b>. He was not at all afraid to be killed in nasty '
'ways. <b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> '
'<b>Sir</b> <b>Robin</b>',
)
def test_headline_untyped_args(self):
searched = Line.objects.annotate(
headline=SearchHeadline('dialogue', 'killed', config='english'),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'Robin. He was not at all afraid to be <b>killed</b> in nasty '
'ways. Brave, brave, brave, brave Sir Robin',
)
def test_headline_with_config(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('cadeaux', config='french'),
config='french',
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
'Oh. Un beau <b>cadeau</b>. Oui oui.',
)
def test_headline_with_config_from_field(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('cadeaux', config=F('dialogue_config')),
config=F('dialogue_config'),
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
'Oh. Un beau <b>cadeau</b>. Oui oui.',
)
def test_headline_separator_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
'brave sir robin',
start_sel='<span>',
stop_sel='</span>',
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<span>Robin</span>. He was not at all afraid to be killed in '
'nasty ways. <span>Brave</span>, <span>brave</span>, <span>brave'
'</span>, <span>brave</span> <span>Sir</span> <span>Robin</span>',
)
def test_headline_highlight_all_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('brave sir robin', config='english'),
highlight_all=True,
),
).get(pk=self.verse0.pk)
self.assertIn(
'<b>Bravely</b> bold <b>Sir</b> <b>Robin</b>, rode forth from '
'Camelot. He was not afraid to die, o ',
searched.headline,
)
def test_headline_short_word_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('Camelot', config='english'),
short_word=5,
min_words=8,
),
).get(pk=self.verse0.pk)
self.assertEqual(searched.headline, (
'<b>Camelot</b>. He was not afraid to die, o Brave Sir Robin. He '
'was not at all afraid'
))
def test_headline_fragments_words_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
'dialogue',
SearchQuery('brave sir robin', config='english'),
fragment_delimiter='...<br>',
max_fragments=4,
max_words=3,
min_words=1,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
'<b>Sir</b> <b>Robin</b>, rode...<br>'
'<b>Brave</b> <b>Sir</b> <b>Robin</b>...<br>'
'<b>Brave</b>, <b>brave</b>, <b>brave</b>...<br>'
'<b>brave</b> <b>Sir</b> <b>Robin</b>',
)
import unittest
from unittest import mock
from migrations.test_base import OperationTestBase
from django.db import (
IntegrityError, NotSupportedError, connection, transaction,
)
from django.db.migrations.state import ProjectState
from django.db.models import CheckConstraint, Index, Q, UniqueConstraint
from django.db.utils import ProgrammingError
from django.test import modify_settings, override_settings, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from . import PostgreSQLTestCase
try:
from django.contrib.postgres.indexes import BrinIndex, BTreeIndex
from django.contrib.postgres.operations import (
AddConstraintNotValid, AddIndexConcurrently, BloomExtension,
CreateCollation, CreateExtension, RemoveCollation,
RemoveIndexConcurrently, ValidateConstraint,
)
except ImportError:
pass
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
@modify_settings(INSTALLED_APPS={'append': 'migrations'})
class AddIndexConcurrentlyTests(OperationTestBase):
app_label = 'test_add_concurrently'
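    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block, so the
    # operation requires a non-atomic schema editor/migration.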
def test_requires_atomic_false(self):
project_state = self.set_up_test_model(self.app_label)
new_state = project_state.clone()
operation = AddIndexConcurrently(
'Pony',
Index(fields=['pink'], name='pony_pink_idx'),
)
msg = (
'The AddIndexConcurrently operation cannot be executed inside '
'a transaction (set atomic = False on the migration).'
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
def test_add(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = '%s_pony' % self.app_label
index = Index(fields=['pink'], name='pony_pink_idx')
new_state = project_state.clone()
operation = AddIndexConcurrently('Pony', index)
self.assertEqual(
operation.describe(),
'Concurrently create index pony_pink_idx on field(s) pink of '
'model Pony'
)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 1)
self.assertIndexNotExists(table_name, ['pink'])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertIndexExists(table_name, ['pink'])
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertIndexNotExists(table_name, ['pink'])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'AddIndexConcurrently')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'model_name': 'Pony', 'index': index})
def test_add_other_index_type(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = '%s_pony' % self.app_label
new_state = project_state.clone()
operation = AddIndexConcurrently(
'Pony',
BrinIndex(fields=['pink'], name='pony_pink_brin_idx'),
)
self.assertIndexNotExists(table_name, ['pink'])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertIndexExists(table_name, ['pink'], index_type='brin')
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertIndexNotExists(table_name, ['pink'])
def test_add_with_options(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = '%s_pony' % self.app_label
new_state = project_state.clone()
index = BTreeIndex(fields=['pink'], name='pony_pink_btree_idx', fillfactor=70)
operation = AddIndexConcurrently('Pony', index)
self.assertIndexNotExists(table_name, ['pink'])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertIndexExists(table_name, ['pink'], index_type='btree')
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertIndexNotExists(table_name, ['pink'])
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
@modify_settings(INSTALLED_APPS={'append': 'migrations'})
class RemoveIndexConcurrentlyTests(OperationTestBase):
app_label = 'test_rm_concurrently'
def test_requires_atomic_false(self):
project_state = self.set_up_test_model(self.app_label, index=True)
new_state = project_state.clone()
operation = RemoveIndexConcurrently('Pony', 'pony_pink_idx')
msg = (
'The RemoveIndexConcurrently operation cannot be executed inside '
'a transaction (set atomic = False on the migration).'
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
def test_remove(self):
project_state = self.set_up_test_model(self.app_label, index=True)
table_name = '%s_pony' % self.app_label
self.assertTableExists(table_name)
new_state = project_state.clone()
operation = RemoveIndexConcurrently('Pony', 'pony_pink_idx')
self.assertEqual(
operation.describe(),
'Concurrently remove index pony_pink_idx from Pony',
)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 0)
self.assertIndexExists(table_name, ['pink'])
# Remove index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertIndexNotExists(table_name, ['pink'])
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertIndexExists(table_name, ['pink'])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'RemoveIndexConcurrently')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'model_name': 'Pony', 'name': 'pony_pink_idx'})
class NoMigrationRouter:
def allow_migrate(self, db, app_label, **hints):
return False
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
class CreateExtensionTests(PostgreSQLTestCase):
    app_label = 'test_allow_create_extension'
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = CreateExtension('tablefunc')
project_state = ProjectState()
new_state = project_state.clone()
# Don't create an extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 0)
def test_allow_migrate(self):
operation = CreateExtension('tablefunc')
self.assertEqual(operation.migration_name_fragment, 'create_extension_tablefunc')
project_state = ProjectState()
new_state = project_state.clone()
# Create an extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 4)
self.assertIn('CREATE EXTENSION IF NOT EXISTS', captured_queries[1]['sql'])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 2)
self.assertIn('DROP EXTENSION IF EXISTS', captured_queries[1]['sql'])
def test_create_existing_extension(self):
operation = BloomExtension()
self.assertEqual(operation.migration_name_fragment, 'create_extension_bloom')
project_state = ProjectState()
new_state = project_state.clone()
# Don't create an existing extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 3)
self.assertIn('SELECT', captured_queries[0]['sql'])
def test_drop_nonexistent_extension(self):
operation = CreateExtension('tablefunc')
project_state = ProjectState()
new_state = project_state.clone()
# Don't drop a nonexistent extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('SELECT', captured_queries[0]['sql'])
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
class CreateCollationTests(PostgreSQLTestCase):
app_label = 'test_allow_create_collation'
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = CreateCollation('C_test', locale='C')
project_state = ProjectState()
new_state = project_state.clone()
# Don't create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 0)
def test_create(self):
operation = CreateCollation('C_test', locale='C')
self.assertEqual(operation.migration_name_fragment, 'create_collation_c_test')
self.assertEqual(operation.describe(), 'Create collation C_test')
project_state = ProjectState()
new_state = project_state.clone()
# Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('CREATE COLLATION', captured_queries[0]['sql'])
# Creating the same collation raises an exception.
with self.assertRaisesMessage(ProgrammingError, 'already exists'):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('DROP COLLATION', captured_queries[0]['sql'])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'CreateCollation')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'name': 'C_test', 'locale': 'C'})
@skipUnlessDBFeature('supports_non_deterministic_collations')
def test_create_non_deterministic_collation(self):
operation = CreateCollation(
'case_insensitive_test',
'und-u-ks-level2',
provider='icu',
deterministic=False,
)
project_state = ProjectState()
new_state = project_state.clone()
# Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('CREATE COLLATION', captured_queries[0]['sql'])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('DROP COLLATION', captured_queries[0]['sql'])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'CreateCollation')
self.assertEqual(args, [])
self.assertEqual(kwargs, {
'name': 'case_insensitive_test',
'locale': 'und-u-ks-level2',
'provider': 'icu',
'deterministic': False,
})
def test_create_collation_alternate_provider(self):
operation = CreateCollation(
'german_phonebook_test',
provider='icu',
locale='de-u-co-phonebk',
)
project_state = ProjectState()
new_state = project_state.clone()
        # Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('CREATE COLLATION', captured_queries[0]['sql'])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('DROP COLLATION', captured_queries[0]['sql'])
def test_nondeterministic_collation_not_supported(self):
operation = CreateCollation(
'case_insensitive_test',
provider='icu',
locale='und-u-ks-level2',
deterministic=False,
)
project_state = ProjectState()
new_state = project_state.clone()
msg = 'Non-deterministic collations require PostgreSQL 12+.'
with connection.schema_editor(atomic=False) as editor:
with mock.patch(
'django.db.backends.postgresql.features.DatabaseFeatures.'
'supports_non_deterministic_collations',
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
operation.database_forwards(self.app_label, editor, project_state, new_state)
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
class RemoveCollationTests(PostgreSQLTestCase):
app_label = 'test_allow_remove_collation'
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = RemoveCollation('C_test', locale='C')
project_state = ProjectState()
new_state = project_state.clone()
# Don't create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 0)
def test_remove(self):
operation = CreateCollation('C_test', locale='C')
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
operation = RemoveCollation('C_test', locale='C')
self.assertEqual(operation.migration_name_fragment, 'remove_collation_c_test')
self.assertEqual(operation.describe(), 'Remove collation C_test')
project_state = ProjectState()
new_state = project_state.clone()
# Remove a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('DROP COLLATION', captured_queries[0]['sql'])
# Removing a nonexistent collation raises an exception.
with self.assertRaisesMessage(ProgrammingError, 'does not exist'):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(self.app_label, editor, new_state, project_state)
self.assertEqual(len(captured_queries), 1)
self.assertIn('CREATE COLLATION', captured_queries[0]['sql'])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'RemoveCollation')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'name': 'C_test', 'locale': 'C'})
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
@modify_settings(INSTALLED_APPS={'append': 'migrations'})
class AddConstraintNotValidTests(OperationTestBase):
app_label = 'test_add_constraint_not_valid'
def test_non_check_constraint_not_supported(self):
constraint = UniqueConstraint(fields=['pink'], name='pony_pink_uniq')
msg = 'AddConstraintNotValid.constraint must be a check constraint.'
with self.assertRaisesMessage(TypeError, msg):
AddConstraintNotValid(model_name='pony', constraint=constraint)
def test_add(self):
table_name = f'{self.app_label}_pony'
constraint_name = 'pony_pink_gte_check'
constraint = CheckConstraint(check=Q(pink__gte=4), name=constraint_name)
operation = AddConstraintNotValid('Pony', constraint=constraint)
project_state, new_state = self.make_test_state(self.app_label, operation)
self.assertEqual(
operation.describe(),
f'Create not valid constraint {constraint_name} on model Pony',
)
self.assertEqual(
operation.migration_name_fragment,
f'pony_{constraint_name}_not_valid',
)
self.assertEqual(
len(new_state.models[self.app_label, 'pony'].options['constraints']),
1,
)
self.assertConstraintNotExists(table_name, constraint_name)
Pony = new_state.apps.get_model(self.app_label, 'Pony')
self.assertEqual(len(Pony._meta.constraints), 1)
Pony.objects.create(pink=2, weight=1.0)
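        # A NOT VALID constraint skips validation of existing rows, so the
        # pink=2 row above is left alone, but new rows must satisfy the check.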
# Add constraint.
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
msg = f'check constraint "{constraint_name}"'
with self.assertRaisesMessage(IntegrityError, msg), transaction.atomic():
Pony.objects.create(pink=3, weight=1.0)
self.assertConstraintExists(table_name, constraint_name)
# Reversal.
with connection.schema_editor(atomic=True) as editor:
operation.database_backwards(self.app_label, editor, project_state, new_state)
self.assertConstraintNotExists(table_name, constraint_name)
Pony.objects.create(pink=3, weight=1.0)
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'AddConstraintNotValid')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'model_name': 'Pony', 'constraint': constraint})
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific tests.')
@modify_settings(INSTALLED_APPS={'append': 'migrations'})
class ValidateConstraintTests(OperationTestBase):
app_label = 'test_validate_constraint'
def test_validate(self):
constraint_name = 'pony_pink_gte_check'
constraint = CheckConstraint(check=Q(pink__gte=4), name=constraint_name)
operation = AddConstraintNotValid('Pony', constraint=constraint)
project_state, new_state = self.make_test_state(self.app_label, operation)
Pony = new_state.apps.get_model(self.app_label, 'Pony')
obj = Pony.objects.create(pink=2, weight=1.0)
# Add constraint.
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
operation = ValidateConstraint('Pony', name=constraint_name)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(
operation.describe(),
f'Validate constraint {constraint_name} on model Pony',
)
self.assertEqual(
operation.migration_name_fragment,
f'pony_validate_{constraint_name}',
)
# Validate constraint.
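        # VALIDATE CONSTRAINT scans existing rows, so it fails while the
        # pink=2 row still violates pink >= 4 and succeeds after the update.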
with connection.schema_editor(atomic=True) as editor:
msg = f'check constraint "{constraint_name}"'
with self.assertRaisesMessage(IntegrityError, msg):
operation.database_forwards(self.app_label, editor, project_state, new_state)
obj.pink = 5
obj.save()
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(self.app_label, editor, project_state, new_state)
# Reversal is a noop.
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(self.app_label, editor, new_state, project_state)
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, 'ValidateConstraint')
self.assertEqual(args, [])
self.assertEqual(kwargs, {'model_name': 'Pony', 'name': constraint_name})
import datetime
from unittest import mock
from django.contrib.postgres.indexes import OpClass
from django.db import (
IntegrityError, NotSupportedError, connection, transaction,
)
from django.db.models import (
CheckConstraint, Deferrable, F, Func, IntegerField, Q, UniqueConstraint,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Left, Lower
from django.test import modify_settings, skipUnlessDBFeature
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import (
HotelReservation, IntegerArrayModel, RangesModel, Room, Scene,
)
try:
from psycopg2.extras import DateRange, NumericRange
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import (
DateTimeRangeField, RangeBoundary, RangeOperators,
)
except ImportError:
pass
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SchemaTests(PostgreSQLTestCase):
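    # The query below returns the operator class(es) used by the index that
    # backs a constraint: pg_index.indclass holds the opclass OIDs, joined to
    # pg_opclass for the name and to pg_class for the index relation.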
get_opclass_query = '''
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
'''
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_check_constraint_range_value(self):
constraint_name = 'ints_between'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(ints__contained_by=NumericRange(10, 30)),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(20, 50))
RangesModel.objects.create(ints=(10, 30))
def test_check_constraint_daterange_contains(self):
constraint_name = 'dates_contains'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(dates__contains=F('dates_inner')),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
date_1 = datetime.date(2016, 1, 1)
date_2 = datetime.date(2016, 1, 4)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2.replace(day=5)),
)
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2),
)
def test_check_constraint_datetimerange_contains(self):
constraint_name = 'timestamps_contains'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = CheckConstraint(
check=Q(timestamps__contains=F('timestamps_inner')),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
datetime_1 = datetime.datetime(2016, 1, 1)
datetime_2 = datetime.datetime(2016, 1, 2, 12)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
)
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2),
)
def test_opclass(self):
constraint = UniqueConstraint(
name='test_opclass',
fields=['scene'],
opclasses=['varchar_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[('varchar_pattern_ops', constraint.name)],
)
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
def test_opclass_multiple_columns(self):
constraint = UniqueConstraint(
name='test_opclass_multiple',
fields=['scene', 'setting'],
opclasses=['varchar_pattern_ops', 'text_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
expected_opclasses = (
('varchar_pattern_ops', constraint.name),
('text_pattern_ops', constraint.name),
)
self.assertCountEqual(cursor.fetchall(), expected_opclasses)
def test_opclass_partial(self):
constraint = UniqueConstraint(
name='test_opclass_partial',
fields=['scene'],
opclasses=['varchar_pattern_ops'],
condition=Q(setting__contains="Sir Bedemir's Castle"),
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[('varchar_pattern_ops', constraint.name)],
)
@skipUnlessDBFeature('supports_covering_indexes')
def test_opclass_include(self):
constraint = UniqueConstraint(
name='test_opclass_include',
fields=['scene'],
opclasses=['varchar_pattern_ops'],
include=['setting'],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[('varchar_pattern_ops', constraint.name)],
)
@skipUnlessDBFeature('supports_expression_indexes')
def test_opclass_func(self):
constraint = UniqueConstraint(
OpClass(Lower('scene'), name='text_pattern_ops'),
name='test_opclass_func',
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIs(constraints[constraint.name]['unique'], True)
self.assertIn(constraint.name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[('text_pattern_ops', constraint.name)],
)
Scene.objects.create(scene='Scene 10', setting='The dark forest of Ewing')
with self.assertRaises(IntegrityError), transaction.atomic():
Scene.objects.create(scene='ScEnE 10', setting="Sir Bedemir's Castle")
Scene.objects.create(scene='Scene 5', setting="Sir Bedemir's Castle")
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
Scene.objects.create(scene='ScEnE 10', setting="Sir Bedemir's Castle")
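# ExclusionConstraint is PostgreSQL-specific: it maps to an EXCLUDE USING
# GIST/SPGIST table constraint, which rejects any two rows whose expressions
# conflict under the given operators (for example, overlapping ranges for &&).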
class ExclusionConstraintTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_invalid_condition(self):
msg = 'ExclusionConstraint.condition must be a Q instance.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_invalid_condition',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
condition=F('invalid'),
)
def test_invalid_index_type(self):
msg = 'Exclusion constraints only support GiST or SP-GiST indexes.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='gin',
name='exclude_invalid_index_type',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
)
def test_invalid_expressions(self):
msg = 'The expressions must be a list of 2-tuples.'
for expressions in (['foo'], [('foo')], [('foo_1', 'foo_2', 'foo_3')]):
with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_invalid_expressions',
expressions=expressions,
)
def test_empty_expressions(self):
msg = 'At least one expression is required to define an exclusion constraint.'
for empty_expressions in (None, []):
with self.subTest(empty_expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type='GIST',
name='exclude_empty_expressions',
expressions=empty_expressions,
)
def test_invalid_deferrable(self):
msg = 'ExclusionConstraint.deferrable must be a Deferrable instance.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_deferrable',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
deferrable='invalid',
)
def test_deferrable_with_condition(self):
msg = 'ExclusionConstraint with conditions cannot be deferred.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_condition',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
deferrable=Deferrable.DEFERRED,
)
def test_invalid_include_type(self):
msg = 'ExclusionConstraint.include must be a list or tuple.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_include',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
include='invalid',
)
def test_invalid_include_index_type(self):
msg = 'Covering exclusion constraints only support GiST indexes.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_index_type',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
include=['cancelled'],
index_type='spgist',
)
def test_invalid_opclasses_type(self):
msg = 'ExclusionConstraint.opclasses must be a list or tuple.'
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_opclasses',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
opclasses='invalid',
)
def test_opclasses_and_expressions_same_length(self):
msg = (
'ExclusionConstraint.expressions and '
'ExclusionConstraint.opclasses must have the same number of '
'elements.'
)
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name='exclude_invalid_expressions_opclasses_length',
expressions=[(F('datespan'), RangeOperators.OVERLAPS)],
opclasses=['foo', 'bar'],
)
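    # repr() should reflect every option that was passed in: the index type, the
    # expression pairs with their raw SQL operators ('&&', '=', '-|-'), and any
    # condition, deferrable, include, or opclasses settings.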
def test_repr(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
(F('room'), RangeOperators.EQUAL),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>",
)
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)],
condition=Q(cancelled=False),
index_type='SPGiST',
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='SPGiST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"condition=(AND: ('cancelled', False))>",
)
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.IMMEDIATE,
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"deferrable=Deferrable.IMMEDIATE>",
)
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)],
include=['cancelled', 'room'],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"include=('cancelled', 'room')>",
)
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[(F('datespan'), RangeOperators.ADJACENT_TO)],
opclasses=['range_ops'],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"opclasses=['range_ops']>",
)
def test_eq(self):
constraint_1 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
(F('room'), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
constraint_2 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
)
constraint_3 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
)
constraint_4 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
deferrable=Deferrable.DEFERRED,
)
constraint_5 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
)
constraint_6 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
include=['cancelled'],
)
constraint_7 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
include=['cancelled'],
)
constraint_8 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
include=['cancelled'],
opclasses=['range_ops', 'range_ops']
)
constraint_9 = ExclusionConstraint(
name='exclude_overlapping',
expressions=[
('datespan', RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL),
],
opclasses=['range_ops', 'range_ops']
)
self.assertEqual(constraint_1, constraint_1)
self.assertEqual(constraint_1, mock.ANY)
self.assertNotEqual(constraint_1, constraint_2)
self.assertNotEqual(constraint_1, constraint_3)
self.assertNotEqual(constraint_1, constraint_4)
self.assertNotEqual(constraint_2, constraint_3)
self.assertNotEqual(constraint_2, constraint_4)
self.assertNotEqual(constraint_2, constraint_7)
self.assertNotEqual(constraint_2, constraint_9)
self.assertNotEqual(constraint_4, constraint_5)
self.assertNotEqual(constraint_5, constraint_6)
self.assertNotEqual(constraint_7, constraint_8)
self.assertNotEqual(constraint_1, object())
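    # deconstruct() is what the migrations framework uses to serialize a
    # constraint; the kwargs checked below must round-trip every non-default
    # option.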
def test_deconstruct(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
})
def test_deconstruct_index_type(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
index_type='SPGIST',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'index_type': 'SPGIST',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
})
def test_deconstruct_condition(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
condition=Q(cancelled=False),
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS), ('room', RangeOperators.EQUAL)],
'condition': Q(cancelled=False),
})
def test_deconstruct_deferrable(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS)],
deferrable=Deferrable.DEFERRED,
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS)],
'deferrable': Deferrable.DEFERRED,
})
def test_deconstruct_include(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS)],
include=['cancelled', 'room'],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS)],
'include': ('cancelled', 'room'),
})
def test_deconstruct_opclasses(self):
constraint = ExclusionConstraint(
name='exclude_overlapping',
expressions=[('datespan', RangeOperators.OVERLAPS)],
opclasses=['range_ops'],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.constraints.ExclusionConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'name': 'exclude_overlapping',
'expressions': [('datespan', RangeOperators.OVERLAPS)],
'opclasses': ['range_ops'],
})
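    # Shared helper: installs the given exclusion constraint on HotelReservation
    # and checks that an overlapping, non-cancelled reservation for the same
    # room is rejected, while another room, a cancelled booking, and adjacent
    # dates are still allowed.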
def _test_range_overlaps(self, constraint):
# Create exclusion constraint.
self.assertNotIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
# Add initial reservations.
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 26),
timezone.datetime(2018, 6, 28),
timezone.datetime(2018, 6, 29),
]
HotelReservation.objects.create(
datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
)
HotelReservation.objects.create(
datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
start=datetimes[1],
end=datetimes[3],
room=room102,
)
# Overlap dates.
with self.assertRaises(IntegrityError), transaction.atomic():
reservation = HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
)
reservation.save()
# Valid range.
HotelReservation.objects.bulk_create([
# Other room.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room101,
),
# Cancelled reservation.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[1].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
cancelled=True,
),
# Other adjacent dates.
HotelReservation(
datespan=(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
),
])
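    # Functional variant: the range is built on the fly with a custom
    # TSTZRANGE(start, end) Func expression instead of a stored range column,
    # so range_ops applies to the computed range and gist_int4_ops to the
    # integer room foreign key column.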
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = 'TSTZRANGE'
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name='exclude_overlapping_reservations_custom',
expressions=[
(TsTzRange('start', 'end', RangeBoundary()), RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL)
],
condition=Q(cancelled=False),
opclasses=['range_ops', 'gist_int4_ops'],
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name='exclude_overlapping_reservations',
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
('room', RangeOperators.EQUAL)
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
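    # RangeOperators.ADJACENT_TO (-|-) excludes ranges that touch without
    # overlapping: with (20, 50) stored, (10, 20) is adjacent and rejected,
    # while (10, 19) and (51, 60) leave a gap and are accepted.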
def test_range_adjacent(self):
constraint_name = 'ints_adjacent'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_expressions_with_params(self):
constraint_name = 'scene_left_equal'
self.assertNotIn(constraint_name, self.get_constraints(Scene._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Left('scene', 4), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint_name, self.get_constraints(Scene._meta.db_table))
def test_expressions_with_key_transform(self):
constraint_name = 'exclude_overlapping_reservations_smoking'
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(F('datespan'), RangeOperators.OVERLAPS),
(KeyTextTransform('smoking', 'requirements'), RangeOperators.EQUAL),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint_name,
self.get_constraints(HotelReservation._meta.db_table),
)
def test_index_transform(self):
constraint_name = 'first_index_equal'
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('field__0', RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(IntegerArrayModel, constraint)
self.assertIn(
constraint_name,
self.get_constraints(IntegerArrayModel._meta.db_table),
)
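    # A DEFERRED exclusion constraint is only checked at commit time, so a
    # temporarily adjacent row may exist inside a transaction; switching it to
    # IMMEDIATE with SET CONSTRAINTS forces the check to run right away.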
def test_range_adjacent_initially_deferred(self):
constraint_name = 'ints_adjacent_deferred'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name)
                # Remove the adjacent range before the end of the transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
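    # Columns listed in include are stored in the GiST index as payload only:
    # they make the index covering but take no part in the exclusion check, and
    # they require PostgreSQL 12+ (supports_covering_gist_indexes).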
@skipUnlessDBFeature('supports_covering_gist_indexes')
def test_range_adjacent_include(self):
constraint_name = 'ints_adjacent_include'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
include=['decimals', 'ints'],
index_type='gist',
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
@skipUnlessDBFeature('supports_covering_gist_indexes')
def test_range_adjacent_include_condition(self):
constraint_name = 'ints_adjacent_include_condition'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
include=['decimals'],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature('supports_covering_gist_indexes')
def test_range_adjacent_include_deferrable(self):
constraint_name = 'ints_adjacent_include_deferrable'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
include=['decimals'],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_include_not_supported(self):
constraint_name = 'ints_adjacent_include_not_supported'
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
include=['id'],
)
msg = 'Covering exclusion constraints requires PostgreSQL 12+.'
with connection.schema_editor() as editor:
with mock.patch(
'django.db.backends.postgresql.features.DatabaseFeatures.supports_covering_gist_indexes',
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
editor.add_constraint(RangesModel, constraint)
def test_range_adjacent_opclasses(self):
constraint_name = 'ints_adjacent_opclasses'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
opclasses=['range_ops'],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclasses_condition(self):
constraint_name = 'ints_adjacent_opclasses_condition'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
opclasses=['range_ops'],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclasses_deferrable(self):
constraint_name = 'ints_adjacent_opclasses_deferrable'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
opclasses=['range_ops'],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature('supports_covering_gist_indexes')
def test_range_adjacent_opclasses_include(self):
constraint_name = 'ints_adjacent_opclasses_include'
self.assertNotIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[('ints', RangeOperators.ADJACENT_TO)],
opclasses=['range_ops'],
include=['decimals'],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_equal_cast(self):
constraint_name = 'exclusion_equal_room_cast'
self.assertNotIn(constraint_name, self.get_constraints(Room._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Cast('number', IntegerField()), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Room, constraint)
self.assertIn(constraint_name, self.get_constraints(Room._meta.db_table))
966738f530a41afc111b7edfac42db21565b6cdf39827b6643a7dc8f48608a96
import datetime
from xml.dom import minidom
from django.contrib.sites.models import Site
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils import timezone
from django.utils.feedgenerator import (
Atom1Feed, Rss201rev2Feed, rfc2822_date, rfc3339_date,
)
from .models import Article, Entry
TZ = timezone.get_default_timezone()
class FeedTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.e1 = Entry.objects.create(
title='My first entry', updated=datetime.datetime(1980, 1, 1, 12, 30),
published=datetime.datetime(1986, 9, 25, 20, 15, 00)
)
cls.e2 = Entry.objects.create(
title='My second entry', updated=datetime.datetime(2008, 1, 2, 12, 30),
published=datetime.datetime(2006, 3, 17, 18, 0)
)
cls.e3 = Entry.objects.create(
title='My third entry', updated=datetime.datetime(2008, 1, 2, 13, 30),
published=datetime.datetime(2005, 6, 14, 10, 45)
)
cls.e4 = Entry.objects.create(
title='A & B < C > D', updated=datetime.datetime(2008, 1, 3, 13, 30),
published=datetime.datetime(2005, 11, 25, 12, 11, 23)
)
cls.e5 = Entry.objects.create(
title='My last entry', updated=datetime.datetime(2013, 1, 20, 0, 0),
published=datetime.datetime(2013, 3, 25, 20, 0)
)
cls.a1 = Article.objects.create(
title='My first article',
entry=cls.e1,
updated=datetime.datetime(1986, 11, 21, 9, 12, 18),
published=datetime.datetime(1986, 10, 21, 9, 12, 18),
)
def assertChildNodes(self, elem, expected):
actual = {n.nodeName for n in elem.childNodes}
expected = set(expected)
self.assertEqual(actual, expected)
def assertChildNodeContent(self, elem, expected):
for k, v in expected.items():
self.assertEqual(
elem.getElementsByTagName(k)[0].firstChild.wholeText, v)
def assertCategories(self, elem, expected):
self.assertEqual(
{i.firstChild.wholeText for i in elem.childNodes if i.nodeName == 'category'},
set(expected)
)
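# The helpers above parse the response with xml.dom.minidom and compare node
# names and text content instead of matching raw XML strings, so the tests are
# not sensitive to attribute ordering or whitespace in the generated feed.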
@override_settings(ROOT_URLCONF='syndication_tests.urls')
class SyndicationFeedTest(FeedTestCase):
"""
Tests for the high-level syndication feed framework.
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
        # This cleanup is necessary because the contrib.sites cache
        # makes tests interfere with each other; see #11505.
Site.objects.clear_cache()
def test_rss2_feed(self):
"""
Test the structure and content of feeds generated by Rss201rev2Feed.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '2.0')
self.assertEqual(feed.getElementsByTagName('language')[0].firstChild.nodeValue, 'en')
        # Making sure there's only one `channel` element within the
        # `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
# Find the last build date
d = Entry.objects.latest('published').published
last_build_date = rfc2822_date(timezone.make_aware(d, TZ))
self.assertChildNodes(
chan, [
'title', 'link', 'description', 'language', 'lastBuildDate',
'item', 'atom:link', 'ttl', 'copyright', 'category',
]
)
self.assertChildNodeContent(chan, {
'title': 'My blog',
'description': 'A more thorough description of my blog.',
'link': 'http://example.com/blog/',
'language': 'en',
'lastBuildDate': last_build_date,
'ttl': '600',
'copyright': 'Copyright (c) 2007, Sally Smith',
})
self.assertCategories(chan, ['python', 'django'])
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss2/'
)
# Find the pubdate of the first feed item
d = Entry.objects.get(pk=self.e1.pk).published
pub_date = rfc2822_date(timezone.make_aware(d, TZ))
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/%s/' % self.e1.pk,
'guid': 'http://example.com/blog/%s/' % self.e1.pk,
'pubDate': pub_date,
'author': '[email protected] (Sally Smith)',
'comments': '/blog/%s/comments' % self.e1.pk,
})
self.assertCategories(items[0], ['python', 'testing'])
for item in items:
self.assertChildNodes(item, [
'title',
'link',
'description',
'guid',
'category',
'pubDate',
'author',
'comments',
])
# Assert that <guid> does not have any 'isPermaLink' attribute
self.assertIsNone(item.getElementsByTagName(
'guid')[0].attributes.get('isPermaLink'))
def test_rss2_feed_guid_permalink_false(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'false'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_false/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "false")
def test_rss2_feed_guid_permalink_true(self):
"""
Test if the 'isPermaLink' attribute of <guid> element of an item
in the RSS feed is 'true'.
"""
response = self.client.get(
'/syndication/rss2/guid_ispermalink_true/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName(
'rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
self.assertEqual(
item.getElementsByTagName('guid')[0].attributes.get(
'isPermaLink').value, "true")
def test_rss2_single_enclosure(self):
response = self.client.get('/syndication/rss2/single-enclosure/')
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
for item in items:
enclosures = item.getElementsByTagName('enclosure')
self.assertEqual(len(enclosures), 1)
def test_rss2_multiple_enclosures(self):
with self.assertRaisesMessage(
ValueError,
"RSS feed items may only have one enclosure, see "
"http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
):
self.client.get('/syndication/rss2/multiple-enclosure/')
def test_rss091_feed(self):
"""
Test the structure and content of feeds generated by RssUserland091Feed.
"""
response = self.client.get('/syndication/rss091/')
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName('rss')
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute('version'), '0.91')
        # Making sure there's only one `channel` element within the
        # `rss` element.
chan_elem = feed.getElementsByTagName('channel')
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
self.assertChildNodes(
chan, [
'title', 'link', 'description', 'language', 'lastBuildDate',
'item', 'atom:link', 'ttl', 'copyright', 'category',
]
)
# Ensure the content of the channel is correct
self.assertChildNodeContent(chan, {
'title': 'My blog',
'link': 'http://example.com/blog/',
})
self.assertCategories(chan, ['python', 'django'])
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName('atom:link')[0].getAttribute('href'),
'http://example.com/syndication/rss091/'
)
items = chan.getElementsByTagName('item')
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(items[0], {
'title': 'My first entry',
'description': 'Overridden description: My first entry',
'link': 'http://example.com/blog/%s/' % self.e1.pk,
})
for item in items:
self.assertChildNodes(item, ['title', 'link', 'description'])
self.assertCategories(item, [])
def test_atom_feed(self):
"""
Test the structure and content of feeds generated by Atom1Feed.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom')
self.assertChildNodes(
feed,
['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author']
)
for link in feed.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/')
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'category',
'updated',
'published',
'rights',
'author',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_atom_feed_published_and_updated_elements(self):
"""
        The published and updated elements are distinct and adhere to RFC 4287.
"""
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
entries = feed.getElementsByTagName('entry')
published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText
updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText
self.assertNotEqual(published, updated)
def test_atom_single_enclosure(self):
response = self.client.get('/syndication/atom/single-enclosure/')
feed = minidom.parseString(response.content).firstChild
items = feed.getElementsByTagName('entry')
for item in items:
links = item.getElementsByTagName('link')
links = [link for link in links if link.getAttribute('rel') == 'enclosure']
self.assertEqual(len(links), 1)
def test_atom_multiple_enclosures(self):
response = self.client.get('/syndication/atom/multiple-enclosure/')
feed = minidom.parseString(response.content).firstChild
items = feed.getElementsByTagName('entry')
for item in items:
links = item.getElementsByTagName('link')
links = [link for link in links if link.getAttribute('rel') == 'enclosure']
self.assertEqual(len(links), 2)
def test_latest_post_date(self):
"""
Both the published and updated dates are
considered when determining the latest post date.
"""
# this feed has a `published` element with the latest date
response = self.client.get('/syndication/atom/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest_published = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_published)
# this feed has an `updated` element with the latest date
response = self.client.get('/syndication/latest/')
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.exclude(title='My last entry').latest('updated').updated
latest_updated = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_updated)
def test_custom_feed_generator(self):
response = self.client.get('/syndication/custom/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, 'feed')
self.assertEqual(feed.getAttribute('django'), 'rocks')
self.assertChildNodes(
feed,
['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'spam', 'rights', 'category', 'author']
)
entries = feed.getElementsByTagName('entry')
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertEqual(entry.getAttribute('bacon'), 'yum')
self.assertChildNodes(entry, [
'title',
'link',
'id',
'summary',
'ministry',
'rights',
'author',
'updated',
'published',
'category',
])
summary = entry.getElementsByTagName('summary')[0]
self.assertEqual(summary.getAttribute('type'), 'html')
def test_feed_generator_language_attribute(self):
response = self.client.get('/syndication/language/')
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.firstChild.getElementsByTagName('language')[0].firstChild.nodeValue, 'de')
def test_title_escaping(self):
"""
Titles are escaped correctly in RSS feeds.
"""
response = self.client.get('/syndication/rss2/')
doc = minidom.parseString(response.content)
for item in doc.getElementsByTagName('item'):
link = item.getElementsByTagName('link')[0]
if link.firstChild.wholeText == 'http://example.com/blog/4/':
title = item.getElementsByTagName('title')[0]
self.assertEqual(title.firstChild.wholeText, 'A & B < C > D')
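    # test_naive_datetime_conversion expects naive publication dates to be
    # interpreted in the default time zone (TZ above) before being rendered,
    # while test_aware_datetime_conversion expects an explicit +00:42 offset to
    # survive untouched.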
def test_naive_datetime_conversion(self):
"""
Datetimes are correctly converted to the local time zone.
"""
        # Naive datetimes passed in are converted to the local time zone, so
        # check the received zone offset against the local offset.
response = self.client.get('/syndication/naive-dates/')
doc = minidom.parseString(response.content)
updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText
d = Entry.objects.latest('published').published
latest = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest)
def test_aware_datetime_conversion(self):
"""
Datetimes with timezones don't get trodden on.
"""
response = self.client.get('/syndication/aware-dates/')
doc = minidom.parseString(response.content)
published = doc.getElementsByTagName('published')[0].firstChild.wholeText
self.assertEqual(published[-6:], '+00:42')
def test_feed_no_content_self_closing_tag(self):
tests = [
(Atom1Feed, 'link'),
(Rss201rev2Feed, 'atom:link'),
]
for feedgenerator, tag in tests:
with self.subTest(feedgenerator=feedgenerator.__name__):
feed = feedgenerator(
title='title',
link='https://example.com',
description='self closing tags test',
feed_url='https://feed.url.com',
)
doc = feed.writeString('utf-8')
self.assertIn(f'<{tag} href="https://feed.url.com" rel="self"/>', doc)
@requires_tz_support
def test_feed_last_modified_time_naive_date(self):
"""
Tests the Last-Modified header with naive publication dates.
"""
response = self.client.get('/syndication/naive-dates/')
self.assertEqual(response.headers['Last-Modified'], 'Tue, 26 Mar 2013 01:00:00 GMT')
def test_feed_last_modified_time(self):
"""
Tests the Last-Modified header with aware publication dates.
"""
response = self.client.get('/syndication/aware-dates/')
self.assertEqual(response.headers['Last-Modified'], 'Mon, 25 Mar 2013 19:18:00 GMT')
        # No Last-Modified header when the feed has no item_pubdate.
response = self.client.get('/syndication/no_pubdate/')
self.assertFalse(response.has_header('Last-Modified'))
def test_feed_url(self):
"""
The feed_url can be overridden.
"""
response = self.client.get('/syndication/feedurl/')
doc = minidom.parseString(response.content)
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')
def test_secure_urls(self):
"""
        Test that URLs are prefixed with https:// when the feed is requested over HTTPS.
"""
response = self.client.get('/syndication/rss2/', **{
'wsgi.url_scheme': 'https',
})
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName('channel')[0]
self.assertEqual(
chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5],
'https'
)
atom_link = chan.getElementsByTagName('atom:link')[0]
self.assertEqual(atom_link.getAttribute('href')[0:5], 'https')
for link in doc.getElementsByTagName('link'):
if link.getAttribute('rel') == 'self':
self.assertEqual(link.getAttribute('href')[0:5], 'https')
def test_item_link_error(self):
"""
        An ImproperlyConfigured error is raised if no link can be found for the
        item(s).
"""
msg = (
'Give your Article class a get_absolute_url() method, or define '
'an item_link() method in your Feed class.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/syndication/articles/')
def test_template_feed(self):
"""
The item title and description can be overridden with templates.
"""
response = self.client.get('/syndication/template/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'Title in your templates: My first entry\n',
'description': 'Description in your templates: My first entry\n',
'link': 'http://example.com/blog/%s/' % self.e1.pk,
})
def test_template_context_feed(self):
"""
Custom context data can be passed to templates for title
and description.
"""
response = self.client.get('/syndication/template_context/')
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'title': 'My first entry (foo is bar)\n',
'description': 'My first entry (foo is bar)\n',
})
def test_add_domain(self):
"""
add_domain() prefixes domains onto the correct URLs.
"""
prefix_domain_mapping = (
(('example.com', '/foo/?arg=value'), 'http://example.com/foo/?arg=value'),
(('example.com', '/foo/?arg=value', True), 'https://example.com/foo/?arg=value'),
(('example.com', 'http://djangoproject.com/doc/'), 'http://djangoproject.com/doc/'),
(('example.com', 'https://djangoproject.com/doc/'), 'https://djangoproject.com/doc/'),
(('example.com', 'mailto:[email protected]'), 'mailto:[email protected]'),
(('example.com', '//example.com/foo/?arg=value'), 'http://example.com/foo/?arg=value'),
)
for prefix in prefix_domain_mapping:
with self.subTest(prefix=prefix):
self.assertEqual(views.add_domain(*prefix[0]), prefix[1])
def test_get_object(self):
response = self.client.get('/syndication/rss2/articles/%s/' % self.e1.pk)
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName('rss')[0]
chan = feed.getElementsByTagName('channel')[0]
items = chan.getElementsByTagName('item')
self.assertChildNodeContent(items[0], {
'comments': '/blog/%s/article/%s/comments' % (self.e1.pk, self.a1.pk),
'description': 'Article description: My first article',
'link': 'http://example.com/blog/%s/article/%s/' % (self.e1.pk, self.a1.pk),
'title': 'Title: My first article',
'pubDate': rfc2822_date(timezone.make_aware(self.a1.published, TZ)),
})
def test_get_non_existent_object(self):
response = self.client.get('/syndication/rss2/articles/0/')
self.assertEqual(response.status_code, 404)
3cd374bee2c9ec3a4726ebb2361f4a2480321c27171d7c14682ed050a3744da7
import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
BooleanField, Case, CharField, Count, DateTimeField, DecimalField, Exists,
ExpressionWrapper, F, FloatField, Func, IntegerField, Max, OuterRef, Q,
Subquery, Sum, Value, When,
)
from django.db.models.expressions import RawSQL
from django.db.models.functions import (
Coalesce, ExtractYear, Floor, Length, Lower, Trim,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import (
Author, Book, Company, DepartmentStore, Employee, Publisher, Store, Ticket,
)
class NonAggregateAnnotationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3)
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1)
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = Book.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15)
)
cls.b6 = Book.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15)
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
cls.s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
cls.s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
cls.s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
cls.s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
cls.s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
cls.s3.books.add(cls.b3, cls.b4, cls.b6)
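    # Value(1) wraps a plain Python literal as a query expression, so the
    # annotation is computed by the database and attached to every returned
    # instance without requiring a join.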
def test_basic_annotation(self):
books = Book.objects.annotate(is_book=Value(1))
for book in books:
self.assertEqual(book.is_book, 1)
def test_basic_f_annotation(self):
books = Book.objects.annotate(another_rating=F('rating'))
for book in books:
self.assertEqual(book.another_rating, book.rating)
def test_joined_annotation(self):
books = Book.objects.select_related('publisher').annotate(
num_awards=F('publisher__num_awards'))
for book in books:
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_joined_transformed_annotation(self):
Employee.objects.bulk_create([
Employee(
first_name='John',
last_name='Doe',
age=18,
store=self.s1,
salary=15000,
),
Employee(
first_name='Jane',
last_name='Jones',
age=30,
store=self.s2,
salary=30000,
),
Employee(
first_name='Jo',
last_name='Smith',
age=55,
store=self.s3,
salary=50000,
),
])
employees = Employee.objects.annotate(
store_opened_year=F('store__original_opening__year'),
)
for employee in employees:
self.assertEqual(
employee.store_opened_year,
employee.store.original_opening.year,
)
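    # register_lookup() temporarily registers Floor, Trim, and Length as
    # transforms on the field class, which makes them available through the
    # double-underscore syntax (price__floor, name__trim__length) inside F()
    # expressions for the duration of the with block.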
def test_custom_transform_annotation(self):
with register_lookup(DecimalField, Floor):
books = Book.objects.annotate(floor_price=F('price__floor'))
self.assertSequenceEqual(books.values_list('pk', 'floor_price'), [
(self.b1.pk, 30),
(self.b2.pk, 23),
(self.b3.pk, 29),
(self.b4.pk, 29),
(self.b5.pk, 82),
(self.b6.pk, 75),
])
def test_chaining_transforms(self):
Company.objects.create(name=' Django Software Foundation ')
Company.objects.create(name='Yahoo')
with register_lookup(CharField, Trim), register_lookup(CharField, Length):
for expr in [Length('name__trim'), F('name__trim__length')]:
with self.subTest(expr=expr):
self.assertCountEqual(
Company.objects.annotate(length=expr).values('name', 'length'),
[
{'name': ' Django Software Foundation ', 'length': 26},
{'name': 'Yahoo', 'length': 5},
],
)
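    # ExpressionWrapper attaches an explicit output_field when the operands of
    # an arithmetic or boolean expression have mixed types and Django cannot
    # infer the result type on its own.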
def test_mixed_type_annotation_date_interval(self):
active = datetime.datetime(2015, 3, 20, 14, 0, 0)
duration = datetime.timedelta(hours=1)
expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
Ticket.objects.create(active_at=active, duration=duration)
t = Ticket.objects.annotate(
expires=ExpressionWrapper(F('active_at') + F('duration'), output_field=DateTimeField())
).first()
self.assertEqual(t.expires, expires)
def test_mixed_type_annotation_numbers(self):
test = self.b1
b = Book.objects.annotate(
combined=ExpressionWrapper(F('pages') + F('rating'), output_field=IntegerField())
).get(isbn=test.isbn)
combined = int(test.pages + test.rating)
self.assertEqual(b.combined, combined)
def test_empty_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=Book.objects.none()), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
def test_annotate_with_aggregation(self):
books = Book.objects.annotate(is_book=Value(1), rating_count=Count('rating'))
for book in books:
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_combined_expression_annotation_with_aggregation(self):
book = Book.objects.annotate(
combined=ExpressionWrapper(Value(3) * Value(4), output_field=IntegerField()),
rating_count=Count('rating'),
).first()
self.assertEqual(book.combined, 12)
self.assertEqual(book.rating_count, 1)
def test_combined_f_expression_annotation_with_aggregation(self):
book = Book.objects.filter(isbn='159059725').annotate(
combined=ExpressionWrapper(F('price') * F('pages'), output_field=FloatField()),
rating_count=Count('rating'),
).first()
self.assertEqual(book.combined, 13410.0)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
def test_q_expression_annotation_with_aggregation(self):
book = Book.objects.filter(isbn='159059725').annotate(
isnull_pubdate=ExpressionWrapper(
Q(pubdate__isnull=True),
output_field=BooleanField(),
),
rating_count=Count('rating'),
).first()
self.assertIs(book.isnull_pubdate, False)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature('supports_boolean_expr_in_select_clause')
def test_grouping_by_q_expression_annotation(self):
authors = Author.objects.annotate(
under_40=ExpressionWrapper(Q(age__lt=40), output_field=BooleanField()),
).values('under_40').annotate(
count_id=Count('id'),
).values('under_40', 'count_id')
self.assertCountEqual(authors, [
{'under_40': False, 'count_id': 3},
{'under_40': True, 'count_id': 6},
])
def test_aggregate_over_annotation(self):
agg = Author.objects.annotate(other_age=F('age')).aggregate(otherage_sum=Sum('other_age'))
other_agg = Author.objects.aggregate(age_sum=Sum('age'))
self.assertEqual(agg['otherage_sum'], other_agg['age_sum'])
@skipUnlessDBFeature('can_distinct_on_fields')
def test_distinct_on_with_annotation(self):
store = Store.objects.create(
name='test store',
original_opening=datetime.datetime.now(),
friday_night_closing=datetime.time(21, 00, 00),
)
names = [
'Theodore Roosevelt',
'Eleanor Roosevelt',
'Franklin Roosevelt',
'Ned Stark',
'Catelyn Stark',
]
for name in names:
Employee.objects.create(
store=store,
first_name=name.split()[0],
last_name=name.split()[1],
age=30, salary=2000,
)
people = Employee.objects.annotate(
name_lower=Lower('last_name'),
).distinct('name_lower')
self.assertEqual({p.last_name for p in people}, {'Stark', 'Roosevelt'})
self.assertEqual(len(people), 2)
people2 = Employee.objects.annotate(
test_alias=F('store__name'),
).distinct('test_alias')
self.assertEqual(len(people2), 1)
lengths = Employee.objects.annotate(
name_len=Length('first_name'),
).distinct('name_len').values_list('name_len', flat=True)
self.assertCountEqual(lengths, [3, 7, 8])
def test_filter_annotation(self):
books = Book.objects.annotate(is_book=Value(1)).filter(is_book=1)
for book in books:
self.assertEqual(book.is_book, 1)
def test_filter_annotation_with_f(self):
books = Book.objects.annotate(
other_rating=F('rating')
).filter(other_rating=3.5)
for book in books:
self.assertEqual(book.other_rating, 3.5)
def test_filter_annotation_with_double_f(self):
books = Book.objects.annotate(
other_rating=F('rating')
).filter(other_rating=F('rating'))
for book in books:
self.assertEqual(book.other_rating, book.rating)
def test_filter_agg_with_double_f(self):
books = Book.objects.annotate(
sum_rating=Sum('rating')
).filter(sum_rating=F('sum_rating'))
for book in books:
self.assertEqual(book.sum_rating, book.rating)
def test_filter_wrong_annotation(self):
with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
list(Book.objects.annotate(
sum_rating=Sum('rating')
).filter(sum_rating=F('nope')))
def test_decimal_annotation(self):
salary = Decimal(10) ** -Employee._meta.get_field('salary').decimal_places
Employee.objects.create(
first_name='Max',
last_name='Paine',
store=Store.objects.first(),
age=23,
salary=salary,
)
self.assertEqual(
Employee.objects.annotate(new_salary=F('salary') / 10).get().new_salary,
salary / 10,
)
def test_filter_decimal_annotation(self):
qs = Book.objects.annotate(new_price=F('price') + 1).filter(new_price=Decimal(31)).values_list('new_price')
self.assertEqual(qs.get(), (Decimal(31),))
def test_combined_annotation_commutative(self):
book1 = Book.objects.annotate(adjusted_rating=F('rating') + 2).get(pk=self.b1.pk)
book2 = Book.objects.annotate(adjusted_rating=2 + F('rating')).get(pk=self.b1.pk)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
book1 = Book.objects.annotate(adjusted_rating=F('rating') + None).get(pk=self.b1.pk)
book2 = Book.objects.annotate(adjusted_rating=None + F('rating')).get(pk=self.b1.pk)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
def test_update_with_annotation(self):
book_preupdate = Book.objects.get(pk=self.b2.pk)
Book.objects.annotate(other_rating=F('rating') - 1).update(rating=F('other_rating'))
book_postupdate = Book.objects.get(pk=self.b2.pk)
self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
def test_annotation_with_m2m(self):
books = Book.objects.annotate(author_age=F('authors__age')).filter(pk=self.b1.pk).order_by('author_age')
self.assertEqual(books[0].author_age, 34)
self.assertEqual(books[1].author_age, 35)
def test_annotation_reverse_m2m(self):
books = Book.objects.annotate(
store_name=F('store__name'),
).filter(
name='Practical Django Projects',
).order_by('store_name')
self.assertQuerysetEqual(
books, [
'Amazon.com',
'Books.com',
'Mamma and Pappa\'s Books'
],
lambda b: b.store_name
)
def test_values_annotation(self):
"""
Annotations can reference fields in a values clause,
and contribute to an existing values clause.
"""
# annotate references a field in values()
qs = Book.objects.values('rating').annotate(other_rating=F('rating') - 1)
book = qs.get(pk=self.b1.pk)
self.assertEqual(book['rating'] - 1, book['other_rating'])
        # filter() refers to the annotated value
book = qs.get(other_rating=4)
self.assertEqual(book['other_rating'], 4)
        # an existing values() clause can be annotated with a new field
book = qs.annotate(other_isbn=F('isbn')).get(other_rating=4)
self.assertEqual(book['other_rating'], 4)
self.assertEqual(book['other_isbn'], '155860191')
def test_values_with_pk_annotation(self):
        # annotate() references a field that appears in values() alongside the pk
publishers = Publisher.objects.values('id', 'book__rating').annotate(total=Sum('book__rating'))
for publisher in publishers.filter(pk=self.p1.pk):
self.assertEqual(publisher['book__rating'], publisher['total'])
@skipUnlessDBFeature('allows_group_by_pk')
def test_rawsql_group_by_collapse(self):
raw = RawSQL('SELECT MIN(id) FROM annotations_book', [])
qs = Author.objects.values('id').annotate(
min_book_id=raw,
count_friends=Count('friends'),
).order_by()
_, _, group_by = qs.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertNotEqual(raw, group_by[0])
def test_defer_annotation(self):
"""
Deferred attributes can be referenced by an annotation,
but they are not themselves deferred, and cannot be deferred.
"""
qs = Book.objects.defer('rating').annotate(other_rating=F('rating') - 1)
with self.assertNumQueries(2):
book = qs.get(other_rating=4)
self.assertEqual(book.rating, 5)
self.assertEqual(book.other_rating, 4)
with self.assertRaisesMessage(FieldDoesNotExist, "Book has no field named 'other_rating'"):
book = qs.defer('other_rating').get(other_rating=4)
def test_mti_annotations(self):
"""
Fields on an inherited model can be referenced by an
annotated field.
"""
d = DepartmentStore.objects.create(
name='Angus & Robinson',
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21, 00, 00),
chain='Westfield'
)
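        # DepartmentStore is a multi-table-inheritance child of Store, so the
        # annotations below read 'name' from the parent table and 'chain' from
        # the child table through the implicit parent-link join.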
books = Book.objects.filter(rating__gt=4)
for b in books:
d.books.add(b)
qs = DepartmentStore.objects.annotate(
other_name=F('name'),
other_chain=F('chain'),
is_open=Value(True, BooleanField()),
book_isbn=F('books__isbn')
).order_by('book_isbn').filter(chain='Westfield')
self.assertQuerysetEqual(
qs, [
('Angus & Robinson', 'Westfield', True, '155860191'),
('Angus & Robinson', 'Westfield', True, '159059725')
],
lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn)
)
def test_null_annotation(self):
"""
Annotating None onto a model round-trips
"""
book = Book.objects.annotate(no_value=Value(None, output_field=IntegerField())).first()
self.assertIsNone(book.no_value)
def test_order_by_annotation(self):
authors = Author.objects.annotate(other_age=F('age')).order_by('other_age')
self.assertQuerysetEqual(
authors, [
25, 29, 29, 34, 35, 37, 45, 46, 57,
],
lambda a: a.other_age
)
def test_order_by_aggregate(self):
authors = Author.objects.values('age').annotate(age_count=Count('age')).order_by('age_count', 'age')
self.assertQuerysetEqual(
authors, [
(25, 1), (34, 1), (35, 1), (37, 1), (45, 1), (46, 1), (57, 1), (29, 2),
],
lambda a: (a['age'], a['age_count'])
)
def test_raw_sql_with_inherited_field(self):
DepartmentStore.objects.create(
name='Angus & Robinson',
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21),
chain='Westfield',
area=123,
)
tests = (
('name', 'Angus & Robinson'),
('surface', 123),
("case when name='Angus & Robinson' then chain else name end", 'Westfield'),
)
for sql, expected_result in tests:
with self.subTest(sql=sql):
self.assertSequenceEqual(
DepartmentStore.objects.annotate(
annotation=RawSQL(sql, ()),
).values_list('annotation', flat=True),
[expected_result],
)
def test_annotate_exists(self):
authors = Author.objects.annotate(c=Count('id')).filter(c__gt=1)
self.assertFalse(authors.exists())
def test_column_field_ordering(self):
"""
        Columns are aligned in the correct order for resolve_columns. This test
        will fail on MySQL if the column ordering is incorrect. Column fields should be
aligned as:
1. extra_select
2. model_fields
3. annotation_fields
4. model_related_fields
"""
store = Store.objects.first()
Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
store=store, age=23, salary=Decimal(50000.00))
Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
store=store, age=18, salary=Decimal(40000.00))
qs = Employee.objects.extra(
select={'random_value': '42'}
).select_related('store').annotate(
annotated_value=Value(17),
)
rows = [
(1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
(2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
]
self.assertQuerysetEqual(
qs.order_by('id'), rows,
lambda e: (
e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
e.salary, e.store.name, e.annotated_value))
def test_column_field_ordering_with_deferred(self):
store = Store.objects.first()
Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
store=store, age=23, salary=Decimal(50000.00))
Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
store=store, age=18, salary=Decimal(40000.00))
qs = Employee.objects.extra(
select={'random_value': '42'}
).select_related('store').annotate(
annotated_value=Value(17),
)
rows = [
(1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
(2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
]
        # Deferred columns must not disturb the column alignment.
self.assertQuerysetEqual(
qs.defer('age').order_by('id'), rows,
lambda e: (
e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
e.salary, e.store.name, e.annotated_value))
def test_custom_functions(self):
Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
qs = Company.objects.annotate(
tagline=Func(
F('motto'),
F('ticker_name'),
F('description'),
Value('No Tag'),
function='COALESCE'
)
).order_by('name')
self.assertQuerysetEqual(
qs, [
('Apple', 'APPL'),
('Django Software Foundation', 'No Tag'),
('Google', 'Do No Evil'),
('Yahoo', 'Internet Company')
],
lambda c: (c.name, c.tagline)
)
def test_custom_functions_can_ref_other_functions(self):
Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
class Lower(Func):
function = 'LOWER'
qs = Company.objects.annotate(
tagline=Func(
F('motto'),
F('ticker_name'),
F('description'),
Value('No Tag'),
function='COALESCE',
)
).annotate(
tagline_lower=Lower(F('tagline')),
).order_by('name')
# LOWER function supported by:
# oracle, postgres, mysql, sqlite, sqlserver
self.assertQuerysetEqual(
qs, [
('Apple', 'APPL'.lower()),
('Django Software Foundation', 'No Tag'.lower()),
('Google', 'Do No Evil'.lower()),
('Yahoo', 'Internet Company'.lower())
],
lambda c: (c.name, c.tagline_lower)
)
def test_boolean_value_annotation(self):
books = Book.objects.annotate(
is_book=Value(True, output_field=BooleanField()),
is_pony=Value(False, output_field=BooleanField()),
is_none=Value(None, output_field=BooleanField(null=True)),
)
self.assertGreater(len(books), 0)
for book in books:
self.assertIs(book.is_book, True)
self.assertIs(book.is_pony, False)
self.assertIsNone(book.is_none)
def test_annotation_in_f_grouped_by_annotation(self):
qs = (
Publisher.objects.annotate(multiplier=Value(3))
            # Grouped by name: sum of multiplier * num_awards per publisher.
.values('name')
.annotate(multiplied_value_sum=Sum(F('multiplier') * F('num_awards')))
.order_by()
)
self.assertCountEqual(
qs, [
{'multiplied_value_sum': 9, 'name': 'Apress'},
{'multiplied_value_sum': 0, 'name': "Jonno's House of Books"},
{'multiplied_value_sum': 27, 'name': 'Morgan Kaufmann'},
{'multiplied_value_sum': 21, 'name': 'Prentice Hall'},
{'multiplied_value_sum': 3, 'name': 'Sams'},
]
)
def test_arguments_must_be_expressions(self):
msg = 'QuerySet.annotate() received non-expression(s): %s.'
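        # Only objects providing resolve_expression() are accepted; bare model
        # fields and Python literals must be wrapped in Value().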
with self.assertRaisesMessage(TypeError, msg % BooleanField()):
Book.objects.annotate(BooleanField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.annotate(is_book=True)
with self.assertRaisesMessage(TypeError, msg % ', '.join([str(BooleanField()), 'True'])):
Book.objects.annotate(BooleanField(), Value(False), is_book=True)
def test_chaining_annotation_filter_with_m2m(self):
qs = Author.objects.filter(
name='Adrian Holovaty',
friends__age=35,
).annotate(
jacob_name=F('friends__name'),
).filter(
friends__age=29,
).annotate(
james_name=F('friends__name'),
).values('jacob_name', 'james_name')
self.assertCountEqual(
qs,
[{'jacob_name': 'Jacob Kaplan-Moss', 'james_name': 'James Bennett'}],
)
def test_annotation_filter_with_subquery(self):
long_books_qs = Book.objects.filter(
publisher=OuterRef('pk'),
pages__gt=400,
).values('publisher').annotate(count=Count('pk')).values('count')
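        # OuterRef('pk') correlates the subquery with each outer publisher; the
        # filter below keeps publishers whose every book exceeds 400 pages.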
publisher_books_qs = Publisher.objects.annotate(
total_books=Count('book'),
).filter(
total_books=Subquery(long_books_qs, output_field=IntegerField()),
).values('name')
self.assertCountEqual(publisher_books_qs, [{'name': 'Sams'}, {'name': 'Morgan Kaufmann'}])
def test_annotation_exists_aggregate_values_chaining(self):
qs = Book.objects.values('publisher').annotate(
has_authors=Exists(Book.authors.through.objects.filter(book=OuterRef('pk'))),
max_pubdate=Max('pubdate'),
).values_list('max_pubdate', flat=True).order_by('max_pubdate')
self.assertCountEqual(qs, [
datetime.date(1991, 10, 15),
datetime.date(2008, 3, 3),
datetime.date(2008, 6, 23),
datetime.date(2008, 11, 3),
])
@skipUnlessDBFeature('supports_subqueries_in_group_by')
def test_annotation_subquery_and_aggregate_values_chaining(self):
qs = Book.objects.annotate(
pub_year=ExtractYear('pubdate')
).values('pub_year').annotate(
top_rating=Subquery(
Book.objects.filter(
pubdate__year=OuterRef('pub_year')
).order_by('-rating').values('rating')[:1]
),
total_pages=Sum('pages'),
).values('pub_year', 'total_pages', 'top_rating')
self.assertCountEqual(qs, [
{'pub_year': 1991, 'top_rating': 5.0, 'total_pages': 946},
{'pub_year': 1995, 'top_rating': 4.0, 'total_pages': 1132},
{'pub_year': 2007, 'top_rating': 4.5, 'total_pages': 447},
{'pub_year': 2008, 'top_rating': 4.0, 'total_pages': 1178},
])
def test_annotation_subquery_outerref_transform(self):
qs = Book.objects.annotate(
top_rating_year=Subquery(
Book.objects.filter(
pubdate__year=OuterRef('pubdate__year')
).order_by('-rating').values('rating')[:1]
),
).values('pubdate__year', 'top_rating_year')
self.assertCountEqual(qs, [
{'pubdate__year': 1991, 'top_rating_year': 5.0},
{'pubdate__year': 1995, 'top_rating_year': 4.0},
{'pubdate__year': 2007, 'top_rating_year': 4.5},
{'pubdate__year': 2008, 'top_rating_year': 4.0},
{'pubdate__year': 2008, 'top_rating_year': 4.0},
{'pubdate__year': 2008, 'top_rating_year': 4.0},
])
def test_annotation_aggregate_with_m2o(self):
qs = Author.objects.filter(age__lt=30).annotate(
max_pages=Case(
When(book_contact_set__isnull=True, then=Value(0)),
default=Max(F('book__pages')),
),
).values('name', 'max_pages')
self.assertCountEqual(qs, [
{'name': 'James Bennett', 'max_pages': 300},
{'name': 'Paul Bissex', 'max_pages': 0},
{'name': 'Wesley J. Chun', 'max_pages': 0},
])
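

# QuerySet.alias() attaches an expression to the query so that it can be reused
# by filter(), order_by(), or later annotations without being selected. A
# minimal sketch of the difference exercised below (names are illustrative):
#
#     Book.objects.alias(discounted=F('price') - 1).filter(discounted__lt=30)
#
# Unlike annotate(), the aliased value never becomes an attribute on the
# returned instances.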
class AliasTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='James Bennett', age=34)
cls.a4 = Author.objects.create(name='Peter Norvig', age=57)
cls.a5 = Author.objects.create(name='Stuart Russell', age=46)
p1 = Publisher.objects.create(name='Apress', num_awards=3)
cls.b1 = Book.objects.create(
isbn='159059725', pages=447, rating=4.5, price=Decimal('30.00'),
contact=cls.a1, publisher=p1, pubdate=datetime.date(2007, 12, 6),
name='The Definitive Guide to Django: Web Development Done Right',
)
cls.b2 = Book.objects.create(
isbn='159059996', pages=300, rating=4.0, price=Decimal('29.69'),
contact=cls.a3, publisher=p1, pubdate=datetime.date(2008, 6, 23),
name='Practical Django Projects',
)
cls.b3 = Book.objects.create(
isbn='013790395', pages=1132, rating=4.0, price=Decimal('82.80'),
contact=cls.a4, publisher=p1, pubdate=datetime.date(1995, 1, 15),
name='Artificial Intelligence: A Modern Approach',
)
cls.b4 = Book.objects.create(
isbn='155860191', pages=946, rating=5.0, price=Decimal('75.00'),
contact=cls.a4, publisher=p1, pubdate=datetime.date(1991, 10, 15),
name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4, cls.a5)
cls.b4.authors.add(cls.a4)
Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
def test_basic_alias(self):
qs = Book.objects.alias(is_book=Value(1))
self.assertIs(hasattr(qs.first(), 'is_book'), False)
def test_basic_alias_annotation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=F('is_book_alias'))
self.assertIs(hasattr(qs.first(), 'is_book_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_basic_alias_f_annotation(self):
qs = Book.objects.alias(
another_rating_alias=F('rating')
).annotate(another_rating=F('another_rating_alias'))
self.assertIs(hasattr(qs.first(), 'another_rating_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.another_rating, book.rating)
def test_basic_alias_f_transform_annotation(self):
qs = Book.objects.alias(
pubdate_alias=F('pubdate'),
).annotate(pubdate_year=F('pubdate_alias__year'))
self.assertIs(hasattr(qs.first(), 'pubdate_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.pubdate_year, book.pubdate.year)
def test_alias_after_annotation(self):
qs = Book.objects.annotate(
is_book=Value(1),
).alias(is_book_alias=F('is_book'))
book = qs.first()
self.assertIs(hasattr(book, 'is_book'), True)
self.assertIs(hasattr(book, 'is_book_alias'), False)
def test_overwrite_annotation_with_alias(self):
qs = Book.objects.annotate(is_book=Value(1)).alias(is_book=F('is_book'))
self.assertIs(hasattr(qs.first(), 'is_book'), False)
def test_overwrite_alias_with_annotation(self):
qs = Book.objects.alias(is_book=Value(1)).annotate(is_book=F('is_book'))
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_annotation_expression(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=Coalesce('is_book_alias', 0))
self.assertIs(hasattr(qs.first(), 'is_book_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_default_alias_expression(self):
qs = Author.objects.alias(
Sum('book__pages'),
).filter(book__pages__sum__gt=2000)
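        # A positional expression gets a default alias derived from its source
        # lookup and aggregate name, here 'book__pages__sum'.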
self.assertIs(hasattr(qs.first(), 'book__pages__sum'), False)
self.assertSequenceEqual(qs, [self.a4])
def test_joined_alias_annotation(self):
qs = Book.objects.select_related('publisher').alias(
num_awards_alias=F('publisher__num_awards'),
).annotate(num_awards=F('num_awards_alias'))
self.assertIs(hasattr(qs.first(), 'num_awards_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_alias_annotate_with_aggregation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
rating_count_alias=Count('rating'),
).annotate(
is_book=F('is_book_alias'),
rating_count=F('rating_count_alias'),
)
book = qs.first()
self.assertIs(hasattr(book, 'is_book_alias'), False)
self.assertIs(hasattr(book, 'rating_count_alias'), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_filter_alias_with_f(self):
qs = Book.objects.alias(
other_rating=F('rating'),
).filter(other_rating=4.5)
self.assertIs(hasattr(qs.first(), 'other_rating'), False)
self.assertSequenceEqual(qs, [self.b1])
def test_filter_alias_with_double_f(self):
qs = Book.objects.alias(
other_rating=F('rating'),
).filter(other_rating=F('rating'))
self.assertIs(hasattr(qs.first(), 'other_rating'), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_filter_alias_agg_with_double_f(self):
qs = Book.objects.alias(
sum_rating=Sum('rating'),
).filter(sum_rating=F('sum_rating'))
self.assertIs(hasattr(qs.first(), 'sum_rating'), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_update_with_alias(self):
Book.objects.alias(
other_rating=F('rating') - 1,
).update(rating=F('other_rating'))
self.b1.refresh_from_db()
self.assertEqual(self.b1.rating, 3.5)
def test_order_by_alias(self):
qs = Author.objects.alias(other_age=F('age')).order_by('other_age')
self.assertIs(hasattr(qs.first(), 'other_age'), False)
self.assertQuerysetEqual(qs, [34, 34, 35, 46, 57], lambda a: a.age)
def test_order_by_alias_aggregate(self):
qs = Author.objects.values('age').alias(age_count=Count('age')).order_by('age_count', 'age')
self.assertIs(hasattr(qs.first(), 'age_count'), False)
self.assertQuerysetEqual(qs, [35, 46, 57, 34], lambda a: a['age'])
def test_dates_alias(self):
qs = Book.objects.alias(
pubdate_alias=F('pubdate'),
).dates('pubdate_alias', 'month')
self.assertCountEqual(qs, [
datetime.date(1991, 10, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 12, 1),
datetime.date(2008, 6, 1),
])
def test_datetimes_alias(self):
qs = Store.objects.alias(
original_opening_alias=F('original_opening'),
).datetimes('original_opening_alias', 'year')
self.assertCountEqual(qs, [
datetime.datetime(1994, 1, 1),
datetime.datetime(2001, 1, 1),
])
def test_aggregate_alias(self):
msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to "
"promote it."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias(
other_age=F('age'),
).aggregate(otherage_sum=Sum('other_age'))
def test_defer_only_alias(self):
qs = Book.objects.alias(rating_alias=F('rating') - 1)
msg = "Book has no field named 'rating_alias'"
for operation in ['defer', 'only']:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldDoesNotExist, msg):
getattr(qs, operation)('rating_alias').first()
@skipUnlessDBFeature('can_distinct_on_fields')
def test_distinct_on_alias(self):
qs = Book.objects.alias(rating_alias=F('rating') - 1)
msg = "Cannot resolve keyword 'rating_alias' into field."
with self.assertRaisesMessage(FieldError, msg):
qs.distinct('rating_alias').first()
def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F('rating') - 1)
msg = (
"Cannot select the 'rating_alias' alias. Use annotate() to "
"promote it."
)
for operation in ['values', 'values_list']:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg):
getattr(qs, operation)('rating_alias')
|
3e764900b33ff5cc9f2c7a2dd7a7a27494db818607ba917002db6fa2b56bc739 | import re
import types
from datetime import datetime, timedelta
from decimal import Decimal
from unittest import TestCase, mock
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.core.validators import (
BaseValidator, DecimalValidator, EmailValidator, FileExtensionValidator,
MaxLengthValidator, MaxValueValidator, MinLengthValidator,
MinValueValidator, ProhibitNullCharactersValidator, RegexValidator,
URLValidator, int_list_validator, validate_comma_separated_integer_list,
validate_email, validate_image_file_extension, validate_integer,
validate_ipv4_address, validate_ipv6_address, validate_ipv46_address,
validate_slug, validate_unicode_slug,
)
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango41Warning
try:
from PIL import Image # noqa
except ImportError:
PILLOW_IS_INSTALLED = False
else:
PILLOW_IS_INSTALLED = True
NOW = datetime.now()
EXTENDED_SCHEMES = ['http', 'https', 'ftp', 'ftps', 'git', 'file', 'git+ssh']
VALID_URLS = [
'http://www.djangoproject.com/',
'HTTP://WWW.DJANGOPROJECT.COM/',
'http://localhost/',
'http://example.com/',
'http://example.com:0',
'http://example.com:0/',
'http://example.com:65535',
'http://example.com:65535/',
'http://example.com./',
'http://www.example.com/',
'http://www.example.com:8000/test',
'http://valid-with-hyphens.com/',
'http://subdomain.example.com/',
'http://a.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'http://200.8.9.10/',
'http://200.8.9.10:8000/test',
'http://su--b.valid-----hyphens.com/',
'http://example.com?something=value',
'http://example.com/index.php?something=value&another=value2',
'https://example.com/',
'ftp://example.com/',
'ftps://example.com/',
'http://foo.com/blah_blah',
'http://foo.com/blah_blah/',
'http://foo.com/blah_blah_(wikipedia)',
'http://foo.com/blah_blah_(wikipedia)_(again)',
'http://www.example.com/wpstyle/?p=364',
'https://www.example.com/foo/?bar=baz&inga=42&quux',
'http://✪df.ws/123',
'http://[email protected]',
'http://[email protected]/',
'http://[email protected]:8080',
'http://[email protected]:8080/',
'http://[email protected]:65535',
'http://[email protected]:65535/',
'http://userid:@example.com',
'http://userid:@example.com/',
'http://userid:@example.com:8080',
'http://userid:@example.com:8080/',
'http://userid:[email protected]',
'http://userid:[email protected]/',
'http://userid:[email protected]:8',
'http://userid:[email protected]:8/',
'http://userid:[email protected]:8080',
'http://userid:[email protected]:8080/',
'http://userid:[email protected]:65535',
'http://userid:[email protected]:65535/',
'https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'[email protected]',
'https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'[email protected]:8080',
'https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'dddddddddddddddddddddd:[email protected]',
'https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd'
'ddddddddddddddddd:[email protected]:8080',
'http://142.42.1.1/',
'http://142.42.1.1:8080/',
'http://➡.ws/䨹',
'http://⌘.ws',
'http://⌘.ws/',
'http://foo.com/blah_(wikipedia)#cite-1',
'http://foo.com/blah_(wikipedia)_blah#cite-1',
'http://foo.com/unicode_(✪)_in_parens',
'http://foo.com/(something)?after=parens',
'http://☺.damowmow.com/',
'http://djangoproject.com/events/#&product=browser',
'http://j.mp',
'ftp://foo.bar/baz',
'http://foo.bar/?q=Test%20URL-encoded%20stuff',
'http://مثال.إختبار',
'http://例子.测试',
'http://उदाहरण.परीक्षा',
"http://-.~_!$&'()*+,;=%40:80%[email protected]",
'http://xn--7sbb4ac0ad0be6cf.xn--p1ai',
'http://1337.net',
'http://a.b-c.de',
'http://223.255.255.254',
'ftps://foo.bar/',
'http://10.1.1.254',
'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80/index.html',
'http://[::192.9.5.5]/ipng',
'http://[::ffff:192.9.5.5]/ipng',
'http://[::1]:8080/',
'http://0.0.0.0/',
'http://255.255.255.255',
'http://224.0.0.0',
'http://224.1.1.1',
'http://111.112.113.114/',
'http://88.88.88.88/',
'http://11.12.13.14/',
'http://10.20.30.40/',
'http://1.2.3.4/',
'http://127.0.01.09.home.lan',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.ex'
'ample.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaa.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaa',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa'
'aaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'http://dashintld.c-m',
'http://multipledashintld.a-b-c',
'http://evenmoredashintld.a---c',
'http://dashinpunytld.xn---c',
]
INVALID_URLS = [
None,
56,
'no_scheme',
'foo',
'http://',
'http://example',
'http://example.',
'http://example.com:-1',
'http://example.com:-1/',
'http://example.com:000000080',
'http://example.com:000000080/',
'http://.com',
'http://invalid-.com',
'http://-invalid.com',
'http://invalid.com-',
'http://invalid.-com',
'http://inv-.alid-.com',
'http://inv-.-alid.com',
'file://localhost/path',
'git://example.com/',
'http://.',
'http://..',
'http://../',
'http://?',
'http://??',
'http://??/',
'http://#',
'http://##',
'http://##/',
'http://foo.bar?q=Spaces should be encoded',
'//',
'//a',
'///a',
'///',
'http:///a',
'foo.com',
'rdar://1234',
'h://test',
'http:// shouldfail.com',
':// should fail',
'http://foo.bar/foo(bar)baz quux',
'http://-error-.invalid/',
'http://dashinpunytld.trailingdot.xn--.',
'http://dashinpunytld.xn---',
'http://-a.b.co',
'http://a.b-.co',
'http://a.-b.co',
'http://a.b-.c.co',
'http:/',
'http://',
'http://',
'http://1.1.1.1.1',
'http://123.123.123',
'http://3628126748',
'http://123',
'http://000.000.000.000',
'http://016.016.016.016',
'http://192.168.000.001',
'http://01.2.3.4',
'http://01.2.3.4',
'http://1.02.3.4',
'http://1.2.03.4',
'http://1.2.3.04',
'http://.www.foo.bar/',
'http://.www.foo.bar./',
'http://[::1:2::3]:8/',
'http://[::1:2::3]:8080/',
'http://[]',
'http://[]:8080',
'http://example..com/',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.e'
'xample.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaa.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaaa',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaa.'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaa'
'aaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaa',
'https://test.[com',
'http://@example.com',
'http://:@example.com',
'http://:[email protected]',
'http://foo@[email protected]',
'http://foo/[email protected]',
'http://foo:bar:[email protected]',
'http://foo:bar@[email protected]',
'http://foo:bar/[email protected]',
'http://invalid-.com/[email protected]',
# Newlines and tabs are not accepted.
'http://www.djangoproject.com/\n',
'http://[::ffff:192.9.5.5]\n',
'http://www.djangoproject.com/\r',
'http://[::ffff:192.9.5.5]\r',
'http://www.django\rproject.com/',
'http://[::\rffff:192.9.5.5]',
'http://\twww.djangoproject.com/',
'http://\t[::ffff:192.9.5.5]',
# Trailing junk does not take forever to reject.
'http://www.asdasdasdasdsadfm.com.br ',
'http://www.asdasdasdasdsadfm.com.br z',
]
TEST_DATA = [
# (validator, value, expected),
(validate_integer, '42', None),
(validate_integer, '-42', None),
(validate_integer, -42, None),
(validate_integer, -42.5, ValidationError),
(validate_integer, None, ValidationError),
(validate_integer, 'a', ValidationError),
(validate_integer, '\n42', ValidationError),
(validate_integer, '42\n', ValidationError),
(validate_email, '[email protected]', None),
(validate_email, '[email protected]', None),
(validate_email, 'email@[127.0.0.1]', None),
(validate_email, 'email@[2001:dB8::1]', None),
(validate_email, 'email@[2001:dB8:0:0:0:0:0:1]', None),
(validate_email, 'email@[::fffF:127.0.0.1]', None),
(validate_email, '[email protected]', None),
(validate_email, '[email protected]', None),
(validate_email, '[email protected].उदाहरण.परीक्षा', None),
(validate_email, 'email@localhost', None),
(EmailValidator(allowlist=['localdomain']), 'email@localdomain', None),
(validate_email, '"test@test"@example.com', None),
(validate_email, 'example@atm.%s' % ('a' * 63), None),
(validate_email, 'example@%s.atm' % ('a' * 63), None),
(validate_email, 'example@%s.%s.atm' % ('a' * 63, 'b' * 10), None),
(validate_email, 'example@atm.%s' % ('a' * 64), ValidationError),
(validate_email, 'example@%s.atm.%s' % ('b' * 64, 'a' * 63), ValidationError),
(validate_email, None, ValidationError),
(validate_email, '', ValidationError),
(validate_email, 'abc', ValidationError),
(validate_email, 'abc@', ValidationError),
(validate_email, 'abc@bar', ValidationError),
(validate_email, 'a @x.cz', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, 'something@@somewhere.com', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, 'email@[127.0.0.256]', ValidationError),
(validate_email, 'email@[2001:db8::12345]', ValidationError),
(validate_email, 'email@[2001:db8:0:0:0:0:1]', ValidationError),
(validate_email, 'email@[::ffff:127.0.0.256]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]', ValidationError),
(validate_email, '[email protected]\n\n<script src="x.js">', ValidationError),
# Quoted-string format (CR not allowed)
(validate_email, '"\\\011"@here.com', None),
(validate_email, '"\\\012"@here.com', ValidationError),
(validate_email, '[email protected].', ValidationError),
# Max length of domain name labels is 63 characters per RFC 1034.
(validate_email, 'a@%s.us' % ('a' * 63), None),
(validate_email, 'a@%s.us' % ('a' * 64), ValidationError),
# Trailing newlines in username or domain not allowed
(validate_email, '[email protected]\n', ValidationError),
(validate_email, 'a\[email protected]', ValidationError),
(validate_email, '"test@test"\[email protected]', ValidationError),
(validate_email, 'a@[127.0.0.1]\n', ValidationError),
(validate_slug, 'slug-ok', None),
(validate_slug, 'longer-slug-still-ok', None),
(validate_slug, '--------', None),
(validate_slug, 'nohyphensoranything', None),
(validate_slug, 'a', None),
(validate_slug, '1', None),
(validate_slug, 'a1', None),
(validate_slug, '', ValidationError),
(validate_slug, ' text ', ValidationError),
(validate_slug, ' ', ValidationError),
(validate_slug, '[email protected]', ValidationError),
(validate_slug, '你好', ValidationError),
(validate_slug, '你 好', ValidationError),
(validate_slug, '\n', ValidationError),
(validate_slug, 'trailing-newline\n', ValidationError),
(validate_unicode_slug, 'slug-ok', None),
(validate_unicode_slug, 'longer-slug-still-ok', None),
(validate_unicode_slug, '--------', None),
(validate_unicode_slug, 'nohyphensoranything', None),
(validate_unicode_slug, 'a', None),
(validate_unicode_slug, '1', None),
(validate_unicode_slug, 'a1', None),
(validate_unicode_slug, '你好', None),
(validate_unicode_slug, '', ValidationError),
(validate_unicode_slug, ' text ', ValidationError),
(validate_unicode_slug, ' ', ValidationError),
(validate_unicode_slug, '[email protected]', ValidationError),
(validate_unicode_slug, '\n', ValidationError),
(validate_unicode_slug, '你 好', ValidationError),
(validate_unicode_slug, 'trailing-newline\n', ValidationError),
(validate_ipv4_address, '1.1.1.1', None),
(validate_ipv4_address, '255.0.0.0', None),
(validate_ipv4_address, '0.0.0.0', None),
(validate_ipv4_address, '256.1.1.1', ValidationError),
(validate_ipv4_address, '25.1.1.', ValidationError),
(validate_ipv4_address, '25,1,1,1', ValidationError),
(validate_ipv4_address, '25.1 .1.1', ValidationError),
(validate_ipv4_address, '1.1.1.1\n', ValidationError),
(validate_ipv4_address, '٧.2٥.3٣.243', ValidationError),
# Leading zeros are forbidden to avoid ambiguity with the octal notation.
(validate_ipv4_address, '000.000.000.000', ValidationError),
(validate_ipv4_address, '016.016.016.016', ValidationError),
(validate_ipv4_address, '192.168.000.001', ValidationError),
(validate_ipv4_address, '01.2.3.4', ValidationError),
(validate_ipv4_address, '01.2.3.4', ValidationError),
(validate_ipv4_address, '1.02.3.4', ValidationError),
(validate_ipv4_address, '1.2.03.4', ValidationError),
(validate_ipv4_address, '1.2.3.04', ValidationError),
# validate_ipv6_address uses django.utils.ipv6, which
# is tested in much greater detail in its own testcase
(validate_ipv6_address, 'fe80::1', None),
(validate_ipv6_address, '::1', None),
(validate_ipv6_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv6_address, '1:2', ValidationError),
(validate_ipv6_address, '::zzz', ValidationError),
(validate_ipv6_address, '12345::', ValidationError),
(validate_ipv46_address, '1.1.1.1', None),
(validate_ipv46_address, '255.0.0.0', None),
(validate_ipv46_address, '0.0.0.0', None),
(validate_ipv46_address, 'fe80::1', None),
(validate_ipv46_address, '::1', None),
(validate_ipv46_address, '1:2:3:4:5:6:7:8', None),
(validate_ipv46_address, '256.1.1.1', ValidationError),
(validate_ipv46_address, '25.1.1.', ValidationError),
(validate_ipv46_address, '25,1,1,1', ValidationError),
(validate_ipv46_address, '25.1 .1.1', ValidationError),
(validate_ipv46_address, '1:2', ValidationError),
(validate_ipv46_address, '::zzz', ValidationError),
(validate_ipv46_address, '12345::', ValidationError),
# Leading zeros are forbidden to avoid ambiguity with the octal notation.
(validate_ipv46_address, '000.000.000.000', ValidationError),
(validate_ipv46_address, '016.016.016.016', ValidationError),
(validate_ipv46_address, '192.168.000.001', ValidationError),
(validate_ipv46_address, '01.2.3.4', ValidationError),
(validate_ipv46_address, '01.2.3.4', ValidationError),
(validate_ipv46_address, '1.02.3.4', ValidationError),
(validate_ipv46_address, '1.2.03.4', ValidationError),
(validate_ipv46_address, '1.2.3.04', ValidationError),
(validate_comma_separated_integer_list, '1', None),
(validate_comma_separated_integer_list, '12', None),
(validate_comma_separated_integer_list, '1,2', None),
(validate_comma_separated_integer_list, '1,2,3', None),
(validate_comma_separated_integer_list, '10,32', None),
(validate_comma_separated_integer_list, '', ValidationError),
(validate_comma_separated_integer_list, 'a', ValidationError),
(validate_comma_separated_integer_list, 'a,b,c', ValidationError),
(validate_comma_separated_integer_list, '1, 2, 3', ValidationError),
(validate_comma_separated_integer_list, ',', ValidationError),
(validate_comma_separated_integer_list, '1,2,3,', ValidationError),
(validate_comma_separated_integer_list, '1,2,', ValidationError),
(validate_comma_separated_integer_list, ',1', ValidationError),
(validate_comma_separated_integer_list, '1,,2', ValidationError),
(int_list_validator(sep='.'), '1.2.3', None),
(int_list_validator(sep='.', allow_negative=True), '1.2.3', None),
(int_list_validator(allow_negative=True), '-1,-2,3', None),
(int_list_validator(allow_negative=True), '1,-2,-12', None),
(int_list_validator(), '-1,2,3', ValidationError),
(int_list_validator(sep='.'), '1,2,3', ValidationError),
(int_list_validator(sep='.'), '1.2.3\n', ValidationError),
(MaxValueValidator(10), 10, None),
(MaxValueValidator(10), -10, None),
(MaxValueValidator(10), 0, None),
(MaxValueValidator(NOW), NOW, None),
(MaxValueValidator(NOW), NOW - timedelta(days=1), None),
(MaxValueValidator(0), 1, ValidationError),
(MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),
(MinValueValidator(-10), -10, None),
(MinValueValidator(-10), 10, None),
(MinValueValidator(-10), 0, None),
(MinValueValidator(NOW), NOW, None),
(MinValueValidator(NOW), NOW + timedelta(days=1), None),
(MinValueValidator(0), -1, ValidationError),
(MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
# limit_value may be a callable.
(MinValueValidator(lambda: 1), 0, ValidationError),
(MinValueValidator(lambda: 1), 1, None),
(MaxLengthValidator(10), '', None),
(MaxLengthValidator(10), 10 * 'x', None),
(MaxLengthValidator(10), 15 * 'x', ValidationError),
(MinLengthValidator(10), 15 * 'x', None),
(MinLengthValidator(10), 10 * 'x', None),
(MinLengthValidator(10), '', ValidationError),
(URLValidator(EXTENDED_SCHEMES), 'file://localhost/path', None),
(URLValidator(EXTENDED_SCHEMES), 'git://example.com/', None),
(URLValidator(EXTENDED_SCHEMES), 'git+ssh://[email protected]/example/hg-git.git', None),
(URLValidator(EXTENDED_SCHEMES), 'git://-invalid.com', ValidationError),
(BaseValidator(True), True, None),
(BaseValidator(True), False, ValidationError),
(RegexValidator(), '', None),
(RegexValidator(), 'x1x2', None),
(RegexValidator('[0-9]+'), 'xxxxxx', ValidationError),
(RegexValidator('[0-9]+'), '1234', None),
(RegexValidator(re.compile('[0-9]+')), '1234', None),
(RegexValidator('.*'), '', None),
(RegexValidator(re.compile('.*')), '', None),
(RegexValidator('.*'), 'xxxxx', None),
(RegexValidator('x'), 'y', ValidationError),
(RegexValidator(re.compile('x')), 'y', ValidationError),
(RegexValidator('x', inverse_match=True), 'y', None),
(RegexValidator(re.compile('x'), inverse_match=True), 'y', None),
(RegexValidator('x', inverse_match=True), 'x', ValidationError),
(RegexValidator(re.compile('x'), inverse_match=True), 'x', ValidationError),
(RegexValidator('x', flags=re.IGNORECASE), 'y', ValidationError),
(RegexValidator('a'), 'A', ValidationError),
(RegexValidator('a', flags=re.IGNORECASE), 'A', None),
(FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.jpg'), ValidationError),
(FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithUnsupportedExt.JPG'), ValidationError),
(FileExtensionValidator(['txt']), ContentFile('contents', name='fileWithNoExtension'), ValidationError),
(FileExtensionValidator(['']), ContentFile('contents', name='fileWithAnExtension.txt'), ValidationError),
(FileExtensionValidator([]), ContentFile('contents', name='file.txt'), ValidationError),
(FileExtensionValidator(['']), ContentFile('contents', name='fileWithNoExtension'), None),
(FileExtensionValidator(['txt']), ContentFile('contents', name='file.txt'), None),
(FileExtensionValidator(['txt']), ContentFile('contents', name='file.TXT'), None),
(FileExtensionValidator(['TXT']), ContentFile('contents', name='file.txt'), None),
(FileExtensionValidator(), ContentFile('contents', name='file.jpg'), None),
(DecimalValidator(max_digits=2, decimal_places=2), Decimal('0.99'), None),
(DecimalValidator(max_digits=2, decimal_places=1), Decimal('0.99'), ValidationError),
(DecimalValidator(max_digits=3, decimal_places=1), Decimal('999'), ValidationError),
(DecimalValidator(max_digits=4, decimal_places=1), Decimal('999'), None),
(DecimalValidator(max_digits=20, decimal_places=2), Decimal('742403889818000000'), None),
(DecimalValidator(20, 2), Decimal('7.42403889818E+17'), None),
(DecimalValidator(max_digits=20, decimal_places=2), Decimal('7424742403889818000000'), ValidationError),
(DecimalValidator(max_digits=5, decimal_places=2), Decimal('7304E-1'), None),
(DecimalValidator(max_digits=5, decimal_places=2), Decimal('7304E-3'), ValidationError),
(DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-5'), None),
(DecimalValidator(max_digits=5, decimal_places=5), Decimal('70E-6'), ValidationError),
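    # Scientific-notation inputs are judged by their effective precision:
    # 7304E-1 is 730.4 (four digits, one decimal place) and passes, while
    # 7304E-3 is 7.304 and exceeds decimal_places=2.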
# 'Enter a number.' errors
*[
(DecimalValidator(decimal_places=2, max_digits=10), Decimal(value), ValidationError)
for value in (
'NaN', '-NaN', '+NaN', 'sNaN', '-sNaN', '+sNaN',
'Inf', '-Inf', '+Inf', 'Infinity', '-Infinity', '+Infinity',
)
],
(validate_image_file_extension, ContentFile('contents', name='file.jpg'), None),
(validate_image_file_extension, ContentFile('contents', name='file.png'), None),
(validate_image_file_extension, ContentFile('contents', name='file.PNG'), None),
(validate_image_file_extension, ContentFile('contents', name='file.txt'), ValidationError),
(validate_image_file_extension, ContentFile('contents', name='file'), ValidationError),
(ProhibitNullCharactersValidator(), '\x00something', ValidationError),
(ProhibitNullCharactersValidator(), 'something', None),
(ProhibitNullCharactersValidator(), None, None),
]
# Add valid and invalid URL tests.
# This only tests the validator without extended schemes.
TEST_DATA.extend((URLValidator(), url, None) for url in VALID_URLS)
TEST_DATA.extend((URLValidator(), url, ValidationError) for url in INVALID_URLS)


class TestValidators(SimpleTestCase):
def test_validators(self):
for validator, value, expected in TEST_DATA:
name = validator.__name__ if isinstance(validator, types.FunctionType) else validator.__class__.__name__
exception_expected = expected is not None and issubclass(expected, Exception)
with self.subTest(name, value=value):
if validator is validate_image_file_extension and not PILLOW_IS_INSTALLED:
self.skipTest('Pillow is required to test validate_image_file_extension.')
if exception_expected:
with self.assertRaises(expected):
validator(value)
else:
self.assertEqual(expected, validator(value))
def test_single_message(self):
v = ValidationError('Not Valid')
self.assertEqual(str(v), "['Not Valid']")
self.assertEqual(repr(v), "ValidationError(['Not Valid'])")
def test_message_list(self):
v = ValidationError(['First Problem', 'Second Problem'])
self.assertEqual(str(v), "['First Problem', 'Second Problem']")
self.assertEqual(repr(v), "ValidationError(['First Problem', 'Second Problem'])")
def test_message_dict(self):
v = ValidationError({'first': ['First Problem']})
self.assertEqual(str(v), "{'first': ['First Problem']}")
self.assertEqual(repr(v), "ValidationError({'first': ['First Problem']})")
def test_regex_validator_flags(self):
msg = 'If the flags are set, regex must be a regular expression string.'
with self.assertRaisesMessage(TypeError, msg):
RegexValidator(re.compile('a'), flags=re.IGNORECASE)
def test_max_length_validator_message(self):
v = MaxLengthValidator(16, message='"%(value)s" has more than %(limit_value)d characters.')
with self.assertRaisesMessage(ValidationError, '"djangoproject.com" has more than 16 characters.'):
v('djangoproject.com')


class TestValidatorEquality(TestCase):
"""
    Validators define correct equality comparisons (#21638).
"""
def test_regex_equality(self):
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
RegexValidator(r'^(?:[0-9\.\-]*)://'),
)
self.assertEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r'^(?:[a-z0-9\.\-]*)://', "oh noes", "invalid"),
RegexValidator(r'^(?:[a-z0-9\.\-]*)://'),
)
self.assertNotEqual(
RegexValidator('', flags=re.IGNORECASE),
RegexValidator(''),
)
self.assertNotEqual(
RegexValidator(''),
RegexValidator('', inverse_match=True),
)
def test_regex_equality_nocache(self):
pattern = r'^(?:[a-z0-9\.\-]*)://'
left = RegexValidator(pattern)
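        # re.purge() clears re's compiled-pattern cache, so the validators below
        # cannot compare equal merely by sharing a cached pattern object.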
re.purge()
right = RegexValidator(pattern)
self.assertEqual(
left,
right,
)
def test_regex_equality_blank(self):
self.assertEqual(
RegexValidator(),
RegexValidator(),
)
def test_email_equality(self):
self.assertEqual(
EmailValidator(),
EmailValidator(),
)
self.assertNotEqual(
EmailValidator(message="BAD EMAIL"),
EmailValidator(),
)
self.assertEqual(
EmailValidator(message="BAD EMAIL", code="bad"),
EmailValidator(message="BAD EMAIL", code="bad"),
)
def test_basic_equality(self):
self.assertEqual(
MaxValueValidator(44),
MaxValueValidator(44),
)
self.assertEqual(MaxValueValidator(44), mock.ANY)
self.assertNotEqual(
MaxValueValidator(44),
MinValueValidator(44),
)
self.assertNotEqual(
MinValueValidator(45),
MinValueValidator(11),
)
def test_decimal_equality(self):
self.assertEqual(
DecimalValidator(1, 2),
DecimalValidator(1, 2),
)
self.assertNotEqual(
DecimalValidator(1, 2),
DecimalValidator(1, 1),
)
self.assertNotEqual(
DecimalValidator(1, 2),
DecimalValidator(2, 2),
)
self.assertNotEqual(
DecimalValidator(1, 2),
MinValueValidator(11),
)
def test_file_extension_equality(self):
self.assertEqual(
FileExtensionValidator(),
FileExtensionValidator()
)
self.assertEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['txt'])
)
self.assertEqual(
FileExtensionValidator(['TXT']),
FileExtensionValidator(['txt'])
)
self.assertEqual(
FileExtensionValidator(['TXT', 'png']),
FileExtensionValidator(['txt', 'png'])
)
self.assertEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['txt'], code='invalid_extension')
)
self.assertNotEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['png'])
)
self.assertNotEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['png', 'jpg'])
)
self.assertNotEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['txt'], code='custom_code')
)
self.assertNotEqual(
FileExtensionValidator(['txt']),
FileExtensionValidator(['txt'], message='custom error message')
)
def test_prohibit_null_characters_validator_equality(self):
self.assertEqual(
ProhibitNullCharactersValidator(message='message', code='code'),
ProhibitNullCharactersValidator(message='message', code='code')
)
self.assertEqual(
ProhibitNullCharactersValidator(),
ProhibitNullCharactersValidator()
)
self.assertNotEqual(
ProhibitNullCharactersValidator(message='message1', code='code'),
ProhibitNullCharactersValidator(message='message2', code='code')
)
self.assertNotEqual(
ProhibitNullCharactersValidator(message='message', code='code1'),
ProhibitNullCharactersValidator(message='message', code='code2')
)


class DeprecationTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango41Warning)
def test_whitelist(self):
validator = EmailValidator(whitelist=['localdomain'])
self.assertEqual(validator.domain_allowlist, ['localdomain'])
self.assertIsNone(validator('email@localdomain'))
self.assertEqual(validator.domain_allowlist, validator.domain_whitelist)
def test_whitelist_warning(self):
msg = "The whitelist argument is deprecated in favor of allowlist."
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
EmailValidator(whitelist='localdomain')
@ignore_warnings(category=RemovedInDjango41Warning)
def test_domain_whitelist(self):
validator = EmailValidator()
validator.domain_whitelist = ['mydomain']
self.assertEqual(validator.domain_allowlist, ['mydomain'])
self.assertEqual(validator.domain_allowlist, validator.domain_whitelist)
def test_domain_whitelist_access_warning(self):
validator = EmailValidator()
msg = (
'The domain_whitelist attribute is deprecated in favor of '
'domain_allowlist.'
)
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
validator.domain_whitelist
def test_domain_whitelist_set_warning(self):
validator = EmailValidator()
msg = (
'The domain_whitelist attribute is deprecated in favor of '
'domain_allowlist.'
)
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
validator.domain_whitelist = ['mydomain']
|
b3d7b0f358012809025f7cfeb5cd967ac6d8f529232bc9c5ce1e8275e7cc230c | from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.client import RequestFactory
from django.urls import reverse
from django.views.generic.base import View
from django.views.generic.edit import CreateView, FormMixin, ModelFormMixin
from . import views
from .forms import AuthorForm
from .models import Artist, Author


class FormMixinTests(SimpleTestCase):
request_factory = RequestFactory()
def test_initial_data(self):
""" Test instance independence of initial data dict (see #16138) """
initial_1 = FormMixin().get_initial()
initial_1['foo'] = 'bar'
initial_2 = FormMixin().get_initial()
self.assertNotEqual(initial_1, initial_2)
def test_get_prefix(self):
""" Test prefix can be set (see #18872) """
test_string = 'test'
get_request = self.request_factory.get('/')
class TestFormMixin(FormMixin):
request = get_request
default_kwargs = TestFormMixin().get_form_kwargs()
self.assertIsNone(default_kwargs.get('prefix'))
set_mixin = TestFormMixin()
set_mixin.prefix = test_string
set_kwargs = set_mixin.get_form_kwargs()
self.assertEqual(test_string, set_kwargs.get('prefix'))
def test_get_form(self):
class TestFormMixin(FormMixin):
request = self.request_factory.get('/')
self.assertIsInstance(
TestFormMixin().get_form(forms.Form), forms.Form,
'get_form() should use provided form class.'
)
class FormClassTestFormMixin(TestFormMixin):
form_class = forms.Form
self.assertIsInstance(
FormClassTestFormMixin().get_form(), forms.Form,
            'get_form() should fall back to get_form_class() if none is provided.'
)
def test_get_context_data(self):
class FormContext(FormMixin):
request = self.request_factory.get('/')
form_class = forms.Form
self.assertIsInstance(FormContext().get_context_data()['form'], forms.Form)


@override_settings(ROOT_URLCONF='generic_views.urls')
class BasicFormTests(TestCase):
def test_post_data(self):
res = self.client.post('/contact/', {'name': "Me", 'message': "Hello"})
self.assertRedirects(res, '/list/authors/')
def test_late_form_validation(self):
"""
A form can be marked invalid in the form_valid() method (#25548).
"""
res = self.client.post('/late-validation/', {'name': "Me", 'message': "Hello"})
self.assertFalse(res.context['form'].is_valid())


class ModelFormMixinTests(SimpleTestCase):
def test_get_form(self):
form_class = views.AuthorGetQuerySetFormView().get_form_class()
self.assertEqual(form_class._meta.model, Author)
def test_get_form_checks_for_object(self):
mixin = ModelFormMixin()
mixin.request = RequestFactory().get('/')
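        # With no self.object set, ModelFormMixin.get_form_kwargs() leaves the
        # 'instance' key out entirely.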
self.assertEqual({'initial': {}, 'prefix': None},
mixin.get_form_kwargs())


@override_settings(ROOT_URLCONF='generic_views.urls')
class CreateViewTests(TestCase):
def test_create(self):
res = self.client.get('/edit/authors/create/')
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context['form'], forms.ModelForm)
self.assertIsInstance(res.context['view'], View)
self.assertNotIn('object', res.context)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/author_form.html')
res = self.client.post('/edit/authors/create/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe'])
def test_create_invalid(self):
res = self.client.post('/edit/authors/create/', {'name': 'A' * 101, 'slug': 'randall-munroe'})
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_form.html')
self.assertEqual(len(res.context['form'].errors), 1)
self.assertEqual(Author.objects.count(), 0)
def test_create_with_object_url(self):
res = self.client.post('/edit/artists/create/', {'name': 'Rene Magritte'})
self.assertEqual(res.status_code, 302)
artist = Artist.objects.get(name='Rene Magritte')
self.assertRedirects(res, '/detail/artist/%d/' % artist.pk)
self.assertQuerysetEqual(Artist.objects.all(), [artist])
def test_create_with_redirect(self):
res = self.client.post('/edit/authors/create/redirect/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/edit/authors/create/')
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe'])
def test_create_with_interpolated_redirect(self):
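        # The view's success_url contains str.format() placeholders (e.g. '{id}')
        # that are filled from the created object's field values.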
res = self.client.post(
'/edit/authors/create/interpolate_redirect/',
{'name': 'Randall Munroe', 'slug': 'randall-munroe'}
)
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe'])
self.assertEqual(res.status_code, 302)
pk = Author.objects.first().pk
self.assertRedirects(res, '/edit/author/%d/update/' % pk)
# Also test with escaped chars in URL
res = self.client.post(
'/edit/authors/create/interpolate_redirect_nonascii/',
{'name': 'John Doe', 'slug': 'john-doe'}
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.get(name='John Doe').pk
self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk))
def test_create_with_special_properties(self):
res = self.client.get('/edit/authors/create/special/')
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context['form'], views.AuthorForm)
self.assertNotIn('object', res.context)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/form.html')
res = self.client.post('/edit/authors/create/special/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
self.assertEqual(res.status_code, 302)
obj = Author.objects.get(slug='randall-munroe')
self.assertRedirects(res, reverse('author_detail', kwargs={'pk': obj.pk}))
self.assertQuerysetEqual(Author.objects.all(), [obj])
def test_create_without_redirect(self):
msg = (
'No URL to redirect to. Either provide a url or define a '
'get_absolute_url method on the Model.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post('/edit/authors/create/naive/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
def test_create_restricted(self):
res = self.client.post(
'/edit/authors/create/restricted/',
{'name': 'Randall Munroe', 'slug': 'randall-munroe'}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/accounts/login/?next=/edit/authors/create/restricted/')
def test_create_view_with_restricted_fields(self):
class MyCreateView(CreateView):
model = Author
fields = ['name']
self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name'])
def test_create_view_all_fields(self):
class MyCreateView(CreateView):
model = Author
fields = '__all__'
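        # fields = '__all__' hands every editable model field to the generated
        # ModelForm ('name' and 'slug' on Author).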
self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name', 'slug'])
def test_create_view_without_explicit_fields(self):
class MyCreateView(CreateView):
model = Author
message = (
"Using ModelFormMixin (base class of MyCreateView) without the "
"'fields' attribute is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
MyCreateView().get_form_class()
def test_define_both_fields_and_form_class(self):
class MyCreateView(CreateView):
model = Author
form_class = AuthorForm
fields = ['name']
message = "Specifying both 'fields' and 'form_class' is not permitted."
with self.assertRaisesMessage(ImproperlyConfigured, message):
MyCreateView().get_form_class()


@override_settings(ROOT_URLCONF='generic_views.urls')
class UpdateViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.author = Author.objects.create(
pk=1, # Required for OneAuthorUpdate.
name='Randall Munroe',
slug='randall-munroe',
)
def test_update_post(self):
res = self.client.get('/edit/author/%d/update/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context['form'], forms.ModelForm)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['author'], self.author)
self.assertTemplateUsed(res, 'generic_views/author_form.html')
self.assertEqual(res.context['view'].get_form_called_count, 1)
        # Modification with POST (the view also accepts PUT, which delegates to post()).
res = self.client.post(
'/edit/author/%d/update/' % self.author.pk,
{'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (xkcd)'])
def test_update_invalid(self):
res = self.client.post(
'/edit/author/%d/update/' % self.author.pk,
{'name': 'A' * 101, 'slug': 'randall-munroe'}
)
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_form.html')
self.assertEqual(len(res.context['form'].errors), 1)
self.assertQuerysetEqual(Author.objects.all(), [self.author])
self.assertEqual(res.context['view'].get_form_called_count, 1)
def test_update_with_object_url(self):
a = Artist.objects.create(name='Rene Magritte')
res = self.client.post('/edit/artists/%d/update/' % a.pk, {'name': 'Rene Magritte'})
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/detail/artist/%d/' % a.pk)
self.assertQuerysetEqual(Artist.objects.all(), [a])
def test_update_with_redirect(self):
res = self.client.post(
'/edit/author/%d/update/redirect/' % self.author.pk,
{'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/edit/authors/create/')
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)'])
def test_update_with_interpolated_redirect(self):
res = self.client.post(
'/edit/author/%d/update/interpolate_redirect/' % self.author.pk,
{'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
)
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)'])
self.assertEqual(res.status_code, 302)
pk = Author.objects.first().pk
self.assertRedirects(res, '/edit/author/%d/update/' % pk)
# Also test with escaped chars in URL
res = self.client.post(
'/edit/author/%d/update/interpolate_redirect_nonascii/' % self.author.pk,
{'name': 'John Doe', 'slug': 'john-doe'}
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.get(name='John Doe').pk
self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk))
def test_update_with_special_properties(self):
res = self.client.get('/edit/author/%d/update/special/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context['form'], views.AuthorForm)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['thingy'], self.author)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/form.html')
res = self.client.post(
'/edit/author/%d/update/special/' % self.author.pk,
{'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/detail/author/%d/' % self.author.pk)
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (author of xkcd)'])
def test_update_without_redirect(self):
msg = (
'No URL to redirect to. Either provide a url or define a '
'get_absolute_url method on the Model.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post(
'/edit/author/%d/update/naive/' % self.author.pk,
{'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
)
def test_update_get_object(self):
res = self.client.get('/edit/author/update/')
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context['form'], forms.ModelForm)
self.assertIsInstance(res.context['view'], View)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['author'], self.author)
self.assertTemplateUsed(res, 'generic_views/author_form.html')
# Modification with both POST and PUT (browser compatible)
res = self.client.post('/edit/author/update/', {'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'})
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.values_list('name', flat=True), ['Randall Munroe (xkcd)'])
@override_settings(ROOT_URLCONF='generic_views.urls')
class DeleteViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.author = Author.objects.create(
name='Randall Munroe',
slug='randall-munroe',
)
def test_delete_by_post(self):
res = self.client.get('/edit/author/%d/delete/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['author'], self.author)
self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html')
# Deletion with POST
res = self.client.post('/edit/author/%d/delete/' % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_by_delete(self):
# Deletion with browser compatible DELETE method
res = self.client.delete('/edit/author/%d/delete/' % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_with_redirect(self):
res = self.client.post('/edit/author/%d/delete/redirect/' % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/edit/authors/create/')
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_with_interpolated_redirect(self):
res = self.client.post('/edit/author/%d/delete/interpolate_redirect/' % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/edit/authors/create/?deleted=%d' % self.author.pk)
self.assertQuerysetEqual(Author.objects.all(), [])
# Also test with escaped chars in URL
a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
res = self.client.post('/edit/author/{}/delete/interpolate_redirect_nonascii/'.format(a.pk))
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/%C3%A9dit/authors/create/?deleted={}'.format(a.pk))
def test_delete_with_special_properties(self):
res = self.client.get('/edit/author/%d/delete/special/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['thingy'], self.author)
self.assertNotIn('author', res.context)
self.assertTemplateUsed(res, 'generic_views/confirm_delete.html')
res = self.client.post('/edit/author/%d/delete/special/' % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_without_redirect(self):
msg = 'No URL to redirect to. Provide a success_url.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post('/edit/author/%d/delete/naive/' % self.author.pk)
def test_delete_with_form_as_post(self):
res = self.client.get('/edit/author/%d/delete/form/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['author'], self.author)
self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html')
res = self.client.post(
'/edit/author/%d/delete/form/' % self.author.pk, data={'confirm': True}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, '/list/authors/')
self.assertSequenceEqual(Author.objects.all(), [])
def test_delete_with_form_as_post_with_validation_error(self):
res = self.client.get('/edit/author/%d/delete/form/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], self.author)
self.assertEqual(res.context['author'], self.author)
self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html')
res = self.client.post('/edit/author/%d/delete/form/' % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context_data['form'].errors), 2)
self.assertEqual(
res.context_data['form'].errors['__all__'],
['You must confirm the delete.'],
)
self.assertEqual(
res.context_data['form'].errors['confirm'],
['This field is required.'],
)
|
8682ffef9d8e5dd7c3edbd004ede4bcc966fce93cb7b0bf84097e77405772340 | from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from django.urls import path, re_path
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView, dates
from . import views
from .models import Book
urlpatterns = [
# TemplateView
path('template/no_template/', TemplateView.as_view()),
path('template/login_required/', login_required(TemplateView.as_view())),
path('template/simple/<foo>/', TemplateView.as_view(template_name='generic_views/about.html')),
path('template/custom/<foo>/', views.CustomTemplateView.as_view(template_name='generic_views/about.html')),
path(
'template/content_type/',
TemplateView.as_view(template_name='generic_views/robots.txt', content_type='text/plain'),
),
path(
'template/cached/<foo>/',
cache_page(2.0)(TemplateView.as_view(template_name='generic_views/about.html')),
),
path(
'template/extra_context/',
TemplateView.as_view(template_name='generic_views/about.html', extra_context={'title': 'Title'}),
),
# DetailView
path('detail/obj/', views.ObjectDetail.as_view()),
path('detail/artist/<int:pk>/', views.ArtistDetail.as_view(), name='artist_detail'),
path('detail/author/<int:pk>/', views.AuthorDetail.as_view(), name='author_detail'),
path('detail/author/bycustompk/<foo>/', views.AuthorDetail.as_view(pk_url_kwarg='foo')),
path('detail/author/byslug/<slug>/', views.AuthorDetail.as_view()),
path('detail/author/bycustomslug/<foo>/', views.AuthorDetail.as_view(slug_url_kwarg='foo')),
path('detail/author/bypkignoreslug/<int:pk>-<slug>/', views.AuthorDetail.as_view()),
path('detail/author/bypkandslug/<int:pk>-<slug>/', views.AuthorDetail.as_view(query_pk_and_slug=True)),
path('detail/author/<int:pk>/template_name_suffix/', views.AuthorDetail.as_view(template_name_suffix='_view')),
path(
'detail/author/<int:pk>/template_name/',
views.AuthorDetail.as_view(template_name='generic_views/about.html'),
),
path('detail/author/<int:pk>/context_object_name/', views.AuthorDetail.as_view(context_object_name='thingy')),
path('detail/author/<int:pk>/custom_detail/', views.AuthorCustomDetail.as_view()),
path('detail/author/<int:pk>/dupe_context_object_name/', views.AuthorDetail.as_view(context_object_name='object')),
path('detail/page/<int:pk>/field/', views.PageDetail.as_view()),
    path('detail/author/invalid/url/', views.AuthorDetail.as_view()),
path('detail/author/invalid/qs/', views.AuthorDetail.as_view(queryset=None)),
path('detail/nonmodel/1/', views.NonModelDetail.as_view()),
path('detail/doesnotexist/<pk>/', views.ObjectDoesNotExistDetail.as_view()),
# FormView
path('contact/', views.ContactView.as_view()),
path('late-validation/', views.LateValidationView.as_view()),
# Create/UpdateView
path('edit/artists/create/', views.ArtistCreate.as_view()),
path('edit/artists/<int:pk>/update/', views.ArtistUpdate.as_view()),
path('edit/authors/create/naive/', views.NaiveAuthorCreate.as_view()),
path('edit/authors/create/redirect/', views.NaiveAuthorCreate.as_view(success_url='/edit/authors/create/')),
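    # A success_url containing '{id}' is interpolated with the saved object's attributes by the view.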
path(
'edit/authors/create/interpolate_redirect/',
views.NaiveAuthorCreate.as_view(success_url='/edit/author/{id}/update/'),
),
path(
'edit/authors/create/interpolate_redirect_nonascii/',
views.NaiveAuthorCreate.as_view(success_url='/%C3%A9dit/author/{id}/update/'),
),
path('edit/authors/create/restricted/', views.AuthorCreateRestricted.as_view()),
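    # [eé] lets both the ASCII ('edit') and non-ASCII ('édit') form of this URL resolve, matching the
    # percent-encoded (%C3%A9) redirect targets used elsewhere in this URLconf.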
re_path('^[eé]dit/authors/create/$', views.AuthorCreate.as_view()),
path('edit/authors/create/special/', views.SpecializedAuthorCreate.as_view()),
path('edit/author/<int:pk>/update/naive/', views.NaiveAuthorUpdate.as_view()),
path(
'edit/author/<int:pk>/update/redirect/',
views.NaiveAuthorUpdate.as_view(success_url='/edit/authors/create/')
),
path(
'edit/author/<int:pk>/update/interpolate_redirect/',
views.NaiveAuthorUpdate.as_view(success_url='/edit/author/{id}/update/')
),
path(
'edit/author/<int:pk>/update/interpolate_redirect_nonascii/',
views.NaiveAuthorUpdate.as_view(success_url='/%C3%A9dit/author/{id}/update/'),
),
re_path('^[eé]dit/author/(?P<pk>[0-9]+)/update/$', views.AuthorUpdate.as_view()),
path('edit/author/update/', views.OneAuthorUpdate.as_view()),
path('edit/author/<int:pk>/update/special/', views.SpecializedAuthorUpdate.as_view()),
path('edit/author/<int:pk>/delete/naive/', views.NaiveAuthorDelete.as_view()),
path(
'edit/author/<int:pk>/delete/redirect/',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/'),
),
path(
'edit/author/<int:pk>/delete/interpolate_redirect/',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/?deleted={id}')
),
path(
'edit/author/<int:pk>/delete/interpolate_redirect_nonascii/',
views.NaiveAuthorDelete.as_view(success_url='/%C3%A9dit/authors/create/?deleted={id}')
),
path('edit/author/<int:pk>/delete/', views.AuthorDelete.as_view()),
path('edit/author/<int:pk>/delete/special/', views.SpecializedAuthorDelete.as_view()),
path('edit/author/<int:pk>/delete/form/', views.AuthorDeleteFormView.as_view()),
# ArchiveIndexView
path('dates/books/', views.BookArchive.as_view()),
path('dates/books/context_object_name/', views.BookArchive.as_view(context_object_name='thingies')),
path('dates/books/allow_empty/', views.BookArchive.as_view(allow_empty=True)),
path('dates/books/template_name/', views.BookArchive.as_view(template_name='generic_views/list.html')),
path('dates/books/template_name_suffix/', views.BookArchive.as_view(template_name_suffix='_detail')),
path('dates/books/invalid/', views.BookArchive.as_view(queryset=None)),
path('dates/books/paginated/', views.BookArchive.as_view(paginate_by=10)),
path('dates/books/reverse/', views.BookArchive.as_view(queryset=Book.objects.order_by('pubdate'))),
path('dates/books/by_month/', views.BookArchive.as_view(date_list_period='month')),
path('dates/booksignings/', views.BookSigningArchive.as_view()),
path('dates/books/sortedbyname/', views.BookArchive.as_view(ordering='name')),
path('dates/books/sortedbynamedec/', views.BookArchive.as_view(ordering='-name')),
path('dates/books/without_date_field/', views.BookArchiveWithoutDateField.as_view()),
# ListView
path('list/dict/', views.DictList.as_view()),
path('list/dict/paginated/', views.DictList.as_view(paginate_by=1)),
path('list/artists/', views.ArtistList.as_view(), name='artists_list'),
path('list/authors/', views.AuthorList.as_view(), name='authors_list'),
path('list/authors/paginated/', views.AuthorList.as_view(paginate_by=30)),
path('list/authors/paginated/<int:page>/', views.AuthorList.as_view(paginate_by=30)),
path('list/authors/paginated-orphaned/', views.AuthorList.as_view(paginate_by=30, paginate_orphans=2)),
path('list/authors/notempty/', views.AuthorList.as_view(allow_empty=False)),
path('list/authors/notempty/paginated/', views.AuthorList.as_view(allow_empty=False, paginate_by=2)),
path('list/authors/template_name/', views.AuthorList.as_view(template_name='generic_views/list.html')),
path('list/authors/template_name_suffix/', views.AuthorList.as_view(template_name_suffix='_objects')),
path('list/authors/context_object_name/', views.AuthorList.as_view(context_object_name='author_list')),
path('list/authors/dupe_context_object_name/', views.AuthorList.as_view(context_object_name='object_list')),
path('list/authors/invalid/', views.AuthorList.as_view(queryset=None)),
path(
'list/authors/get_queryset/',
views.AuthorListGetQuerysetReturnsNone.as_view(),
),
path(
'list/authors/paginated/custom_class/',
views.AuthorList.as_view(paginate_by=5, paginator_class=views.CustomPaginator),
),
path('list/authors/paginated/custom_page_kwarg/', views.AuthorList.as_view(paginate_by=30, page_kwarg='pagina')),
path('list/authors/paginated/custom_constructor/', views.AuthorListCustomPaginator.as_view()),
path('list/books/sorted/', views.BookList.as_view(ordering='name')),
path('list/books/sortedbypagesandnamedec/', views.BookList.as_view(ordering=('pages', '-name'))),
# YearArchiveView
# Mixing keyword and positional captures below is intentional; the views
# ought to be able to accept either.
path('dates/books/<int:year>/', views.BookYearArchive.as_view()),
path('dates/books/<int:year>/make_object_list/', views.BookYearArchive.as_view(make_object_list=True)),
path('dates/books/<int:year>/allow_empty/', views.BookYearArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/allow_future/', views.BookYearArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/paginated/', views.BookYearArchive.as_view(make_object_list=True, paginate_by=30)),
path(
'dates/books/<int:year>/sortedbyname/',
views.BookYearArchive.as_view(make_object_list=True, ordering='name'),
),
path(
'dates/books/<int:year>/sortedbypageandnamedec/',
views.BookYearArchive.as_view(make_object_list=True, ordering=('pages', '-name')),
),
path('dates/books/no_year/', views.BookYearArchive.as_view()),
path('dates/books/<int:year>/reverse/', views.BookYearArchive.as_view(queryset=Book.objects.order_by('pubdate'))),
path('dates/booksignings/<int:year>/', views.BookSigningYearArchive.as_view()),
# MonthArchiveView
path('dates/books/<int:year>/<int:month>/', views.BookMonthArchive.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/', views.BookMonthArchive.as_view()),
path('dates/books/without_month/<int:year>/', views.BookMonthArchive.as_view()),
path('dates/books/<int:year>/<month>/allow_empty/', views.BookMonthArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/<month>/allow_future/', views.BookMonthArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/<month>/paginated/', views.BookMonthArchive.as_view(paginate_by=30)),
path('dates/books/<int:year>/no_month/', views.BookMonthArchive.as_view()),
path('dates/booksignings/<int:year>/<month>/', views.BookSigningMonthArchive.as_view()),
# WeekArchiveView
path('dates/books/<int:year>/week/<int:week>/', views.BookWeekArchive.as_view()),
path('dates/books/<int:year>/week/<int:week>/allow_empty/', views.BookWeekArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/week/<int:week>/allow_future/', views.BookWeekArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/week/<int:week>/paginated/', views.BookWeekArchive.as_view(paginate_by=30)),
path('dates/books/<int:year>/week/no_week/', views.BookWeekArchive.as_view()),
path('dates/books/<int:year>/week/<int:week>/monday/', views.BookWeekArchive.as_view(week_format='%W')),
path(
'dates/books/<int:year>/week/<int:week>/unknown_week_format/',
views.BookWeekArchive.as_view(week_format='%T'),
),
path(
'dates/books/<int:year>/week/<int:week>/iso_format/',
views.BookWeekArchive.as_view(year_format='%G', week_format='%V'),
),
path(
'dates/books/<int:year>/week/<int:week>/invalid_iso_week_year_format/',
views.BookWeekArchive.as_view(week_format='%V'),
),
path('dates/booksignings/<int:year>/week/<int:week>/', views.BookSigningWeekArchive.as_view()),
# DayArchiveView
path('dates/books/<int:year>/<int:month>/<int:day>/', views.BookDayArchive.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/<int:day>/', views.BookDayArchive.as_view()),
path('dates/books/<int:year>/<month>/<int:day>/allow_empty/', views.BookDayArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/<month>/<int:day>/allow_future/', views.BookDayArchive.as_view(allow_future=True)),
path(
'dates/books/<int:year>/<month>/<int:day>/allow_empty_and_future/',
views.BookDayArchive.as_view(allow_empty=True, allow_future=True),
),
path('dates/books/<int:year>/<month>/<int:day>/paginated/', views.BookDayArchive.as_view(paginate_by=True)),
path('dates/books/<int:year>/<month>/no_day/', views.BookDayArchive.as_view()),
path('dates/booksignings/<int:year>/<month>/<int:day>/', views.BookSigningDayArchive.as_view()),
# TodayArchiveView
path('dates/books/today/', views.BookTodayArchive.as_view()),
path('dates/books/today/allow_empty/', views.BookTodayArchive.as_view(allow_empty=True)),
path('dates/booksignings/today/', views.BookSigningTodayArchive.as_view()),
# DateDetailView
path('dates/books/<int:year>/<int:month>/<day>/<int:pk>/', views.BookDetail.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/<day>/<int:pk>/', views.BookDetail.as_view()),
path(
'dates/books/<int:year>/<month>/<int:day>/<int:pk>/allow_future/',
views.BookDetail.as_view(allow_future=True),
),
path('dates/books/<int:year>/<month>/<int:day>/nopk/', views.BookDetail.as_view()),
path('dates/books/<int:year>/<month>/<int:day>/byslug/<slug:slug>/', views.BookDetail.as_view()),
path(
'dates/books/get_object_custom_queryset/<int:year>/<month>/<int:day>/<int:pk>/',
views.BookDetailGetObjectCustomQueryset.as_view(),
),
path('dates/booksignings/<int:year>/<month>/<int:day>/<int:pk>/', views.BookSigningDetail.as_view()),
# Useful for testing redirects
path('accounts/login/', auth_views.LoginView.as_view()),
path('BaseDateListViewTest/', dates.BaseDateListView.as_view()),
]
|
91b6c77ca4b315b5a31b71715187b09a6456aec4e8bfac4082438c65e21921b8 | from django import forms
from .models import Author
class AuthorForm(forms.ModelForm):
name = forms.CharField()
slug = forms.SlugField()
class Meta:
model = Author
fields = ['name', 'slug']
class ContactForm(forms.Form):
name = forms.CharField()
message = forms.CharField(widget=forms.Textarea)
class ConfirmDeleteForm(forms.Form):
confirm = forms.BooleanField()
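    # If the checkbox is left unchecked, 'confirm' is missing from cleaned_data and a non-field error
    # is added on top of the field's own "required" error (the delete-with-form tests expect both).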
def clean(self):
cleaned_data = super().clean()
if 'confirm' not in cleaned_data:
raise forms.ValidationError('You must confirm the delete.')
|
e8e3d5be72514dad758a69a473963cbaab44c3e2a24696b928ec7407a6d89076 | from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.views import generic
from .forms import AuthorForm, ConfirmDeleteForm, ContactForm
from .models import Artist, Author, Book, BookSigning, Page
class CustomTemplateView(generic.TemplateView):
template_name = 'generic_views/about.html'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'key': 'value'})
return context
class ObjectDetail(generic.DetailView):
template_name = 'generic_views/detail.html'
def get_object(self):
return {'foo': 'bar'}
class ArtistDetail(generic.DetailView):
queryset = Artist.objects.all()
class AuthorDetail(generic.DetailView):
queryset = Author.objects.all()
class AuthorCustomDetail(generic.DetailView):
template_name = 'generic_views/author_detail.html'
queryset = Author.objects.all()
def get(self, request, *args, **kwargs):
# Ensures get_context_object_name() doesn't reference self.object.
author = self.get_object()
context = {'custom_' + self.get_context_object_name(author): author}
return self.render_to_response(context)
class PageDetail(generic.DetailView):
queryset = Page.objects.all()
template_name_field = 'template'
class DictList(generic.ListView):
"""A ListView that doesn't use a model."""
queryset = [
{'first': 'John', 'last': 'Lennon'},
{'first': 'Yoko', 'last': 'Ono'}
]
template_name = 'generic_views/list.html'
class ArtistList(generic.ListView):
template_name = 'generic_views/list.html'
queryset = Artist.objects.all()
class AuthorList(generic.ListView):
queryset = Author.objects.all()
class AuthorListGetQuerysetReturnsNone(AuthorList):
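    # get_queryset() deliberately returns None so tests can exercise the error raised when a
    # ListView's queryset resolves to None.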
def get_queryset(self):
return None
class BookList(generic.ListView):
model = Book
class CustomPaginator(Paginator):
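    # Always paginate with orphans=2, regardless of the orphans value passed in by the view.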
def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
super().__init__(queryset, page_size, orphans=2, allow_empty_first_page=allow_empty_first_page)
class AuthorListCustomPaginator(AuthorList):
paginate_by = 5
def get_paginator(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
return super().get_paginator(queryset, page_size, orphans=2, allow_empty_first_page=allow_empty_first_page)
class ContactView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy('authors_list')
template_name = 'generic_views/form.html'
class ArtistCreate(generic.CreateView):
model = Artist
fields = '__all__'
class NaiveAuthorCreate(generic.CreateView):
queryset = Author.objects.all()
fields = '__all__'
class TemplateResponseWithoutTemplate(generic.detail.SingleObjectTemplateResponseMixin, generic.View):
# we don't define the usual template_name here
def __init__(self):
        # Dummy object, but attr is required by get_template_names()
self.object = None
class AuthorCreate(generic.CreateView):
model = Author
success_url = '/list/authors/'
fields = '__all__'
class SpecializedAuthorCreate(generic.CreateView):
model = Author
form_class = AuthorForm
template_name = 'generic_views/form.html'
context_object_name = 'thingy'
def get_success_url(self):
return reverse('author_detail', args=[self.object.id])
class AuthorCreateRestricted(AuthorCreate):
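    # Wrap the inherited post() with login_required so anonymous POST requests are redirected to the
    # login page.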
post = method_decorator(login_required)(AuthorCreate.post)
class ArtistUpdate(generic.UpdateView):
model = Artist
fields = '__all__'
class NaiveAuthorUpdate(generic.UpdateView):
queryset = Author.objects.all()
fields = '__all__'
class AuthorUpdate(generic.UpdateView):
get_form_called_count = 0 # Used to ensure get_form() is called once.
model = Author
success_url = '/list/authors/'
fields = '__all__'
def get_form(self, *args, **kwargs):
self.get_form_called_count += 1
return super().get_form(*args, **kwargs)
class OneAuthorUpdate(generic.UpdateView):
success_url = '/list/authors/'
fields = '__all__'
def get_object(self):
return Author.objects.get(pk=1)
class SpecializedAuthorUpdate(generic.UpdateView):
model = Author
form_class = AuthorForm
template_name = 'generic_views/form.html'
context_object_name = 'thingy'
def get_success_url(self):
return reverse('author_detail', args=[self.object.id])
class NaiveAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
class AuthorDelete(generic.DeleteView):
model = Author
success_url = '/list/authors/'
class AuthorDeleteFormView(generic.DeleteView):
model = Author
form_class = ConfirmDeleteForm
def get_success_url(self):
return reverse('authors_list')
class SpecializedAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
template_name = 'generic_views/confirm_delete.html'
context_object_name = 'thingy'
success_url = reverse_lazy('authors_list')
class BookConfig:
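    # Queryset/date_field configuration shared by all of the Book date-based archive views below.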
queryset = Book.objects.all()
date_field = 'pubdate'
class BookArchive(BookConfig, generic.ArchiveIndexView):
pass
class BookYearArchive(BookConfig, generic.YearArchiveView):
pass
class BookMonthArchive(BookConfig, generic.MonthArchiveView):
pass
class BookWeekArchive(BookConfig, generic.WeekArchiveView):
pass
class BookDayArchive(BookConfig, generic.DayArchiveView):
pass
class BookTodayArchive(BookConfig, generic.TodayArchiveView):
pass
class BookDetail(BookConfig, generic.DateDetailView):
pass
class AuthorGetQuerySetFormView(generic.edit.ModelFormMixin):
fields = '__all__'
def get_queryset(self):
return Author.objects.all()
class BookDetailGetObjectCustomQueryset(BookDetail):
def get_object(self, queryset=None):
return super().get_object(queryset=Book.objects.filter(pk=self.kwargs['pk']))
class CustomMultipleObjectMixinView(generic.list.MultipleObjectMixin, generic.View):
queryset = [
{'name': 'John'},
{'name': 'Yoko'},
]
def get(self, request):
self.object_list = self.get_queryset()
class CustomContextView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name='dummy')
def get_object(self):
return Book(name="dummy")
def get_context_data(self, **kwargs):
context = {'custom_key': 'custom_value'}
context.update(kwargs)
return super().get_context_data(**context)
def get_context_object_name(self, obj):
return "test_name"
class CustomSingleObjectView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name="dummy")
class BookSigningConfig:
model = BookSigning
date_field = 'event_date'
# use the same templates as for books
def get_template_names(self):
return ['generic_views/book%s.html' % self.template_name_suffix]
class BookSigningArchive(BookSigningConfig, generic.ArchiveIndexView):
pass
class BookSigningYearArchive(BookSigningConfig, generic.YearArchiveView):
pass
class BookSigningMonthArchive(BookSigningConfig, generic.MonthArchiveView):
pass
class BookSigningWeekArchive(BookSigningConfig, generic.WeekArchiveView):
pass
class BookSigningDayArchive(BookSigningConfig, generic.DayArchiveView):
pass
class BookSigningTodayArchive(BookSigningConfig, generic.TodayArchiveView):
pass
class BookArchiveWithoutDateField(generic.ArchiveIndexView):
queryset = Book.objects.all()
class BookSigningDetail(BookSigningConfig, generic.DateDetailView):
context_object_name = 'book'
class NonModel:
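    # Plain object (not a model) used to exercise DetailView with a non-model get_object().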
id = "non_model_1"
_meta = None
class NonModelDetail(generic.DetailView):
template_name = 'generic_views/detail.html'
model = NonModel
def get_object(self, queryset=None):
return NonModel()
class ObjectDoesNotExistDetail(generic.DetailView):
def get_queryset(self):
return Book.does_not_exist.all()
class LateValidationView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy('authors_list')
template_name = 'generic_views/form.html'
def form_valid(self, form):
form.add_error(None, 'There is an error')
return self.form_invalid(form)
|
eb8ad84ee8b7ae70ca2f3987dd905151d79ad4a75ad9233f88b681c2e563e7a6 | import gzip
import random
import re
import struct
from io import BytesIO
from urllib.parse import quote
from django.conf import settings
from django.core import mail
from django.core.exceptions import PermissionDenied
from django.http import (
FileResponse, HttpRequest, HttpResponse, HttpResponseNotFound,
HttpResponsePermanentRedirect, HttpResponseRedirect, StreamingHttpResponse,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import (
BrokenLinkEmailsMiddleware, CommonMiddleware,
)
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.test import RequestFactory, SimpleTestCase, override_settings
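# Packs a single integer in the range 0-255 into one byte; used below to build random,
# incompressible content.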
int2byte = struct.Struct(">B").pack
def get_response_empty(request):
return HttpResponse()
def get_response_404(request):
return HttpResponseNotFound()
@override_settings(ROOT_URLCONF='middleware.urls')
class CommonMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get('/slash/')
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("Here's the text of the web page.")
request = self.rf.get('/noslash')
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(
CommonMiddleware(get_response)(request).content,
b"Here's the text of the web page.",
)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get('/unknown')
response = CommonMiddleware(get_response_404)(request)
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get('/slash')
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring(self):
"""
APPEND_SLASH should preserve querystrings when redirecting.
"""
request = self.rf.get('/slash?test=1')
resp = CommonMiddleware(get_response_404)(request)
self.assertEqual(resp.url, '/slash/?test=1')
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring_have_slash(self):
"""
APPEND_SLASH should append slash to path when redirecting a request
with a querystring ending with slash.
"""
request = self.rf.get('/slash?test=slash/')
resp = CommonMiddleware(get_response_404)(request)
self.assertIsInstance(resp, HttpResponsePermanentRedirect)
self.assertEqual(resp.url, '/slash/?test=slash/')
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG(self):
"""
While in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST, PUT, or PATCH to a URL which
would normally be redirected to a slashed version.
"""
msg = "maintaining %s data. Change your form to point to testserver/slash/"
request = self.rf.get('/slash')
request.method = 'POST'
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get('/slash')
request.method = 'PUT'
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get('/slash')
request.method = 'PATCH'
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get('/slash')
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_opt_out(self):
"""
Views marked with @no_append_slash should be left alone.
"""
request = self.rf.get('/sensitive_fbv')
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
request = self.rf.get('/sensitive_cbv')
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote('/needsquoting#'))
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/needsquoting%23/')
@override_settings(APPEND_SLASH=True)
def test_append_slash_leading_slashes(self):
"""
Paths starting with two slashes are escaped to prevent open redirects.
If there's a URL pattern that allows paths to start with two slashes, a
request with path //evil.com must not redirect to //evil.com/ (appended
slash) which is a schemaless absolute URL. The browser would navigate
to evil.com/.
"""
# Use 4 slashes because of RequestFactory behavior.
request = self.rf.get('////evil.com/security')
r = CommonMiddleware(get_response_404).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/%2Fevil.com/security/')
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/%2Fevil.com/security/')
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www(self):
request = self.rf.get('/path/')
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/path/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash(self):
request = self.rf.get('/slash/')
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/slash/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless(self):
request = self.rf.get('/slash')
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/slash/')
# The following tests examine expected behavior given a custom URLconf that
# overrides the default one through the request object.
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash_custom_urlconf(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get('/customurlconf/slash/')
request.urlconf = 'middleware.extra_urls'
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource_custom_urlconf(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("web content")
request = self.rf.get('/customurlconf/noslash')
request.urlconf = 'middleware.extra_urls'
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(CommonMiddleware(get_response)(request).content, b'web content')
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown_custom_urlconf(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get('/customurlconf/unknown')
request.urlconf = 'middleware.extra_urls'
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_custom_urlconf(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(r, "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/customurlconf/slash/')
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self):
"""
While in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST to a URL which would normally be
redirected to a slashed version.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
request.method = 'POST'
with self.assertRaisesMessage(RuntimeError, 'end in a slash'):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled_custom_urlconf(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted_custom_urlconf(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote('/customurlconf/needsquoting#'))
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(r, "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/customurlconf/needsquoting%23/')
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www_custom_urlconf(self):
request = self.rf.get('/customurlconf/path/')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/customurlconf/path/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash_custom_urlconf(self):
request = self.rf.get('/customurlconf/slash/')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/')
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless_custom_urlconf(self):
request = self.rf.get('/customurlconf/slash')
request.urlconf = 'middleware.extra_urls'
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/')
# Tests for the Content-Length header
def test_content_length_header_added(self):
def get_response(req):
response = HttpResponse('content')
self.assertNotIn('Content-Length', response)
return response
response = CommonMiddleware(get_response)(self.rf.get('/'))
self.assertEqual(int(response.headers['Content-Length']), len(response.content))
def test_content_length_header_not_added_for_streaming_response(self):
def get_response(req):
response = StreamingHttpResponse('content')
self.assertNotIn('Content-Length', response)
return response
response = CommonMiddleware(get_response)(self.rf.get('/'))
self.assertNotIn('Content-Length', response)
def test_content_length_header_not_changed(self):
bad_content_length = 500
def get_response(req):
response = HttpResponse()
response.headers['Content-Length'] = bad_content_length
return response
response = CommonMiddleware(get_response)(self.rf.get('/'))
self.assertEqual(int(response.headers['Content-Length']), bad_content_length)
# Other tests
@override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')])
def test_disallowed_user_agents(self):
request = self.rf.get('/slash')
request.META['HTTP_USER_AGENT'] = 'foo'
with self.assertRaisesMessage(PermissionDenied, 'Forbidden user agent'):
CommonMiddleware(get_response_empty).process_request(request)
def test_non_ascii_query_string_does_not_crash(self):
"""Regression test for #15152"""
request = self.rf.get('/slash')
request.META['QUERY_STRING'] = 'drink=café'
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
def test_response_redirect_class(self):
request = self.rf.get('/slash')
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, '/slash/')
self.assertIsInstance(r, HttpResponsePermanentRedirect)
def test_response_redirect_class_subclass(self):
class MyCommonMiddleware(CommonMiddleware):
response_redirect_class = HttpResponseRedirect
request = self.rf.get('/slash')
r = MyCommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, '/slash/')
self.assertIsInstance(r, HttpResponseRedirect)
@override_settings(
IGNORABLE_404_URLS=[re.compile(r'foo')],
MANAGERS=[('PHD', '[email protected]')],
)
class BrokenLinkEmailsMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
def setUp(self):
self.req = self.rf.get('/regular_url/that/does/not/exist')
def get_response(self, req):
return self.client.get(req.path)
def test_404_error_reporting(self):
self.req.META['HTTP_REFERER'] = '/another/url/'
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Broken', mail.outbox[0].subject)
def test_404_error_reporting_no_referer(self):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_404_error_reporting_ignored_url(self):
self.req.path = self.req.path_info = 'foo_url/that/does/not/exist'
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_custom_request_checker(self):
class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
ignored_user_agent_patterns = (re.compile(r'Spider.*'), re.compile(r'Robot.*'))
def is_ignorable_request(self, request, uri, domain, referer):
'''Check user-agent in addition to normal checks.'''
if super().is_ignorable_request(request, uri, domain, referer):
return True
user_agent = request.META['HTTP_USER_AGENT']
return any(pattern.search(user_agent) for pattern in self.ignored_user_agent_patterns)
self.req.META['HTTP_REFERER'] = '/another/url/'
self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4'
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
self.req.META['HTTP_USER_AGENT'] = 'My user agent'
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
def test_referer_equal_to_requested_url(self):
"""
Some bots set the referer to the current URL to avoid being blocked by
        a referer check (#25302).
"""
self.req.META['HTTP_REFERER'] = self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with scheme and domain should also be ignored
self.req.META['HTTP_REFERER'] = 'http://testserver%s' % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with a different scheme should be ignored as well because bots
# tend to use http:// in referers even when browsing HTTPS websites.
self.req.META['HTTP_X_PROTO'] = 'https'
self.req.META['SERVER_PORT'] = 443
with self.settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_PROTO', 'https')):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_referer_equal_to_requested_url_on_another_domain(self):
self.req.META['HTTP_REFERER'] = 'http://anotherserver%s' % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
@override_settings(APPEND_SLASH=True)
def test_referer_equal_to_requested_url_without_trailing_slash_when_append_slash_is_set(self):
self.req.path = self.req.path_info = '/regular_url/that/does/not/exist/'
self.req.META['HTTP_REFERER'] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
@override_settings(APPEND_SLASH=False)
def test_referer_equal_to_requested_url_without_trailing_slash_when_append_slash_is_unset(self):
self.req.path = self.req.path_info = '/regular_url/that/does/not/exist/'
self.req.META['HTTP_REFERER'] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
@override_settings(ROOT_URLCONF='middleware.cond_get_urls')
class ConditionalGetMiddlewareTest(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get('/')
self.resp_headers = {}
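    # Headers set in resp_headers by individual tests are copied onto the response returned below.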
def get_response(self, req):
resp = self.client.get(req.path_info)
for key, value in self.resp_headers.items():
resp[key] = value
return resp
# Tests for the ETag header
def test_middleware_calculates_etag(self):
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertNotEqual('', resp['ETag'])
def test_middleware_wont_overwrite_etag(self):
self.resp_headers['ETag'] = 'eggs'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertEqual('eggs', resp['ETag'])
def test_no_etag_streaming_response(self):
def get_response(req):
return StreamingHttpResponse(['content'])
self.assertFalse(ConditionalGetMiddleware(get_response)(self.req).has_header('ETag'))
def test_no_etag_response_empty_content(self):
def get_response(req):
return HttpResponse()
self.assertFalse(ConditionalGetMiddleware(get_response)(self.req).has_header('ETag'))
def test_no_etag_no_store_cache(self):
self.resp_headers['Cache-Control'] = 'No-Cache, No-Store, Max-age=0'
self.assertFalse(ConditionalGetMiddleware(self.get_response)(self.req).has_header('ETag'))
def test_etag_extended_cache_control(self):
self.resp_headers['Cache-Control'] = 'my-directive="my-no-store"'
self.assertTrue(ConditionalGetMiddleware(self.get_response)(self.req).has_header('ETag'))
def test_if_none_match_and_no_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_none_match_and_etag(self):
self.resp_headers['ETag'] = 'eggs'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_same_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = '"spam"'
self.resp_headers['ETag'] = '"spam"'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_none_match_and_different_etag(self):
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
self.resp_headers['ETag'] = 'eggs'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp['ETag'] = 'spam'
resp['Location'] = '/'
resp.status_code = 301
return resp
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_none_match_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp['ETag'] = 'spam'
resp.status_code = 400
return resp
self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
# Tests for the Last-Modified header
def test_if_modified_since_and_no_last_modified(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_modified_since_and_last_modified(self):
self.resp_headers['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_modified_since_and_same_last_modified(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp_headers['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(self.resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_past(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp_headers['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_future(self):
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
self.resp_headers['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT'
self.resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(self.resp.status_code, 200)
def test_if_modified_since_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
resp['Location'] = '/'
resp.status_code = 301
return resp
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_modified_since_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
resp.status_code = 400
return resp
self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
def test_not_modified_headers(self):
"""
The 304 Not Modified response should include only the headers required
by section 4.1 of RFC 7232, Last-Modified, and the cookies.
"""
def get_response(req):
resp = self.client.get(req.path_info)
resp['Date'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
resp['Expires'] = 'Sun, 13 Feb 2011 17:35:44 GMT'
resp['Vary'] = 'Cookie'
resp['Cache-Control'] = 'public'
resp['Content-Location'] = '/alt'
resp['Content-Language'] = 'en' # shouldn't be preserved
resp['ETag'] = '"spam"'
resp.set_cookie('key', 'value')
return resp
self.req.META['HTTP_IF_NONE_MATCH'] = '"spam"'
new_response = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(new_response.status_code, 304)
base_response = get_response(self.req)
for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'):
self.assertEqual(new_response.headers[header], base_response.headers[header])
self.assertEqual(new_response.cookies, base_response.cookies)
self.assertNotIn('Content-Language', new_response)
def test_no_unsafe(self):
"""
ConditionalGetMiddleware shouldn't return a conditional response on an
unsafe request. A response has already been generated by the time
ConditionalGetMiddleware is called, so it's too late to return a 412
Precondition Failed.
"""
def get_200_response(req):
return HttpResponse(status=200)
response = ConditionalGetMiddleware(self.get_response)(self.req)
etag = response.headers['ETag']
put_request = self.request_factory.put('/', HTTP_IF_MATCH=etag)
conditional_get_response = ConditionalGetMiddleware(get_200_response)(put_request)
self.assertEqual(conditional_get_response.status_code, 200) # should never be a 412
def test_no_head(self):
"""
ConditionalGetMiddleware shouldn't compute and return an ETag on a
HEAD request since it can't do so accurately without access to the
response body of the corresponding GET.
"""
def get_200_response(req):
return HttpResponse(status=200)
request = self.request_factory.head('/')
conditional_get_response = ConditionalGetMiddleware(get_200_response)(request)
self.assertNotIn('ETag', conditional_get_response)
class XFrameOptionsMiddlewareTest(SimpleTestCase):
"""
Tests for the X-Frame-Options clickjacking prevention middleware.
"""
def test_same_origin(self):
"""
The X_FRAME_OPTIONS setting can be set to SAMEORIGIN to have the
middleware use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='sameorigin'):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
def test_deny(self):
"""
The X_FRAME_OPTIONS setting can be set to DENY to have the middleware
use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS='DENY'):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'DENY')
with override_settings(X_FRAME_OPTIONS='deny'):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'DENY')
def test_defaults_sameorigin(self):
"""
If the X_FRAME_OPTIONS setting is not set then it defaults to
DENY.
"""
with override_settings(X_FRAME_OPTIONS=None):
del settings.X_FRAME_OPTIONS # restored by override_settings
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'DENY')
def test_dont_set_if_set(self):
"""
If the X-Frame-Options header is already set then the middleware does
not attempt to override it.
"""
def same_origin_response(request):
response = HttpResponse()
response.headers['X-Frame-Options'] = 'SAMEORIGIN'
return response
def deny_response(request):
response = HttpResponse()
response.headers['X-Frame-Options'] = 'DENY'
return response
with override_settings(X_FRAME_OPTIONS='DENY'):
r = XFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = XFrameOptionsMiddleware(deny_response)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'DENY')
def test_response_exempt(self):
"""
If the response has an xframe_options_exempt attribute set to False
then it still sets the header, but if it's set to True then it doesn't.
"""
def xframe_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = True
return response
def xframe_not_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = False
return response
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = XFrameOptionsMiddleware(xframe_not_exempt_response)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
r = XFrameOptionsMiddleware(xframe_exempt_response)(HttpRequest())
self.assertIsNone(r.headers.get('X-Frame-Options'))
def test_is_extendable(self):
"""
The XFrameOptionsMiddleware method that determines the X-Frame-Options
header value can be overridden based on something in the request or
response.
"""
class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware):
# This is just an example for testing purposes...
def get_xframe_options_value(self, request, response):
if getattr(request, 'sameorigin', False):
return 'SAMEORIGIN'
if getattr(response, 'sameorigin', False):
return 'SAMEORIGIN'
return 'DENY'
def same_origin_response(request):
response = HttpResponse()
response.sameorigin = True
return response
with override_settings(X_FRAME_OPTIONS='DENY'):
r = OtherXFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
request = HttpRequest()
request.sameorigin = True
r = OtherXFrameOptionsMiddleware(get_response_empty)(request)
self.assertEqual(r.headers['X-Frame-Options'], 'SAMEORIGIN')
with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'):
r = OtherXFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers['X-Frame-Options'], 'DENY')
class GZipMiddlewareTest(SimpleTestCase):
"""
Tests the GZipMiddleware.
"""
short_string = b"This string is too short to be worth compressing."
compressible_string = b'a' * 500
incompressible_string = b''.join(int2byte(random.randint(0, 255)) for _ in range(500))
sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get('/')
self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
self.resp = HttpResponse()
self.resp.status_code = 200
self.resp.content = self.compressible_string
self.resp['Content-Type'] = 'text/html; charset=UTF-8'
def get_response(self, request):
return self.resp
@staticmethod
def decompress(gzipped_string):
with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
return f.read()
@staticmethod
def get_mtime(gzipped_string):
with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
f.read() # must read the data before accessing the header
return f.mtime
def test_compress_response(self):
"""
Compression is performed on responses with compressible content.
"""
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertEqual(r.get('Content-Length'), str(len(r.content)))
def test_compress_streaming_response(self):
"""
Compression is performed on responses with streaming content.
"""
def get_stream_response(request):
resp = StreamingHttpResponse(self.sequence)
resp['Content-Type'] = 'text/html; charset=UTF-8'
return resp
r = GZipMiddleware(get_stream_response)(self.req)
self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertFalse(r.has_header('Content-Length'))
def test_compress_streaming_response_unicode(self):
"""
Compression is performed on responses with streaming Unicode content.
"""
def get_stream_response_unicode(request):
resp = StreamingHttpResponse(self.sequence_unicode)
resp['Content-Type'] = 'text/html; charset=UTF-8'
return resp
r = GZipMiddleware(get_stream_response_unicode)(self.req)
self.assertEqual(
self.decompress(b''.join(r)),
b''.join(x.encode() for x in self.sequence_unicode)
)
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertFalse(r.has_header('Content-Length'))
def test_compress_file_response(self):
"""
Compression is performed on FileResponse.
"""
with open(__file__, 'rb') as file1:
def get_response(req):
file_resp = FileResponse(file1)
file_resp['Content-Type'] = 'text/html; charset=UTF-8'
return file_resp
r = GZipMiddleware(get_response)(self.req)
with open(__file__, 'rb') as file2:
self.assertEqual(self.decompress(b''.join(r)), file2.read())
self.assertEqual(r.get('Content-Encoding'), 'gzip')
self.assertIsNot(r.file_to_stream, file1)
def test_compress_non_200_response(self):
"""
Compression is performed on responses with a status other than 200
(#10762).
"""
self.resp.status_code = 404
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'gzip')
def test_no_compress_short_response(self):
"""
Compression isn't performed on responses with short content.
"""
self.resp.content = self.short_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.short_string)
self.assertIsNone(r.get('Content-Encoding'))
def test_no_compress_compressed_response(self):
"""
Compression isn't performed on responses that are already compressed.
"""
self.resp['Content-Encoding'] = 'deflate'
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.compressible_string)
self.assertEqual(r.get('Content-Encoding'), 'deflate')
def test_no_compress_incompressible_response(self):
"""
Compression isn't performed on responses with incompressible content.
"""
self.resp.content = self.incompressible_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.incompressible_string)
self.assertIsNone(r.get('Content-Encoding'))
def test_compress_deterministic(self):
"""
Compression results are the same for the same content and don't
include a modification time (since that would make the results
of compression non-deterministic and prevent
ConditionalGetMiddleware from recognizing conditional matches
on gzipped content).
"""
r1 = GZipMiddleware(self.get_response)(self.req)
r2 = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r1.content, r2.content)
self.assertEqual(self.get_mtime(r1.content), 0)
self.assertEqual(self.get_mtime(r2.content), 0)
class ETagGZipMiddlewareTest(SimpleTestCase):
"""
ETags are handled properly by GZipMiddleware.
"""
rf = RequestFactory()
compressible_string = b'a' * 500
def test_strong_etag_modified(self):
"""
GZipMiddleware makes a strong ETag weak.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers['ETag'] = '"eggs"'
return response
request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers['ETag'], 'W/"eggs"')
def test_weak_etag_not_modified(self):
"""
GZipMiddleware doesn't modify a weak ETag.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers['ETag'] = 'W/"eggs"'
return response
request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers['ETag'], 'W/"eggs"')
def test_etag_match(self):
"""
GZipMiddleware allows 304 Not Modified responses.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
return response
def get_cond_response(req):
return ConditionalGetMiddleware(get_response)(req)
request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
response = GZipMiddleware(get_cond_response)(request)
gzip_etag = response.headers['ETag']
next_request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate', HTTP_IF_NONE_MATCH=gzip_etag)
next_response = ConditionalGetMiddleware(get_response)(next_request)
self.assertEqual(next_response.status_code, 304)
|
afe2d806742294992dd77f578f6495bc94f183b8975f99d2d0d66670020ca365 | from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase
from django.test.utils import override_settings
class SecurityMiddlewareTest(SimpleTestCase):
def middleware(self, *args, **kwargs):
from django.middleware.security import SecurityMiddleware
return SecurityMiddleware(self.response(*args, **kwargs))
@property
def secure_request_kwargs(self):
return {"wsgi.url_scheme": "https"}
def response(self, *args, headers=None, **kwargs):
def get_response(req):
response = HttpResponse(*args, **kwargs)
if headers:
for k, v in headers.items():
response.headers[k] = v
return response
return get_response
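    # response() returns a get_response callable (the hook that new-style
    # middleware wraps), letting each test describe the downstream response
    # declaratively via args/kwargs and optional headers.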
def process_response(self, *args, secure=False, request=None, **kwargs):
request_kwargs = {}
if secure:
request_kwargs.update(self.secure_request_kwargs)
if request is None:
request = self.request.get("/some/url", **request_kwargs)
ret = self.middleware(*args, **kwargs).process_request(request)
if ret:
return ret
return self.middleware(*args, **kwargs)(request)
request = RequestFactory()
def process_request(self, method, *args, secure=False, **kwargs):
if secure:
kwargs.update(self.secure_request_kwargs)
req = getattr(self.request, method.lower())(*args, **kwargs)
return self.middleware().process_request(req)
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_on(self):
"""
With SECURE_HSTS_SECONDS=3600, the middleware adds
"Strict-Transport-Security: max-age=3600" to the response.
"""
self.assertEqual(
self.process_response(secure=True).headers['Strict-Transport-Security'],
'max-age=3600',
)
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_already_present(self):
"""
The middleware will not override a "Strict-Transport-Security" header
already present in the response.
"""
response = self.process_response(
secure=True,
headers={"Strict-Transport-Security": "max-age=7200"})
self.assertEqual(response.headers["Strict-Transport-Security"], "max-age=7200")
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_only_if_secure(self):
"""
The "Strict-Transport-Security" header is not added to responses going
over an insecure connection.
"""
self.assertNotIn(
'Strict-Transport-Security',
self.process_response(secure=False).headers,
)
@override_settings(SECURE_HSTS_SECONDS=0)
def test_sts_off(self):
"""
With SECURE_HSTS_SECONDS=0, the middleware does not add a
"Strict-Transport-Security" header to the response.
"""
self.assertNotIn(
'Strict-Transport-Security',
self.process_response(secure=True).headers,
)
@override_settings(SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=True)
def test_sts_include_subdomains(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_INCLUDE_SUBDOMAINS
True, the middleware adds a "Strict-Transport-Security" header with the
"includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers['Strict-Transport-Security'],
'max-age=600; includeSubDomains',
)
@override_settings(SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=False)
def test_sts_no_include_subdomains(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_INCLUDE_SUBDOMAINS
False, the middleware adds a "Strict-Transport-Security" header without
the "includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response.headers["Strict-Transport-Security"], "max-age=600")
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=True)
def test_sts_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD True, the
middleware adds a "Strict-Transport-Security" header with the "preload"
directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers['Strict-Transport-Security'],
'max-age=10886400; preload',
)
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_INCLUDE_SUBDOMAINS=True, SECURE_HSTS_PRELOAD=True)
def test_sts_subdomains_and_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero, SECURE_HSTS_INCLUDE_SUBDOMAINS and
SECURE_HSTS_PRELOAD True, the middleware adds a "Strict-Transport-Security"
header containing both the "includeSubDomains" and "preload" directives
to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers['Strict-Transport-Security'],
'max-age=10886400; includeSubDomains; preload',
)
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=False)
def test_sts_no_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD
False, the middleware adds a "Strict-Transport-Security" header without
the "preload" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers['Strict-Transport-Security'],
'max-age=10886400',
)
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_on(self):
"""
With SECURE_CONTENT_TYPE_NOSNIFF set to True, the middleware adds
"X-Content-Type-Options: nosniff" header to the response.
"""
self.assertEqual(
self.process_response().headers['X-Content-Type-Options'],
'nosniff',
)
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_already_present(self):
"""
The middleware will not override an "X-Content-Type-Options" header
already present in the response.
"""
response = self.process_response(secure=True, headers={"X-Content-Type-Options": "foo"})
self.assertEqual(response.headers["X-Content-Type-Options"], "foo")
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=False)
def test_content_type_off(self):
"""
With SECURE_CONTENT_TYPE_NOSNIFF False, the middleware does not add an
"X-Content-Type-Options" header to the response.
"""
self.assertNotIn('X-Content-Type-Options', self.process_response().headers)
@override_settings(SECURE_SSL_REDIRECT=True)
def test_ssl_redirect_on(self):
"""
With SECURE_SSL_REDIRECT True, the middleware redirects any non-secure
requests to the https:// version of the same URL.
"""
ret = self.process_request("get", "/some/url?query=string")
self.assertEqual(ret.status_code, 301)
self.assertEqual(ret["Location"], "https://testserver/some/url?query=string")
@override_settings(SECURE_SSL_REDIRECT=True)
def test_no_redirect_ssl(self):
"""
The middleware does not redirect secure requests.
"""
ret = self.process_request("get", "/some/url", secure=True)
self.assertIsNone(ret)
@override_settings(SECURE_SSL_REDIRECT=True, SECURE_REDIRECT_EXEMPT=["^insecure/"])
def test_redirect_exempt(self):
"""
The middleware does not redirect requests with URL path matching an
exempt pattern.
"""
ret = self.process_request("get", "/insecure/page")
self.assertIsNone(ret)
@override_settings(SECURE_SSL_REDIRECT=True, SECURE_SSL_HOST="secure.example.com")
def test_redirect_ssl_host(self):
"""
The middleware redirects to SECURE_SSL_HOST if given.
"""
ret = self.process_request("get", "/some/url")
self.assertEqual(ret.status_code, 301)
self.assertEqual(ret["Location"], "https://secure.example.com/some/url")
@override_settings(SECURE_SSL_REDIRECT=False)
def test_ssl_redirect_off(self):
"""
With SECURE_SSL_REDIRECT False, the middleware does not redirect.
"""
ret = self.process_request("get", "/some/url")
self.assertIsNone(ret)
@override_settings(SECURE_REFERRER_POLICY=None)
def test_referrer_policy_off(self):
"""
With SECURE_REFERRER_POLICY set to None, the middleware does not add a
"Referrer-Policy" header to the response.
"""
self.assertNotIn('Referrer-Policy', self.process_response().headers)
def test_referrer_policy_on(self):
"""
With SECURE_REFERRER_POLICY set to a valid value, the middleware adds a
"Referrer-Policy" header to the response.
"""
tests = (
('strict-origin', 'strict-origin'),
('strict-origin,origin', 'strict-origin,origin'),
('strict-origin, origin', 'strict-origin,origin'),
(['strict-origin', 'origin'], 'strict-origin,origin'),
(('strict-origin', 'origin'), 'strict-origin,origin'),
)
for value, expected in tests:
with self.subTest(value=value), override_settings(SECURE_REFERRER_POLICY=value):
self.assertEqual(
self.process_response().headers['Referrer-Policy'],
expected,
)
@override_settings(SECURE_REFERRER_POLICY='strict-origin')
def test_referrer_policy_already_present(self):
"""
The middleware will not override a "Referrer-Policy" header already
present in the response.
"""
response = self.process_response(headers={'Referrer-Policy': 'unsafe-url'})
self.assertEqual(response.headers['Referrer-Policy'], 'unsafe-url')
@override_settings(SECURE_CROSS_ORIGIN_OPENER_POLICY=None)
def test_coop_off(self):
"""
With SECURE_CROSS_ORIGIN_OPENER_POLICY set to None, the middleware does
not add a "Cross-Origin-Opener-Policy" header to the response.
"""
        self.assertNotIn('Cross-Origin-Opener-Policy', self.process_response().headers)
def test_coop_default(self):
"""SECURE_CROSS_ORIGIN_OPENER_POLICY defaults to same-origin."""
self.assertEqual(
self.process_response().headers['Cross-Origin-Opener-Policy'],
'same-origin',
)
def test_coop_on(self):
"""
With SECURE_CROSS_ORIGIN_OPENER_POLICY set to a valid value, the
middleware adds a "Cross-Origin_Opener-Policy" header to the response.
"""
tests = ['same-origin', 'same-origin-allow-popups', 'unsafe-none']
for value in tests:
with self.subTest(value=value), override_settings(
SECURE_CROSS_ORIGIN_OPENER_POLICY=value,
):
self.assertEqual(
self.process_response().headers['Cross-Origin-Opener-Policy'],
value,
)
@override_settings(SECURE_CROSS_ORIGIN_OPENER_POLICY='unsafe-none')
def test_coop_already_present(self):
"""
The middleware doesn't override a "Cross-Origin-Opener-Policy" header
already present in the response.
"""
response = self.process_response(headers={'Cross-Origin-Opener-Policy': 'same-origin'})
self.assertEqual(response.headers['Cross-Origin-Opener-Policy'], 'same-origin')
|
1d63c3e33ef8d8783475efd18fd6b7d1f88c6417ac46d6af59ca592f9a119d72 | import asyncio
import sys
import threading
from pathlib import Path
from unittest import skipIf
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
from django.test import (
AsyncRequestFactory, SimpleTestCase, modify_settings, override_settings,
)
from django.utils.http import http_date
from .urls import sync_waiter, test_filename
TEST_STATIC_ROOT = Path(__file__).parent / 'project' / 'static'
@skipIf(sys.platform == 'win32' and (3, 8, 0) < sys.version_info < (3, 8, 1), 'https://bugs.python.org/issue38563')
@override_settings(ROOT_URLCONF='asgi.urls')
class ASGITest(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
def setUp(self):
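        # These are SimpleTestCase-based tests, so keep the request_started
        # signal from trying to manage database connections while requests
        # run inside the ASGI handler.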
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
async def test_get_asgi_application(self):
"""
get_asgi_application() returns a functioning ASGI callable.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path='/')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
self.assertEqual(
set(response_start['headers']),
{
(b'Content-Length', b'12'),
(b'Content-Type', b'text/html; charset=utf-8'),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'Hello World!')
async def test_file_response(self):
"""
        FileResponse works over ASGI.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path='/file/')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
# Get the file content.
with open(test_filename, 'rb') as test_file:
test_file_contents = test_file.read()
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
headers = response_start['headers']
self.assertEqual(len(headers), 3)
expected_headers = {
b'Content-Length': str(len(test_file_contents)).encode('ascii'),
b'Content-Type': b'text/x-python',
b'Content-Disposition': b'inline; filename="urls.py"',
}
for key, value in headers:
try:
self.assertEqual(value, expected_headers[key])
except AssertionError:
# Windows registry may not be configured with correct
# mimetypes.
if sys.platform == 'win32' and key == b'Content-Type':
self.assertEqual(value, b'text/plain')
else:
raise
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.staticfiles'})
@override_settings(
STATIC_URL='static/',
STATIC_ROOT=TEST_STATIC_ROOT,
STATICFILES_DIRS=[TEST_STATIC_ROOT],
STATICFILES_FINDERS=[
'django.contrib.staticfiles.finders.FileSystemFinder',
],
)
async def test_static_file_response(self):
application = ASGIStaticFilesHandler(get_asgi_application())
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path='/static/file.txt')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
# Get the file content.
file_path = TEST_STATIC_ROOT / 'file.txt'
with open(file_path, 'rb') as test_file:
test_file_contents = test_file.read()
# Read the response.
stat = file_path.stat()
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
self.assertEqual(
set(response_start['headers']),
{
(b'Content-Length', str(len(test_file_contents)).encode('ascii')),
(b'Content-Type', b'text/plain'),
(b'Content-Disposition', b'inline; filename="file.txt"'),
(b'Last-Modified', http_date(stat.st_mtime).encode('ascii')),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
async def test_headers(self):
application = get_asgi_application()
communicator = ApplicationCommunicator(
application,
self.async_request_factory._base_scope(
path='/meta/',
headers=[
[b'content-type', b'text/plain; charset=utf-8'],
[b'content-length', b'77'],
[b'referer', b'Scotland'],
[b'referer', b'Wales'],
],
),
)
await communicator.send_input({'type': 'http.request'})
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
self.assertEqual(
set(response_start['headers']),
{
(b'Content-Length', b'19'),
(b'Content-Type', b'text/plain; charset=utf-8'),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'From Scotland,Wales')
async def test_get_query_string(self):
application = get_asgi_application()
for query_string in (b'name=Andrew', 'name=Andrew'):
with self.subTest(query_string=query_string):
scope = self.async_request_factory._base_scope(
path='/',
query_string=query_string,
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'Hello Andrew!')
async def test_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path='/')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.disconnect'})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_wrong_connection_type(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path='/', type='other')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
msg = 'Django can only handle ASGI/HTTP connections, not other.'
with self.assertRaisesMessage(ValueError, msg):
await communicator.receive_output()
async def test_non_unicode_query_string(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path='/', query_string=b'\xff')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 400)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'')
async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
class SignalHandler:
"""Track threads handler is dispatched on."""
threads = []
def __call__(self, **kwargs):
self.threads.append(threading.current_thread())
signal_handler = SignalHandler()
request_started.connect(signal_handler)
request_finished.connect(signal_handler)
# Perform a basic request.
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path='/')
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({'type': 'http.request'})
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'Hello World!')
# Give response.close() time to finish.
await communicator.wait()
# AsyncToSync should have executed the signals in the same thread.
request_started_thread, request_finished_thread = signal_handler.threads
self.assertEqual(request_started_thread, request_finished_thread)
request_started.disconnect(signal_handler)
request_finished.disconnect(signal_handler)
async def test_concurrent_async_uses_multiple_thread_pools(self):
sync_waiter.active_threads.clear()
# Send 2 requests concurrently
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path='/wait/')
communicators = []
for _ in range(2):
communicators.append(ApplicationCommunicator(application, scope))
await communicators[-1].send_input({'type': 'http.request'})
        # Each request must complete with a status code of 200.
        # If requests aren't scheduled concurrently, the barrier in the
        # sync_waiter view will time out, resulting in a 500 status code.
for communicator in communicators:
response_start = await communicator.receive_output()
self.assertEqual(response_start['type'], 'http.response.start')
self.assertEqual(response_start['status'], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body['type'], 'http.response.body')
self.assertEqual(response_body['body'], b'Hello World!')
# Give response.close() time to finish.
await communicator.wait()
        # The requests should have been scheduled on different threads. Note
# active_threads is a set (a thread can only appear once), therefore
# length is a sufficient check.
self.assertEqual(len(sync_waiter.active_threads), 2)
sync_waiter.active_threads.clear()
|
e0cf5eeba6437d740f15e85409659840ffad7fbcd2ca5590e6d591aa7374fe7b | import threading
from django.http import FileResponse, HttpResponse
from django.urls import path
def hello(request):
name = request.GET.get('name') or 'World'
return HttpResponse('Hello %s!' % name)
def hello_meta(request):
return HttpResponse(
        'From %s' % (request.META.get('HTTP_REFERER') or ''),
content_type=request.META.get('CONTENT_TYPE'),
)
def sync_waiter(request):
with sync_waiter.lock:
sync_waiter.active_threads.add(threading.current_thread())
sync_waiter.barrier.wait(timeout=0.5)
return hello(request)
sync_waiter.active_threads = set()
sync_waiter.lock = threading.Lock()
sync_waiter.barrier = threading.Barrier(2)
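# The function attributes above are shared state for the concurrency test:
# the barrier only releases once two requests are inside sync_waiter at the
# same time, and active_threads records which threads served them.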
test_filename = __file__
urlpatterns = [
path('', hello),
path('file/', lambda x: FileResponse(open(test_filename, 'rb'))),
path('meta/', hello_meta),
path('wait/', sync_waiter),
]
|
a90afdca113c26c941c66320bc5fbd39463697ee4c98a175a4f24087be998823 | import binascii
import json
import random
from django.conf import settings
from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.cookie import (
CookieStorage, MessageDecoder, MessageEncoder,
)
from django.core.signing import b64_decode, get_cookie_signer
from django.test import SimpleTestCase, override_settings
from django.utils.crypto import get_random_string
from django.utils.safestring import SafeData, mark_safe
from .base import BaseTests
def set_cookie_data(storage, messages, invalid=False, encode_empty=False):
"""
Set ``request.COOKIES`` with the encoded data and remove the storage
backend's loaded data cache.
"""
encoded_data = storage._encode(messages, encode_empty=encode_empty)
if invalid:
# Truncate the first character so that the hash is invalid.
encoded_data = encoded_data[1:]
storage.request.COOKIES = {CookieStorage.cookie_name: encoded_data}
if hasattr(storage, '_loaded_data'):
del storage._loaded_data
def stored_cookie_messages_count(storage, response):
"""
    Return the number of messages stored.
"""
    # Get the messages cookie, ignoring it if its max-age is 0 (because that
    # means it has been marked for deletion).
cookie = response.cookies.get(storage.cookie_name)
if not cookie or cookie['max-age'] == 0:
return 0
data = storage._decode(cookie.value)
if not data:
return 0
if data[-1] == CookieStorage.not_finished:
data.pop()
return len(data)
@override_settings(SESSION_COOKIE_DOMAIN='.example.com', SESSION_COOKIE_SECURE=True, SESSION_COOKIE_HTTPONLY=True)
class CookieTests(BaseTests, SimpleTestCase):
storage_class = CookieStorage
def stored_messages_count(self, storage, response):
return stored_cookie_messages_count(storage, response)
def test_get(self):
storage = self.storage_class(self.get_request())
# Set initial data.
example_messages = ['test', 'me']
set_cookie_data(storage, example_messages)
# The message contains what's expected.
self.assertEqual(list(storage), example_messages)
@override_settings(SESSION_COOKIE_SAMESITE='Strict')
    def test_cookie_settings(self):
"""
CookieStorage honors SESSION_COOKIE_DOMAIN, SESSION_COOKIE_SECURE, and
SESSION_COOKIE_HTTPONLY (#15618, #20972).
"""
# Test before the messages have been consumed
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'test')
storage.update(response)
messages = storage._decode(response.cookies['messages'].value)
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].message, 'test')
self.assertEqual(response.cookies['messages']['domain'], '.example.com')
self.assertEqual(response.cookies['messages']['expires'], '')
self.assertIs(response.cookies['messages']['secure'], True)
self.assertIs(response.cookies['messages']['httponly'], True)
self.assertEqual(response.cookies['messages']['samesite'], 'Strict')
        # Test deletion of the cookie (storing with an empty value) after the
        # messages have been consumed.
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'test')
for m in storage:
pass # Iterate through the storage to simulate consumption of messages.
storage.update(response)
self.assertEqual(response.cookies['messages'].value, '')
self.assertEqual(response.cookies['messages']['domain'], '.example.com')
self.assertEqual(response.cookies['messages']['expires'], 'Thu, 01 Jan 1970 00:00:00 GMT')
self.assertEqual(
response.cookies['messages']['samesite'],
settings.SESSION_COOKIE_SAMESITE,
)
def test_get_bad_cookie(self):
request = self.get_request()
storage = self.storage_class(request)
# Set initial (invalid) data.
example_messages = ['test', 'me']
set_cookie_data(storage, example_messages, invalid=True)
        # The invalid (tampered) data is discarded, so nothing is retrieved.
self.assertEqual(list(storage), [])
def test_max_cookie_length(self):
"""
If the data exceeds what is allowed in a cookie, older messages are
removed before saving (and returned by the ``update`` method).
"""
storage = self.get_storage()
response = self.get_response()
# When storing as a cookie, the cookie has constant overhead of approx
# 54 chars, and each message has a constant overhead of about 37 chars
# and a variable overhead of zero in the best case. We aim for a message
# size which will fit 4 messages into the cookie, but not 5.
# See also FallbackTest.test_session_fallback
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
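        # i.e. take the budget left after the cookie overhead, aim for a point
        # between 4 and 5 messages (hence the 4.5 divisor), and subtract the
        # per-message overhead to get the usable message length.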
first_msg = None
# Generate the same (tested) content every time that does not get run
# through zlib compression.
random.seed(42)
for i in range(5):
msg = get_random_string(msg_size)
storage.add(constants.INFO, msg)
if i == 0:
first_msg = msg
unstored_messages = storage.update(response)
cookie_storing = self.stored_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
self.assertEqual(len(unstored_messages), 1)
self.assertEqual(unstored_messages[0].message, first_msg)
def test_message_rfc6265(self):
non_compliant_chars = ['\\', ',', ';', '"']
messages = ['\\te,st', ';m"e', '\u2019', '123"NOTRECEIVED"']
storage = self.get_storage()
encoded = storage._encode(messages)
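        # RFC 6265 cookie values may not contain backslashes, commas,
        # semicolons, or double quotes, so the encoder must escape them rather
        # than emit them literally.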
for illegal in non_compliant_chars:
self.assertEqual(encoded.find(illegal), -1)
def test_json_encoder_decoder(self):
"""
A complex nested data structure containing Message
instances is properly encoded/decoded by the custom JSON
encoder/decoder classes.
"""
messages = [
{
'message': Message(constants.INFO, 'Test message'),
'message_list': [
Message(constants.INFO, 'message %s') for x in range(5)
] + [{'another-message': Message(constants.ERROR, 'error')}],
},
Message(constants.INFO, 'message %s'),
]
encoder = MessageEncoder()
value = encoder.encode(messages)
decoded_messages = json.loads(value, cls=MessageDecoder)
self.assertEqual(messages, decoded_messages)
def test_safedata(self):
"""
        A message containing SafeData keeps its safe status when retrieved
        from the message storage.
"""
def encode_decode(data):
message = Message(constants.DEBUG, data)
encoded = storage._encode(message)
decoded = storage._decode(encoded)
return decoded.message
storage = self.get_storage()
self.assertIsInstance(encode_decode(mark_safe("<b>Hello Django!</b>")), SafeData)
self.assertNotIsInstance(encode_decode("<b>Hello Django!</b>"), SafeData)
def test_legacy_encode_decode(self):
# RemovedInDjango41Warning: pre-Django 3.2 encoded messages will be
# invalid.
storage = self.storage_class(self.get_request())
messages = ['this', Message(0, 'Successfully signed in as [email protected]')]
# Encode/decode a message using the pre-Django 3.2 format.
encoder = MessageEncoder()
value = encoder.encode(messages)
with self.assertRaises(binascii.Error):
b64_decode(value.encode())
signer = get_cookie_signer(salt=storage.key_salt)
encoded_messages = signer.sign(value)
decoded_messages = storage._decode(encoded_messages)
self.assertEqual(messages, decoded_messages)
|
22a374c886e7a777ebca7e39e5273d883f0240fa8438870ecc3c4c5037b4dd8e | from django.db import models
class SomeObject(models.Model):
name = models.CharField(max_length=255)
|
acd42717486b77dc37d9344e097df05f859c20cd1328cfa095ecfab64371cfa7 | from django.core.signing import b64_decode
from django.test import TestCase, override_settings
from django.urls import reverse
from .models import SomeObject
from .urls import ContactFormViewWithMsg, DeleteFormViewWithMsg
@override_settings(ROOT_URLCONF='messages_tests.urls')
class SuccessMessageMixinTests(TestCase):
def test_set_messages_success(self):
author = {'name': 'John Doe', 'slug': 'success-msg'}
add_url = reverse('add_success_msg')
req = self.client.post(add_url, author)
# Uncompressed message is stored in the cookie.
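        # The signed messages cookie is roughly "<base64 payload>:<signature>",
        # so decoding the first segment recovers the serialized messages.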
value = b64_decode(
req.cookies['messages'].value.split(":")[0].encode(),
).decode()
self.assertIn(ContactFormViewWithMsg.success_message % author, value)
def test_set_messages_success_on_delete(self):
object_to_delete = SomeObject.objects.create(name='MyObject')
delete_url = reverse('success_msg_on_delete', args=[object_to_delete.pk])
response = self.client.post(delete_url, follow=True)
self.assertContains(response, DeleteFormViewWithMsg.success_message)
|
06afeb89b0cdb6591def63bf0cebef84747e4616f333fbc71cc783689c46fe69 | from django.contrib.messages import constants, get_level, set_level, utils
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.storage import base, default_storage
from django.contrib.messages.storage.base import Message
from django.http import HttpRequest, HttpResponse
from django.test import modify_settings, override_settings
from django.urls import reverse
from django.utils.translation import gettext_lazy
def add_level_messages(storage):
"""
Add 6 messages from different levels (including a custom one) to a storage
instance.
"""
storage.add(constants.INFO, 'A generic info message')
storage.add(29, 'Some custom level')
storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag')
storage.add(constants.WARNING, 'A warning')
storage.add(constants.ERROR, 'An error')
storage.add(constants.SUCCESS, 'This was a triumph.')
class override_settings_tags(override_settings):
def enable(self):
super().enable()
# LEVEL_TAGS is a constant defined in the
# django.contrib.messages.storage.base module, so after changing
# settings.MESSAGE_TAGS, update that constant also.
self.old_level_tags = base.LEVEL_TAGS
base.LEVEL_TAGS = utils.get_level_tags()
def disable(self):
super().disable()
base.LEVEL_TAGS = self.old_level_tags
class BaseTests:
storage_class = default_storage
levels = {
'debug': constants.DEBUG,
'info': constants.INFO,
'success': constants.SUCCESS,
'warning': constants.WARNING,
'error': constants.ERROR,
}
def setUp(self):
self.settings_override = override_settings_tags(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
),
},
}],
ROOT_URLCONF='messages_tests.urls',
MESSAGE_TAGS={},
MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__, self.storage_class.__name__),
SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer',
)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def get_request(self):
return HttpRequest()
def get_response(self):
return HttpResponse()
def get_storage(self, data=None):
"""
Return the storage backend, setting its loaded data to the ``data``
argument.
This method avoids the storage ``_get`` method from getting called so
that other parts of the storage backend can be tested independent of
the message retrieval logic.
"""
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
def test_repr(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(
repr(storage),
f'<{self.storage_class.__qualname__}: request=<HttpRequest>>',
)
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 1')
self.assertTrue(storage.added_new)
storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, gettext_lazy('lazy message'))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 1')
storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 3')
list(storage) # Simulates a read
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage) # Simulates a read
storage.add(constants.INFO, 'Test message 3')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_full_request_response_cycle(self):
"""
With the message middleware enabled, messages are properly stored and
retrieved across the full request/redirect/response cycle.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
messages = [Message(self.levels[level], msg) for msg in data['messages']]
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_with_template_response(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_template_response')
for level in self.levels:
add_url = reverse('add_template_response', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
for msg in data['messages']:
self.assertContains(response, msg)
        # There shouldn't be any messages on a second GET request.
response = self.client.get(show_url)
for msg in data['messages']:
self.assertNotContains(response, msg)
def test_context_processor_message_levels(self):
show_url = reverse('show_template_response')
response = self.client.get(show_url)
self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context)
self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_multiple_posts(self):
"""
Messages persist properly when multiple POSTs are made before a GET.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
messages = []
for level in ('debug', 'info', 'success', 'warning', 'error'):
messages.extend(Message(self.levels[level], msg) for msg in data['messages'])
add_url = reverse('add_message', args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertIn('messages', response.context)
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@modify_settings(
INSTALLED_APPS={'remove': 'django.contrib.messages'},
MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
MESSAGE_LEVEL=constants.DEBUG,
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
}],
)
def test_middleware_disabled(self):
"""
When the middleware is disabled, an exception is raised when one
attempts to store a message.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
with self.assertRaises(MessageFailure):
self.client.post(add_url, data, follow=True)
@modify_settings(
INSTALLED_APPS={'remove': 'django.contrib.messages'},
MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
}],
)
def test_middleware_disabled_fail_silently(self):
"""
When the middleware is disabled, an exception is not raised
if 'fail_silently' is True.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
'fail_silently': True,
}
show_url = reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertNotIn('messages', response.context)
def stored_messages_count(self, storage, response):
"""
Return the number of messages being stored after a
``storage.update()`` call.
"""
        raise NotImplementedError('This method must be implemented by a subclass.')
def test_get(self):
        raise NotImplementedError('This method must be implemented by a subclass.')
def get_existing_storage(self):
return self.get_storage([
Message(constants.INFO, 'Test message 1'),
Message(constants.INFO, 'Test message 2', extra_tags='tag'),
])
def test_existing_read(self):
"""
Reading the existing storage doesn't cause the data to be lost.
"""
storage = self.get_existing_storage()
self.assertFalse(storage.used)
# After iterating the storage engine directly, the used flag is set.
data = list(storage)
self.assertTrue(storage.used)
        # Iterating the storage does not make the data disappear.
self.assertEqual(data, list(storage))
def test_existing_add(self):
storage = self.get_existing_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 3')
self.assertTrue(storage.added_new)
def test_default_level(self):
# get_level works even with no storage on the request.
request = self.get_request()
self.assertEqual(get_level(request), constants.INFO)
# get_level returns the default level if it hasn't been set.
storage = self.get_storage()
request._messages = storage
self.assertEqual(get_level(request), constants.INFO)
# Only messages of sufficient level get recorded.
add_level_messages(storage)
self.assertEqual(len(storage), 5)
def test_low_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 5))
self.assertEqual(get_level(request), 5)
add_level_messages(storage)
self.assertEqual(len(storage), 6)
def test_high_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 30))
self.assertEqual(get_level(request), 30)
add_level_messages(storage)
self.assertEqual(len(storage), 2)
@override_settings(MESSAGE_LEVEL=29)
def test_settings_level(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(get_level(request), 29)
add_level_messages(storage)
self.assertEqual(len(storage), 3)
def test_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
storage.add(constants.INFO, 'A generic info message', extra_tags=None)
tags = [msg.tags for msg in storage]
self.assertEqual(tags, ['info', '', 'extra-tag debug', 'warning', 'error', 'success', 'info'])
def test_level_tag(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.level_tag for msg in storage]
self.assertEqual(tags, ['info', '', 'debug', 'warning', 'error', 'success'])
@override_settings_tags(MESSAGE_TAGS={
constants.INFO: 'info',
constants.DEBUG: '',
constants.WARNING: '',
constants.ERROR: 'bad',
29: 'custom',
})
def test_custom_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags, ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
|
35b45d1d38c897feb869f02b5f68ea5162cdae665984b4c05457e08c30156c11 | from django import forms
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from django.http import HttpResponse, HttpResponseRedirect
from django.template import engines
from django.template.response import TemplateResponse
from django.urls import path, re_path, reverse
from django.views.decorators.cache import never_cache
from django.views.generic.edit import DeleteView, FormView
from .models import SomeObject
TEMPLATE = """{% if messages %}
<ul class="messages">
{% for message in messages %}
<li{% if message.tags %} class="{{ message.tags }}"{% endif %}>
{{ message }}
</li>
{% endfor %}
</ul>
{% endif %}
"""
@never_cache
def add(request, message_type):
    # Use None (rather than False) as the default so the test can check that
    # the messages API itself defaults fail_silently to False when it isn't
    # supplied.
fail_silently = request.POST.get('fail_silently', None)
for msg in request.POST.getlist('messages'):
if fail_silently is not None:
getattr(messages, message_type)(request, msg, fail_silently=fail_silently)
else:
getattr(messages, message_type)(request, msg)
return HttpResponseRedirect(reverse('show_message'))
@never_cache
def add_template_response(request, message_type):
for msg in request.POST.getlist('messages'):
getattr(messages, message_type)(request, msg)
return HttpResponseRedirect(reverse('show_template_response'))
@never_cache
def show(request):
template = engines['django'].from_string(TEMPLATE)
return HttpResponse(template.render(request=request))
@never_cache
def show_template_response(request):
template = engines['django'].from_string(TEMPLATE)
return TemplateResponse(request, template)
class ContactForm(forms.Form):
name = forms.CharField(required=True)
slug = forms.SlugField(required=True)
class ContactFormViewWithMsg(SuccessMessageMixin, FormView):
form_class = ContactForm
success_url = show
success_message = "%(name)s was created successfully"
class DeleteFormViewWithMsg(SuccessMessageMixin, DeleteView):
model = SomeObject
success_url = '/show/'
success_message = 'Object was deleted successfully'
urlpatterns = [
re_path('^add/(debug|info|success|warning|error)/$', add, name='add_message'),
path('add/msg/', ContactFormViewWithMsg.as_view(), name='add_success_msg'),
path('delete/msg/<int:pk>', DeleteFormViewWithMsg.as_view(), name='success_msg_on_delete'),
path('show/', show, name='show_message'),
re_path(
'^template_response/add/(debug|info|success|warning|error)/$',
add_template_response, name='add_template_response',
),
path('template_response/show/', show_template_response, name='show_template_response'),
]
|
e60727d2111bc5c37cd2e0da98c81d87f0cb7f266baf0d43c3edabeaef93f944 | """
Unit tests for reverse URL lookups.
"""
import pickle
import sys
import threading
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from django.test.utils import override_script_prefix
from django.urls import (
NoReverseMatch, Resolver404, ResolverMatch, URLPattern, URLResolver,
get_callable, get_resolver, get_urlconf, include, path, re_path, resolve,
reverse, reverse_lazy,
)
from django.urls.resolvers import RegexPattern
from . import middleware, urlconf_outer, views
from .utils import URLObject
from .views import empty_view
resolve_test_data = (
# These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
# Simple case
('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'}),
(
'/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, (),
{'arg1': '42', 'arg2': '37'}
),
(
'/included/normal/42/37/', 'inc-normal-view', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-normal-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/view_class/42/37/', 'inc-view-class', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-view-class',
views.view_class_instance, (), {'arg1': '42', 'arg2': '37'}
),
# Unnamed args are dropped if you have *any* kwargs in a pattern
('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, (), {'arg2': '37'}),
(
'/included/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-mixed-args',
views.empty_view, (), {'arg2': '37'}
),
(
'/included/12/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-mixed-args',
views.empty_view, (), {'arg2': '37'}
),
# Unnamed views should have None as the url_name. Regression data for #21157.
(
'/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, (),
{'arg1': '42', 'arg2': '37'}
),
(
'/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance,
(), {'arg1': '42', 'arg2': '37'}
),
# If you have no kwargs, you get an args list.
('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
(
'/included/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs',
views.empty_view, ('42', '37'), {}
),
(
'/included/12/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs',
views.empty_view, ('12', '42', '37'), {}
),
# Namespaces
(
'/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/normal/42/37/', 'inc-normal-view', 'included_namespace_urls',
'inc-ns1', 'inc-ns1:inc-normal-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
# Nested namespaces
(
'/ns-included1/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view',
'included_namespace_urls:namespace_urls:included_namespace_urls:testapp',
'inc-ns1:inc-ns4:inc-ns2:test-ns3',
'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'inc-app:test-ns3',
'inc-app:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view',
'included_namespace_urls:namespace_urls:included_namespace_urls:testapp',
'inc-app:inc-ns4:inc-ns2:test-ns3',
'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
# Namespaces capturing variables
(
'/inc70/', 'inner-nothing', 'included_urls', 'inc-ns5', 'inc-ns5:inner-nothing',
views.empty_view, (), {'outer': '70'}
),
(
'/inc78/extra/foobar/', 'inner-extra', 'included_urls', 'inc-ns5', 'inc-ns5:inner-extra',
views.empty_view, (), {'outer': '78', 'extra': 'foobar'}
),
)
test_data = (
('places', '/places/3/', [3], {}),
('places', '/places/3/', ['3'], {}),
('places', NoReverseMatch, ['a'], {}),
('places', NoReverseMatch, [], {}),
('places?', '/place/', [], {}),
('places+', '/places/', [], {}),
('places*', '/place/', [], {}),
('places2?', '/', [], {}),
('places2+', '/places/', [], {}),
('places2*', '/', [], {}),
('places3', '/places/4/', [4], {}),
('places3', '/places/harlem/', ['harlem'], {}),
('places3', NoReverseMatch, ['harlem64'], {}),
('places4', '/places/3/', [], {'id': 3}),
('people', NoReverseMatch, [], {}),
('people', '/people/adrian/', ['adrian'], {}),
('people', '/people/adrian/', [], {'name': 'adrian'}),
('people', NoReverseMatch, ['name with spaces'], {}),
('people', NoReverseMatch, [], {'name': 'name with spaces'}),
('people2', '/people/name/', [], {}),
('people2a', '/people/name/fred/', ['fred'], {}),
('people_backref', '/people/nate-nate/', ['nate'], {}),
('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
('optional', '/optional/fred/', [], {'name': 'fred'}),
('optional', '/optional/fred/', ['fred'], {}),
('named_optional', '/optional/1/', [1], {}),
('named_optional', '/optional/1/', [], {'arg1': 1}),
('named_optional', '/optional/1/2/', [1, 2], {}),
('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('named_optional_terminated', '/optional/1/', [1], {}),
('named_optional_terminated', '/optional/1/', [], {'arg1': 1}),
('named_optional_terminated', '/optional/1/2/', [1, 2], {}),
('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('hardcoded', '/hardcoded/', [], {}),
('hardcoded2', '/hardcoded/doc.pdf', [], {}),
('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
('people3', NoReverseMatch, [], {'state': 'il'}),
('people3', NoReverseMatch, [], {'name': 'adrian'}),
('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
('people6', '/people//adrian/', ['adrian'], {}),
('range', '/character_set/a/', [], {}),
('range2', '/character_set/x/', [], {}),
('price', '/price/$10/', ['10'], {}),
('price2', '/price/$10/', ['10'], {}),
('price3', '/price/$10/', ['10'], {}),
('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
('headlines', '/headlines/2007.5.21/', [], {'year': 2007, 'month': 5, 'day': 21}),
(
'windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [],
{'drive_name': 'C', 'path': r'Documents and Settings\spam'}
),
('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
('special', NoReverseMatch, [''], {}),
('mixed', '/john/0/', [], {'name': 'john'}),
('repeats', '/repeats/a/', [], {}),
('repeats2', '/repeats/aa/', [], {}),
('repeats3', '/repeats/aa/', [], {}),
('test', '/test/1', [], {}),
('inner-nothing', '/outer/42/', [], {'outer': '42'}),
('inner-nothing', '/outer/42/', ['42'], {}),
('inner-nothing', NoReverseMatch, ['foo'], {}),
('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
('disjunction', NoReverseMatch, ['foo'], {}),
('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
('extra-places', '/e-places/10/', ['10'], {}),
('extra-people', '/e-people/fred/', ['fred'], {}),
('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
('part', '/part/one/', [], {'value': 'one'}),
('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/part2/one/', [], {'value': 'one'}),
('part2', '/part2/', [], {}),
('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
# Tests for nested groups. Nested capturing groups will only work if you
# *only* supply the correct outer group.
('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}),
('nested-capture', '/nested/capture/opt/', ['opt/'], {}),
('nested-capture', NoReverseMatch, [], {'p': 'opt'}),
('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}),
('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}),
('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}),
('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}),
('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}),
('non_path_include', '/includes/non_path_include/', [], {}),
# Tests for #13154
('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
('defaults', NoReverseMatch, [], {'arg2': 1}),
# Security tests
('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
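# Each test_data entry is (url name, expected path or NoReverseMatch, args, kwargs);
# URLPatternReverse.test_urlpattern_reverse() below feeds every row through reverse().
# For example, the first row corresponds to a call like:
#     reverse('people', kwargs={'name': 'adrian'})  # -> '/people/adrian/'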
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(SimpleTestCase):
def test_no_urls_exception(self):
"""
URLResolver should raise an exception when no urlpatterns exist.
"""
resolver = URLResolver(RegexPattern(r'^$'), settings.ROOT_URLCONF)
with self.assertRaisesMessage(
ImproperlyConfigured,
"The included URLconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see the 'urlpatterns' "
"variable with valid patterns in the file then the issue is "
"probably caused by a circular import."
):
getattr(resolver, 'url_patterns')  # Accessing url_patterns triggers the check.
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(SimpleTestCase):
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
with self.subTest(name=name, args=args, kwargs=kwargs):
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(NoReverseMatch, expected)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
# Reversing None should raise an error, not return the last un-named view.
with self.assertRaises(NoReverseMatch):
reverse(None)
def test_mixing_args_and_kwargs(self):
msg = "Don't mix *args and **kwargs in call to reverse()!"
with self.assertRaisesMessage(ValueError, msg):
reverse('name', args=['a'], kwargs={'b': 'c'})
@override_script_prefix('/{{invalid}}/')
def test_prefix_braces(self):
self.assertEqual(
'/%7B%7Binvalid%7D%7D/includes/non_path_include/',
reverse('non_path_include')
)
def test_prefix_parenthesis(self):
# Parentheses are allowed and should not cause errors or be escaped
with override_script_prefix('/bogus)/'):
self.assertEqual(
'/bogus)/includes/non_path_include/',
reverse('non_path_include')
)
with override_script_prefix('/(bogus)/'):
self.assertEqual(
'/(bogus)/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/bump%20map/')
def test_prefix_format_char(self):
self.assertEqual(
'/bump%2520map/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/%7Eme/')
def test_non_urlsafe_prefix_with_args(self):
# Regression for #20022, adjusted for #24013 because ~ is an unreserved
# character. Tests whether % is escaped.
self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1]))
def test_patterns_reported(self):
# Regression for #17076
with self.assertRaisesMessage(NoReverseMatch, r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"):
# This URL exists, but requires an argument.
reverse("people", args=[])
@override_script_prefix('/script:name/')
def test_script_name_escaping(self):
self.assertEqual(
reverse('optional', args=['foo:bar']),
'/script:name/optional/foo:bar/'
)
def test_view_not_found_message(self):
msg = (
"Reverse for 'nonexistent-view' not found. 'nonexistent-view' "
"is not a valid view function or pattern name."
)
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('nonexistent-view')
def test_no_args_message(self):
msg = "Reverse for 'places' with no arguments not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places')
def test_illegal_args_message(self):
msg = "Reverse for 'places' with arguments '(1, 2)' not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places', args=(1, 2))
def test_illegal_kwargs_message(self):
msg = "Reverse for 'places' with keyword arguments '{'arg1': 2}' not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places', kwargs={'arg1': 2})
class ResolverTests(SimpleTestCase):
def test_resolver_repr(self):
"""
Test repr of URLResolver, especially when urlconf_name is a list
(#17892).
"""
# Pick a resolver from a namespaced URLconf
resolver = get_resolver('urlpatterns_reverse.namespace_urls')
sub_resolver = resolver.namespace_dict['test-ns1'][1]
self.assertIn('<URLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
The lazy object returned by reverse_lazy() is coerced to text by
resolve(). Prior to #21043, this raised a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
resolver.resolve(proxy_url)
def test_resolver_reverse(self):
resolver = get_resolver('urlpatterns_reverse.named_urls')
test_urls = [
# (name, args, kwargs, expected)
('named-url1', (), {}, ''),
('named-url2', ('arg',), {}, 'extra/arg/'),
('named-url2', (), {'extra': 'arg'}, 'extra/arg/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_resolver_reverse_conflict(self):
"""
URL pattern name arguments don't need to be unique. The last registered
pattern takes precedence for conflicting names.
"""
resolver = get_resolver('urlpatterns_reverse.named_urls_conflict')
test_urls = [
# (name, args, kwargs, expected)
# Without arguments, the last URL in urlpatterns has precedence.
('name-conflict', (), {}, 'conflict/'),
# With an arg, the last URL in urlpatterns has precedence.
('name-conflict', ('arg',), {}, 'conflict-last/arg/'),
# With a kwarg, other URL patterns can be reversed.
('name-conflict', (), {'first': 'arg'}, 'conflict-first/arg/'),
('name-conflict', (), {'middle': 'arg'}, 'conflict-middle/arg/'),
('name-conflict', (), {'last': 'arg'}, 'conflict-last/arg/'),
# The number and order of the arguments don't interfere with reversing.
('name-conflict', ('arg', 'arg'), {}, 'conflict/arg/arg/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_non_regex(self):
"""
A Resolver404 is raised if resolving doesn't meet the basic
requirements of a path to match - i.e., at the very least, it matches
the root pattern '^/'. Never return None from resolve() to prevent a
TypeError from occurring later (#10834).
"""
test_urls = ['', 'a', '\\', '.']
for path_ in test_urls:
with self.subTest(path=path_):
with self.assertRaises(Resolver404):
resolve(path_)
def test_404_tried_urls_have_names(self):
"""
The list of URLs that come back from a Resolver404 exception contains
a list in the right format for printing out in the DEBUG 404 page with
both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a nonexistent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/nonexistent-url')
url_types_names = [
[{'type': URLPattern, 'name': 'named-url1'}],
[{'type': URLPattern, 'name': 'named-url2'}],
[{'type': URLPattern, 'name': None}],
[{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url3'}],
[{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url4'}],
[{'type': URLResolver}, {'type': URLPattern, 'name': None}],
[{'type': URLResolver}, {'type': URLResolver}],
]
with self.assertRaisesMessage(Resolver404, 'tried') as cm:
resolve('/included/nonexistent-url', urlconf=urls)
e = cm.exception
# Make sure we at least matched the root ('/') URL resolver:
self.assertIn('tried', e.args[0])
self.assertEqual(
len(e.args[0]['tried']),
len(url_types_names),
'Wrong number of tried URLs returned. Expected %s, got %s.' % (
len(url_types_names), len(e.args[0]['tried'])
)
)
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
with self.subTest(t):
self.assertIsInstance(t, e['type'], '%s is not an instance of %s' % (t, e['type']))
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(
t.name,
e['name'],
'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name)
)
def test_namespaced_view_detail(self):
resolver = get_resolver('urlpatterns_reverse.nested_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view1'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view2'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.View3'))
self.assertFalse(resolver._is_callback('urlpatterns_reverse.nested_urls.blub'))
def test_view_detail_as_method(self):
# Views which have a class name as part of their path.
resolver = get_resolver('urlpatterns_reverse.method_view_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.method_view'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view'))
def test_populate_concurrency(self):
"""
URLResolver._populate() can be called concurrently, but not more
than once per thread (#26888).
"""
resolver = URLResolver(RegexPattern(r'^/'), 'urlpatterns_reverse.urls')
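# Mark the main thread as already populating; because _local is thread-local,
# the spawned thread below is unaffected and still populates the resolver,
# which is what the final assertion checks.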
resolver._local.populating = True
thread = threading.Thread(target=resolver._populate)
thread.start()
thread.join()
self.assertNotEqual(resolver._reverse_dict, {})
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get('/redirect/')
self.assertRedirects(response, "/redirected_to/", status_code=302)
def test_user_permission_with_lazy_reverse(self):
alfred = User.objects.create_user('alfred', '[email protected]', password='testpw')
response = self.client.get('/login_required_view/')
self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
self.client.force_login(alfred)
response = self.client.get('/login_required_view/')
self.assertEqual(response.status_code, 200)
def test_inserting_reverse_lazy_into_string(self):
self.assertEqual(
'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
def test_build_absolute_uri(self):
factory = RequestFactory()
request = factory.get('/')
self.assertEqual(
request.build_absolute_uri(reverse_lazy('some-login-page')),
'http://testserver/login/',
)
class ReverseLazySettingsTest(AdminScriptTestCase):
"""
reverse_lazy can be used in settings without causing a circular
import error.
"""
def setUp(self):
super().setUp()
self.write_settings(
'settings.py',
extra="from django.urls import reverse_lazy\nLOGIN_URL = reverse_lazy('login')",
)
def test_lazy_in_settings(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(SimpleTestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj:
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, '/hi-there/')
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, '/hi-there/')
def test_redirect_to_view_name(self):
res = redirect('hardcoded2')
self.assertEqual(res.url, '/hardcoded/doc.pdf')
res = redirect('places', 1)
self.assertEqual(res.url, '/places/1/')
res = redirect('headlines', year='2008', month='02', day='17')
self.assertEqual(res.url, '/headlines/2008.02.17/')
with self.assertRaises(NoReverseMatch):
redirect('not-a-view')
def test_redirect_to_url(self):
res = redirect('/foo/')
self.assertEqual(res.url, '/foo/')
res = redirect('http://example.com/')
self.assertEqual(res.url, 'http://example.com/')
# Assert that we can redirect using UTF-8 strings
res = redirect('/æøå/abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
# Assert that no imports are attempted when dealing with a relative path
# (previously, the below would result in a UnicodeEncodeError from __import__).
res = redirect('/æøå.abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
res = redirect('os.path')
self.assertEqual(res.url, 'os.path')
def test_no_illegal_imports(self):
# modules that are not listed in urlpatterns should not be importable
redirect("urlpatterns_reverse.nonimported_module.view")
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_reverse_by_path_nested(self):
# Views added to urlpatterns using include() should be reversible.
from .views import nested_view
self.assertEqual(reverse(nested_view), '/includes/nested_path/')
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, '/absolute_arg_view/')
with self.assertRaises(NoReverseMatch):
redirect(absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class NamespaceTests(SimpleTestCase):
def test_ambiguous_object(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
('urlobject-view', [], {}),
('urlobject-view', [37, 42], {}),
('urlobject-view', [], {'arg1': 42, 'arg2': 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_ambiguous_urlpattern(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
('inner-nothing', [], {}),
('inner-nothing', [37, 42], {}),
('inner-nothing', [], {'arg1': 42, 'arg2': 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_non_existent_namespace(self):
"""Nonexistent namespaces raise errors."""
test_urls = [
'blahblah:urlobject-view',
'test-ns1:blahblah:urlobject-view',
]
for name in test_urls:
with self.subTest(name=name):
with self.assertRaises(NoReverseMatch):
reverse(name)
def test_normal_name(self):
"""Normal lookups work as expected."""
test_urls = [
('normal-view', [], {}, '/normal/'),
('normal-view', [37, 42], {}, '/normal/37/42/'),
('normal-view', [], {'arg1': 42, 'arg2': 37}, '/normal/42/37/'),
('special-view', [], {}, '/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_simple_included_name(self):
"""Normal lookups work on names included from other patterns."""
test_urls = [
('included_namespace_urls:inc-normal-view', [], {}, '/included/normal/'),
('included_namespace_urls:inc-normal-view', [37, 42], {}, '/included/normal/37/42/'),
('included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/included/normal/42/37/'),
('included_namespace_urls:inc-special-view', [], {}, '/included/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_object(self):
"""Dynamic URL objects can be found using a namespace."""
test_urls = [
('test-ns1:urlobject-view', [], {}, '/test1/inner/'),
('test-ns1:urlobject-view', [37, 42], {}, '/test1/inner/37/42/'),
('test-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/test1/inner/42/37/'),
('test-ns1:urlobject-special-view', [], {}, '/test1/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object(self):
"""
Dynamic URL objects can return a (pattern, app_name) 2-tuple, and
include() can set the namespace.
"""
test_urls = [
('new-ns1:urlobject-view', [], {}, '/newapp1/inner/'),
('new-ns1:urlobject-view', [37, 42], {}, '/newapp1/inner/37/42/'),
('new-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/newapp1/inner/42/37/'),
('new-ns1:urlobject-special-view', [], {}, '/newapp1/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object_default_namespace(self):
"""
Namespace defaults to app_name when including a (pattern, app_name)
2-tuple.
"""
test_urls = [
('newapp:urlobject-view', [], {}, '/new-default/inner/'),
('newapp:urlobject-view', [37, 42], {}, '/new-default/inner/37/42/'),
('newapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/new-default/inner/42/37/'),
('newapp:urlobject-special-view', [], {}, '/new-default/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_embedded_namespace_object(self):
"""Namespaces can be installed anywhere in the URL pattern tree."""
test_urls = [
('included_namespace_urls:test-ns3:urlobject-view', [], {}, '/included/test3/inner/'),
('included_namespace_urls:test-ns3:urlobject-view', [37, 42], {}, '/included/test3/inner/37/42/'),
(
'included_namespace_urls:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/included/test3/inner/42/37/',
),
('included_namespace_urls:test-ns3:urlobject-special-view', [], {}, '/included/test3/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern(self):
"""Namespaces can be applied to include()'d urlpatterns."""
test_urls = [
('inc-ns1:inc-normal-view', [], {}, '/ns-included1/normal/'),
('inc-ns1:inc-normal-view', [37, 42], {}, '/ns-included1/normal/37/42/'),
('inc-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/ns-included1/normal/42/37/'),
('inc-ns1:inc-special-view', [], {}, '/ns-included1/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_name_pattern(self):
"""
Namespaces can be applied to include()'d urlpatterns that set an
app_name attribute.
"""
test_urls = [
('app-ns1:inc-normal-view', [], {}, '/app-included1/normal/'),
('app-ns1:inc-normal-view', [37, 42], {}, '/app-included1/normal/37/42/'),
('app-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/app-included1/normal/42/37/'),
('app-ns1:inc-special-view', [], {}, '/app-included1/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern_with_variable_prefix(self):
"""
Using include() with namespaces when there is a regex variable in front
of it.
"""
test_urls = [
('inc-outer:inc-normal-view', [], {'outer': 42}, '/ns-outer/42/normal/'),
('inc-outer:inc-normal-view', [42], {}, '/ns-outer/42/normal/'),
('inc-outer:inc-normal-view', [], {'arg1': 37, 'arg2': 4, 'outer': 42}, '/ns-outer/42/normal/37/4/'),
('inc-outer:inc-normal-view', [42, 37, 4], {}, '/ns-outer/42/normal/37/4/'),
('inc-outer:inc-special-view', [], {'outer': 42}, '/ns-outer/42/+%5C$*/'),
('inc-outer:inc-special-view', [42], {}, '/ns-outer/42/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_multiple_namespace_pattern(self):
"""Namespaces can be embedded."""
test_urls = [
('inc-ns1:test-ns3:urlobject-view', [], {}, '/ns-included1/test3/inner/'),
('inc-ns1:test-ns3:urlobject-view', [37, 42], {}, '/ns-included1/test3/inner/37/42/'),
(
'inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/ns-included1/test3/inner/42/37/',
),
('inc-ns1:test-ns3:urlobject-special-view', [], {}, '/ns-included1/test3/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_namespace_pattern(self):
"""Namespaces can be nested."""
test_urls = [
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [37, 42], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/37/42/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/ns-included1/ns-included4/ns-included1/test3/inner/42/37/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view', [], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/',
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object(self):
"""A default application namespace can be used for lookup."""
test_urls = [
('testapp:urlobject-view', [], {}, '/default/inner/'),
('testapp:urlobject-view', [37, 42], {}, '/default/inner/37/42/'),
('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/default/inner/42/37/'),
('testapp:urlobject-special-view', [], {}, '/default/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object_with_default(self):
"""A default application namespace is sensitive to the current app."""
test_urls = [
('testapp:urlobject-view', [], {}, 'test-ns3', '/default/inner/'),
('testapp:urlobject-view', [37, 42], {}, 'test-ns3', '/default/inner/37/42/'),
('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'test-ns3', '/default/inner/42/37/'),
('testapp:urlobject-special-view', [], {}, 'test-ns3', '/default/inner/+%5C$*/'),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_app_lookup_object_without_default(self):
"""
An application namespace without a default is sensitive to the current
app.
"""
test_urls = [
('nodefault:urlobject-view', [], {}, None, '/other2/inner/'),
('nodefault:urlobject-view', [37, 42], {}, None, '/other2/inner/37/42/'),
('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/other2/inner/42/37/'),
('nodefault:urlobject-special-view', [], {}, None, '/other2/inner/+%5C$*/'),
('nodefault:urlobject-view', [], {}, 'other-ns1', '/other1/inner/'),
('nodefault:urlobject-view', [37, 42], {}, 'other-ns1', '/other1/inner/37/42/'),
('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'other-ns1', '/other1/inner/42/37/'),
('nodefault:urlobject-special-view', [], {}, 'other-ns1', '/other1/inner/+%5C$*/'),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_special_chars_namespace(self):
test_urls = [
('special:included_namespace_urls:inc-normal-view', [], {}, '/+%5C$*/included/normal/'),
('special:included_namespace_urls:inc-normal-view', [37, 42], {}, '/+%5C$*/included/normal/37/42/'),
(
'special:included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37},
'/+%5C$*/included/normal/42/37/',
),
('special:included_namespace_urls:inc-special-view', [], {}, '/+%5C$*/included/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespaces_with_variables(self):
"""Namespace prefixes can capture variables."""
test_urls = [
('inc-ns5:inner-nothing', [], {'outer': '70'}, '/inc70/'),
('inc-ns5:inner-extra', [], {'extra': 'foobar', 'outer': '78'}, '/inc78/extra/foobar/'),
('inc-ns5:inner-nothing', ['70'], {}, '/inc70/'),
('inc-ns5:inner-extra', ['78', 'foobar'], {}, '/inc78/extra/foobar/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_app_lookup(self):
"""
A nested current_app should be split in individual namespaces (#24904).
"""
test_urls = [
('inc-ns1:testapp:urlobject-view', [], {}, None, '/ns-included1/test4/inner/'),
('inc-ns1:testapp:urlobject-view', [37, 42], {}, None, '/ns-included1/test4/inner/37/42/'),
('inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/ns-included1/test4/inner/42/37/'),
('inc-ns1:testapp:urlobject-special-view', [], {}, None, '/ns-included1/test4/inner/+%5C$*/'),
('inc-ns1:testapp:urlobject-view', [], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/'),
('inc-ns1:testapp:urlobject-view', [37, 42], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/37/42/'),
(
'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'inc-ns1:test-ns3',
'/ns-included1/test3/inner/42/37/',
),
(
'inc-ns1:testapp:urlobject-special-view', [], {}, 'inc-ns1:test-ns3',
'/ns-included1/test3/inner/+%5C$*/',
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_current_app_no_partial_match(self):
"""current_app shouldn't be used unless it matches the whole path."""
test_urls = [
('inc-ns1:testapp:urlobject-view', [], {}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/'),
(
'inc-ns1:testapp:urlobject-view', [37, 42], {}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/37/42/',
),
(
'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/42/37/',
),
(
'inc-ns1:testapp:urlobject-special-view', [], {}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/+%5C$*/',
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
def test_urlconf(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:,inner:/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.NullChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden_with_null(self):
"""
Overriding request.urlconf with None will fall back to the default
URLconf.
"""
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_inner_in_response_middleware(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a response middleware.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_outer_in_response_middleware(self):
"""
Test reversing a URL from the *default* URLconf from inside
a response middleware.
"""
msg = (
"Reverse for 'outer' not found. 'outer' is not a valid view "
"function or pattern name."
)
with self.assertRaisesMessage(NoReverseMatch, msg):
self.client.get('/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInStreaming' % middleware.__name__,
]
)
def test_reverse_inner_in_streaming(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a streaming response.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(b''.join(response), b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInStreaming' % middleware.__name__,
]
)
def test_reverse_outer_in_streaming(self):
"""
Test reversing a URL from the *default* URLconf from inside
a streaming response.
"""
message = "Reverse for 'outer' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
b''.join(self.client.get('/second_test/'))
def test_urlconf_is_reset_after_request(self):
"""The URLconf is reset after each request."""
self.assertIsNone(get_urlconf())
with override_settings(MIDDLEWARE=['%s.ChangeURLconfMiddleware' % middleware.__name__]):
self.client.get(reverse('inner'))
self.assertIsNone(get_urlconf())
class ErrorHandlerResolutionTests(SimpleTestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
urlconf = 'urlpatterns_reverse.urls_error_handlers'
urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
self.resolver = URLResolver(RegexPattern(r'^$'), urlconf)
self.callable_resolver = URLResolver(RegexPattern(r'^$'), urlconf_callables)
def test_named_handlers(self):
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(self.resolver.resolve_error_handler(code), empty_view)
def test_callable_handlers(self):
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(self.callable_resolver.resolve_error_handler(code), empty_view)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_handlers')
class DefaultErrorHandlerTests(SimpleTestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
response = self.client.get('/test/')
self.assertEqual(response.status_code, 404)
msg = "I don't think I'm getting good value for this view"
with self.assertRaisesMessage(ValueError, msg):
self.client.get('/bad_view/')
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
"""Tests for handler404 and handler500 if ROOT_URLCONF is None"""
def test_no_handler_exception(self):
msg = (
"The included URLconf 'None' does not appear to have any patterns "
"in it. If you see the 'urlpatterns' variable with valid patterns "
"in the file then the issue is probably caused by a circular "
"import."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(SimpleTestCase):
def test_urlpattern_resolve(self):
for path_, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
with self.subTest(path=path_):
# Legacy support for extracting "function, args, kwargs".
match_func, match_args, match_kwargs = resolve(path_)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# ResolverMatch capabilities.
match = resolve(path_)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.view_name, view_name)
self.assertEqual(match.func, func)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
# and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
def test_resolver_match_on_request(self):
response = self.client.get('/resolver_match/')
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, 'test-resolver-match')
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
def test_repr(self):
self.assertEqual(
repr(resolve('/no_kwargs/42/37/')),
"ResolverMatch(func=urlpatterns_reverse.views.empty_view, "
"args=('42', '37'), kwargs={}, url_name='no-kwargs', app_names=[], "
"namespaces=[], route='^no_kwargs/([0-9]+)/([0-9]+)/$')",
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
def test_repr_functools_partial(self):
tests = [
('partial', 'template.html'),
('partial_nested', 'nested_partial.html'),
('partial_wrapped', 'template.html'),
]
for name, template_name in tests:
with self.subTest(name=name):
func = (
f"functools.partial({views.empty_view!r}, "
f"template_name='{template_name}')"
)
self.assertEqual(
repr(resolve(f'/{name}/')),
f"ResolverMatch(func={func}, args=(), kwargs={{}}, "
f"url_name='{name}', app_names=[], namespaces=[], "
f"route='{name}/')",
)
@override_settings(ROOT_URLCONF='urlpatterns.path_urls')
def test_pickling(self):
msg = 'Cannot pickle ResolverMatch.'
with self.assertRaisesMessage(pickle.PicklingError, msg):
pickle.dumps(resolve('/users/'))
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(SimpleTestCase):
def test_noncallable_view(self):
# View is not a callable (explicit import; arbitrary Python object)
with self.assertRaisesMessage(TypeError, 'view must be a callable'):
path('uncallable-object/', views.uncallable)
def test_invalid_regex(self):
# Regex contains an error (refs #6170)
msg = '(regex_error/$" is not a valid regular expression'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse(views.empty_view)
class ViewLoadingTests(SimpleTestCase):
def test_view_loading(self):
self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view)
self.assertEqual(get_callable(empty_view), empty_view)
def test_view_does_not_exist(self):
msg = "View does not exist in module urlpatterns_reverse.views."
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.views.i_should_not_exist')
def test_attributeerror_not_hidden(self):
msg = 'I am here to confuse django.urls.get_callable'
with self.assertRaisesMessage(AttributeError, msg):
get_callable('urlpatterns_reverse.views_broken.i_am_broken')
def test_non_string_value(self):
msg = "'1' is not a callable or a dot-notation path"
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable(1)
def test_string_without_dot(self):
msg = "Could not import 'test'. The path must be fully qualified."
with self.assertRaisesMessage(ImportError, msg):
get_callable('test')
def test_module_does_not_exist(self):
with self.assertRaisesMessage(ImportError, "No module named 'foo'"):
get_callable('foo.bar')
def test_parent_module_does_not_exist(self):
msg = 'Parent module urlpatterns_reverse.foo does not exist.'
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.foo.bar')
def test_not_callable(self):
msg = (
"Could not import 'urlpatterns_reverse.tests.resolve_test_data'. "
"View is not callable."
)
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.tests.resolve_test_data')
class IncludeTests(SimpleTestCase):
url_patterns = [
path('inner/', views.empty_view, name='urlobject-view'),
re_path(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
re_path(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
]
app_urls = URLObject('inc-app')
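# url_patterns and app_urls above are the targets passed to include() in the
# tests below; URLObject supplies an app_name of 'inc-app', as the app_name
# assertions further down confirm.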
def test_include_urls(self):
self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))
def test_include_namespace(self):
msg = (
'Specifying a namespace in include() without providing an '
'app_name is not supported.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include(self.url_patterns, 'namespace')
def test_include_4_tuple(self):
msg = 'Passing a 4-tuple to include() is not supported.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace', 'blah'))
def test_include_3_tuple(self):
msg = 'Passing a 3-tuple to include() is not supported.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace'))
def test_include_3_tuple_namespace(self):
msg = 'Cannot override the namespace for a dynamic module that provides a namespace.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace'), 'namespace')
def test_include_2_tuple(self):
self.assertEqual(
include((self.url_patterns, 'app_name')),
(self.url_patterns, 'app_name', 'app_name')
)
def test_include_2_tuple_namespace(self):
self.assertEqual(
include((self.url_patterns, 'app_name'), namespace='namespace'),
(self.url_patterns, 'app_name', 'namespace')
)
def test_include_app_name(self):
self.assertEqual(
include(self.app_urls),
(self.app_urls, 'inc-app', 'inc-app')
)
def test_include_app_name_namespace(self):
self.assertEqual(
include(self.app_urls, 'namespace'),
(self.app_urls, 'inc-app', 'namespace')
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class LookaheadTests(SimpleTestCase):
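# These tests exercise URL patterns that use regex lookahead/lookbehind
# assertions, checking both resolving and reversing against them.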
def test_valid_resolve(self):
test_urls = [
'/lookahead-/a-city/',
'/lookbehind-/a-city/',
'/lookahead+/a-city/',
'/lookbehind+/a-city/',
]
for test_url in test_urls:
with self.subTest(url=test_url):
self.assertEqual(resolve(test_url).kwargs, {'city': 'a-city'})
def test_invalid_resolve(self):
test_urls = [
'/lookahead-/not-a-city/',
'/lookbehind-/not-a-city/',
'/lookahead+/other-city/',
'/lookbehind+/other-city/',
]
for test_url in test_urls:
with self.subTest(url=test_url):
with self.assertRaises(Resolver404):
resolve(test_url)
def test_valid_reverse(self):
test_urls = [
('lookahead-positive', {'city': 'a-city'}, '/lookahead+/a-city/'),
('lookahead-negative', {'city': 'a-city'}, '/lookahead-/a-city/'),
('lookbehind-positive', {'city': 'a-city'}, '/lookbehind+/a-city/'),
('lookbehind-negative', {'city': 'a-city'}, '/lookbehind-/a-city/'),
]
for name, kwargs, expected in test_urls:
with self.subTest(name=name, kwargs=kwargs):
self.assertEqual(reverse(name, kwargs=kwargs), expected)
def test_invalid_reverse(self):
test_urls = [
('lookahead-positive', {'city': 'other-city'}),
('lookahead-negative', {'city': 'not-a-city'}),
('lookbehind-positive', {'city': 'other-city'}),
('lookbehind-negative', {'city': 'not-a-city'}),
]
for name, kwargs in test_urls:
with self.subTest(name=name, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, kwargs=kwargs)
|
11ca1efb58420a8ef430f3c3143f194684107578761e5b5e3f68d4e625389628 | import os
import shutil
import sys
import tempfile
import unittest
from io import StringIO
from pathlib import Path
from unittest import mock
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.management.commands.collectstatic import (
Command as CollectstaticCommand,
)
from django.core.management import call_command
from django.test import override_settings
from .cases import CollectionTestCase
from .settings import TEST_ROOT
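# Tests in this module cover the hashed/manifest static file storages:
# filename hashing, URL rewriting inside CSS/JS, collectstatic post-processing,
# manifest handling, and collected file permissions. hashed_file_path() below
# renders a static template snippet for the given path and strips STATIC_URL,
# leaving the storage-relative (hashed) name.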
def hashed_file_path(test, path):
fullpath = test.render_template(test.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, '')
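# Mixin of shared assertions for storages that append a content hash to file
# names; combined with CollectionTestCase in the storage-specific classes below.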
class TestHashedFiles:
hashed_file_path = hashed_file_path
def tearDown(self):
# Clear hashed files to avoid side effects among tests.
storage.staticfiles_storage.hashed_files.clear()
def assertPostCondition(self):
"""
Assert post conditions for a test are met. Must be manually called at
the end of each test.
"""
pass
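# Subclasses may override assertPostCondition(); TestCollectionManifestStorage
# below uses it to check that the in-memory hashed_files match the manifest on disk.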
def test_template_tag_return(self):
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True)
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.5e0040571e1a.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
self.assertPostCondition()
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_ignored_completely(self):
relpath = self.hashed_file_path("cached/css/ignored.css")
self.assertEqual(relpath, "cached/css/ignored.554da52152af.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'#foobar', content)
self.assertIn(b'http:foobar', content)
self.assertIn(b'https:foobar', content)
self.assertIn(b'data:foobar', content)
self.assertIn(b'chrome:foobar', content)
self.assertIn(b'//foobar', content)
self.assertPostCondition()
def test_path_with_querystring(self):
relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs")
with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_fragment(self):
relpath = self.hashed_file_path("cached/styles.css#eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs")
with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_querystring_and_fragment(self):
relpath = self.hashed_file_path("cached/css/fragments.css")
self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'fonts/font.b9b105392eb8.eot?#iefix', content)
self.assertIn(b'fonts/font.b8d603e42714.svg#webfontIyfZbseF', content)
self.assertIn(b'fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg', content)
self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content)
self.assertIn(b'#default#VML', content)
self.assertPostCondition()
def test_template_tag_absolute(self):
relpath = self.hashed_file_path("cached/absolute.css")
self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/cached/styles.css", content)
self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content)
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content)
self.assertPostCondition()
def test_template_tag_absolute_root(self):
"""
Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249).
"""
relpath = self.hashed_file_path("absolute_root.css")
self.assertEqual(relpath, "absolute_root.f821df1b64f7.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertPostCondition()
def test_template_tag_relative(self):
relpath = self.hashed_file_path("cached/relative.css")
self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"../cached/styles.css", content)
self.assertNotIn(b'@import "styles.css"', content)
self.assertNotIn(b'url(img/relative.png)', content)
self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
self.assertIn(b"../cached/styles.5e0040571e1a.css", content)
self.assertPostCondition()
def test_import_replacement(self):
"See #18050"
relpath = self.hashed_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read())
self.assertPostCondition()
def test_template_tag_deep_relative(self):
relpath = self.hashed_file_path("cached/css/window.css")
self.assertEqual(relpath, "cached/css/window.5d5c10836967.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b'url(img/window.png)', content)
self.assertIn(b'url("img/window.acae32e4532b.png")', content)
self.assertPostCondition()
def test_template_tag_url(self):
relpath = self.hashed_file_path("cached/url.css")
self.assertEqual(relpath, "cached/url.902310b73412.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"https://", relfile.read())
self.assertPostCondition()
def test_module_import(self):
relpath = self.hashed_file_path('cached/module.js')
self.assertEqual(relpath, 'cached/module.91b9cf9935da.js')
tests = [
# Relative imports.
b'import testConst from "./module_test.d489af3cf882.js";',
b'import relativeModule from "../nested/js/nested.866475c46bb4.js";',
b'import { firstConst, secondConst } from "./module_test.d489af3cf882.js";',
# Absolute import.
b'import rootConst from "/static/absolute_root.5586327fe78c.js";',
# Dynamic import.
b'const dynamicModule = import("./module_test.d489af3cf882.js");',
# Creating a module object.
b'import * as NewModule from "./module_test.d489af3cf882.js";',
# Aliases.
b'import { testConst as alias } from "./module_test.d489af3cf882.js";',
b'import {\n'
b' firstVar as firstVarAlias,\n'
b' secondVar as secondVarAlias\n'
b'} from "./module_test.d489af3cf882.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
self.assertPostCondition()
def test_aggregating_modules(self):
relpath = self.hashed_file_path('cached/module.js')
self.assertEqual(relpath, 'cached/module.91b9cf9935da.js')
tests = [
b'export * from "./module_test.d489af3cf882.js";',
b'export { testConst } from "./module_test.d489af3cf882.js";',
b'export {\n'
b' firstVar as firstVarAlias,\n'
b' secondVar as secondVarAlias\n'
b'} from "./module_test.d489af3cf882.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'loop')],
STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
)
def test_import_loop(self):
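# The 'loop' fixture project presumably contains static files that reference
# each other in a cycle, so hashed URL substitution never stabilizes and
# collectstatic gives up after the maximum number of post-processing passes.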
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue())
self.assertPostCondition()
def test_post_processing(self):
"""
post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this test case,
so the behavior is verified on a second run.
"""
collectstatic_args = {
'interactive': False,
'verbosity': 0,
'link': False,
'clear': False,
'dry_run': False,
'post_process': True,
'use_default_ignore_patterns': True,
'ignore_patterns': ['*.ignoreme'],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed'])
self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified'])
self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed'])
# No file should be yielded twice.
self.assertCountEqual(stats['post_processed'], set(stats['post_processed']))
self.assertPostCondition()
def test_css_import_case_insensitive(self):
relpath = self.hashed_file_path("cached/styles_insensitive.css")
self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_js_source_map(self):
relpath = self.hashed_file_path('cached/source_map.js')
self.assertEqual(relpath, 'cached/source_map.9371cbb02a26.js')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b'//# sourceMappingURL=source_map.js.map', content)
self.assertIn(
b'//# sourceMappingURL=source_map.js.99914b932bd3.map',
content,
)
self.assertPostCondition()
def test_js_source_map_sensitive(self):
relpath = self.hashed_file_path('cached/source_map_sensitive.js')
self.assertEqual(relpath, 'cached/source_map_sensitive.5da96fdd3cb3.js')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'//# sOuRcEMaPpInGURL=source_map.js.map', content)
self.assertNotIn(
b'//# sourceMappingURL=source_map.js.99914b932bd3.map',
content,
)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')],
STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
)
def test_post_processing_failure(self):
"""
post_processing indicates the origin of the error when it fails.
"""
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaises(Exception):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
self.assertPostCondition()
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.ExtraPatternsStorage')
class TestExtraPatternsStorage(CollectionTestCase):
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def cached_file_path(self, path):
fullpath = self.render_template(self.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, '')
def test_multi_extension_patterns(self):
"""
With storage classes having several file extension patterns, only the
files matching a specific file pattern should be affected by the
substitution (#19670).
"""
# CSS files shouldn't be touched by JS patterns.
relpath = self.cached_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read())
# Confirm JS patterns have been applied to JS files.
relpath = self.cached_file_path("cached/test.js")
self.assertEqual(relpath, "cached/test.388d7a790d46.js")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read())
@override_settings(
STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
)
class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
"""
Tests for the cache-busting ManifestStaticFilesStorage.
"""
def setUp(self):
super().setUp()
temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, 'test'))
self._clear_filename = os.path.join(temp_dir, 'test', 'cleared.txt')
with open(self._clear_filename, 'w') as f:
f.write('to be deleted in one test')
self.patched_settings = self.settings(
STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir],
)
self.patched_settings.enable()
self.addCleanup(shutil.rmtree, temp_dir)
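# The extra STATICFILES_DIRS entry provides 'test/cleared.txt', which
# test_clear_empties_manifest() deletes to verify that collecting with
# clear=True also prunes the entry from the manifest.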
self._manifest_strict = storage.staticfiles_storage.manifest_strict
def tearDown(self):
self.patched_settings.disable()
if os.path.exists(self._clear_filename):
os.unlink(self._clear_filename)
storage.staticfiles_storage.manifest_strict = self._manifest_strict
super().tearDown()
def assertPostCondition(self):
hashed_files = storage.staticfiles_storage.hashed_files
# The in-memory version of the manifest matches the one on disk
# since a properly created manifest should cover all filenames.
if hashed_files:
manifest = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_manifest_exists(self):
filename = storage.staticfiles_storage.manifest_name
path = storage.staticfiles_storage.path(filename)
self.assertTrue(os.path.exists(path))
def test_manifest_does_not_exist(self):
storage.staticfiles_storage.manifest_name = 'does.not.exist.json'
self.assertIsNone(storage.staticfiles_storage.read_manifest())
def test_manifest_does_not_ignore_permission_error(self):
with mock.patch('builtins.open', side_effect=PermissionError):
with self.assertRaises(PermissionError):
storage.staticfiles_storage.read_manifest()
def test_loaded_cache(self):
self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
manifest_content = storage.staticfiles_storage.read_manifest()
self.assertIn(
'"version": "%s"' % storage.staticfiles_storage.manifest_version,
manifest_content
)
def test_parse_cache(self):
hashed_files = storage.staticfiles_storage.hashed_files
manifest = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_clear_empties_manifest(self):
cleared_file_name = storage.staticfiles_storage.clean_name(os.path.join('test', 'cleared.txt'))
# collect the additional file
self.run_collectstatic()
hashed_files = storage.staticfiles_storage.hashed_files
self.assertIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertIn(cleared_file_name, manifest_content)
original_path = storage.staticfiles_storage.path(cleared_file_name)
self.assertTrue(os.path.exists(original_path))
# Delete the original file from the app, then collect with clear.
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
self.assertFileNotFound(original_path)
hashed_files = storage.staticfiles_storage.hashed_files
self.assertNotIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertNotIn(cleared_file_name, manifest_content)
def test_missing_entry(self):
missing_file_name = 'cached/missing.css'
configured_storage = storage.staticfiles_storage
self.assertNotIn(missing_file_name, configured_storage.hashed_files)
# File name not found in manifest
with self.assertRaisesMessage(ValueError, "Missing staticfiles manifest entry for '%s'" % missing_file_name):
self.hashed_file_path(missing_file_name)
configured_storage.manifest_strict = False
# File doesn't exist on disk
err_msg = "The file '%s' could not be found with %r." % (missing_file_name, configured_storage._wrapped)
with self.assertRaisesMessage(ValueError, err_msg):
self.hashed_file_path(missing_file_name)
content = StringIO()
content.write('Found')
configured_storage.save(missing_file_name, content)
# File exists on disk
self.hashed_file_path(missing_file_name)
def test_intermediate_files(self):
cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, 'cached'))
# Intermediate files shouldn't be created for reference.
self.assertEqual(
len([
cached_file
for cached_file in cached_files
if cached_file.startswith('relative.')
]),
2,
)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NoneHashStorage')
class TestCollectionNoneHashStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def test_hashed_name(self):
relpath = self.hashed_file_path('cached/styles.css')
self.assertEqual(relpath, 'cached/styles.css')
@override_settings(
STATICFILES_STORAGE='staticfiles_tests.storage.NoPostProcessReplacedPathStorage'
)
class TestCollectionNoPostProcessReplacedPaths(CollectionTestCase):
run_collectstatic_in_setUp = False
    def test_collectstatic_no_post_process_replaced_paths(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout)
self.assertIn('post-processed', stdout.getvalue())
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.SimpleStorage')
class TestCollectionSimpleStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def test_template_tag_return(self):
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt")
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.deploy12345.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.deploy12345.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.deploy12345.css", content)
class CustomStaticFilesStorage(storage.StaticFilesStorage):
"""
Used in TestStaticFilePermissions
"""
def __init__(self, *args, **kwargs):
kwargs['file_permissions_mode'] = 0o640
kwargs['directory_permissions_mode'] = 0o740
super().__init__(*args, **kwargs)
@unittest.skipIf(sys.platform == 'win32', "Windows only partially supports chmod.")
class TestStaticFilePermissions(CollectionTestCase):
command_params = {
'interactive': False,
'verbosity': 0,
'ignore_patterns': ['*.ignoreme'],
}
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
super().setUp()
def tearDown(self):
os.umask(self.old_umask)
super().tearDown()
# Don't run collectstatic command in this test class.
def run_collectstatic(self, **kwargs):
pass
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
)
def test_collect_static_files_permissions(self):
call_command('collectstatic', **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / 'test.txt'
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o655)
tests = [
static_root / 'subdir',
static_root / 'nested',
static_root / 'nested' / 'css',
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o765)
@override_settings(
FILE_UPLOAD_PERMISSIONS=None,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=None,
)
def test_collect_static_files_default_permissions(self):
call_command('collectstatic', **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / 'test.txt'
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o666 & ~self.umask)
tests = [
static_root / 'subdir',
static_root / 'nested',
static_root / 'nested' / 'css',
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o777 & ~self.umask)
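    # Worked example of the default-permission arithmetic with umask 0o027:
    #   files:       0o666 & ~0o027 == 0o640
    #   directories: 0o777 & ~0o027 == 0o750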
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
STATICFILES_STORAGE='staticfiles_tests.test_storage.CustomStaticFilesStorage',
)
def test_collect_static_files_subclass_of_static_storage(self):
call_command('collectstatic', **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / 'test.txt'
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o640)
tests = [
static_root / 'subdir',
static_root / 'nested',
static_root / 'nested' / 'css',
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o740)
@override_settings(
STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
)
class TestCollectionHashedFilesCache(CollectionTestCase):
"""
Files referenced from CSS use the correct final hashed name regardless of
the order in which the files are post-processed.
"""
hashed_file_path = hashed_file_path
def setUp(self):
super().setUp()
self._temp_dir = temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, 'test'))
self.addCleanup(shutil.rmtree, temp_dir)
def _get_filename_path(self, filename):
return os.path.join(self._temp_dir, 'test', filename)
def test_file_change_after_collectstatic(self):
# Create initial static files.
file_contents = (
('foo.png', 'foo'),
('bar.css', 'url("foo.png")\nurl("xyz.png")'),
('xyz.png', 'xyz'),
)
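        # The hashed names asserted below embed the first 12 hex characters of
        # the MD5 of the file contents, e.g. md5(b'foo').hexdigest()[:12] ==
        # 'acbd18db4cc2'.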
for filename, content in file_contents:
with open(self._get_filename_path(filename), 'w') as f:
f.write(content)
with self.modify_settings(STATICFILES_DIRS={'append': self._temp_dir}):
finders.get_finder.cache_clear()
err = StringIO()
# First collectstatic run.
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path('test/bar.css')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'foo.acbd18db4cc2.png', content)
self.assertIn(b'xyz.d16fb36f0911.png', content)
# Change the contents of the png files.
for filename in ('foo.png', 'xyz.png'):
with open(self._get_filename_path(filename), 'w+b') as f:
f.write(b"new content of file to change its hash")
# The hashes of the png files in the CSS file are updated after
# a second collectstatic.
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path('test/bar.css')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'foo.57a5cb9ba68d.png', content)
self.assertIn(b'xyz.57a5cb9ba68d.png', content)
|
e8f8f146c5a00416d9949ee10776a88aa3281fa9fbba25dba75215b827c6423b | import os
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.staticfiles.storage import ManifestStaticFilesStorage
from django.core.files import storage
from django.utils import timezone
class DummyStorage(storage.Storage):
"""
A storage class that implements get_modified_time() but raises
NotImplementedError for path().
"""
def _save(self, name, content):
return 'dummy'
def delete(self, name):
pass
def exists(self, name):
pass
def get_modified_time(self, name):
return datetime(1970, 1, 1, tzinfo=timezone.utc)
class PathNotImplementedStorage(storage.Storage):
def _save(self, name, content):
return 'dummy'
def _path(self, name):
return os.path.join(settings.STATIC_ROOT, name)
def exists(self, name):
return os.path.exists(self._path(name))
def listdir(self, path):
path = self._path(path)
directories, files = [], []
with os.scandir(path) as entries:
for entry in entries:
if entry.is_dir():
directories.append(entry.name)
else:
files.append(entry.name)
return directories, files
def delete(self, name):
name = self._path(name)
try:
os.remove(name)
except FileNotFoundError:
pass
def path(self, name):
raise NotImplementedError
class NeverCopyRemoteStorage(PathNotImplementedStorage):
"""
Return a future modified time for all files so that nothing is collected.
"""
def get_modified_time(self, name):
return datetime.now() + timedelta(days=30)
class QueryStringStorage(storage.Storage):
def url(self, path):
return path + '?a=b&c=d'
class SimpleStorage(ManifestStaticFilesStorage):
def file_hash(self, name, content=None):
return 'deploy12345'
class ExtraPatternsStorage(ManifestStaticFilesStorage):
"""
A storage class to test pattern substitutions with more than one pattern
entry. The added pattern rewrites strings like "url(...)" to JS_URL("...").
"""
patterns = tuple(ManifestStaticFilesStorage.patterns) + (
(
"*.js", (
(
r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""",
'JS_URL("%(url)s")',
),
),
),
)
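    # With this extra pattern, post-processing a .js file rewrites url()
    # references in addition to the default CSS handling, e.g. (hash is
    # illustrative):
    #
    #   url("foo.png")  ->  JS_URL("foo.acbd18db4cc2.png")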
class NoneHashStorage(ManifestStaticFilesStorage):
def file_hash(self, name, content=None):
return None
class NoPostProcessReplacedPathStorage(ManifestStaticFilesStorage):
max_post_process_passes = 0
|
0f0d266aad6f4a8ee60779fc4a46cccca182a0ff678009e50858d8034cc8ad66 | import time
from datetime import datetime, timedelta
from http import cookies
from django.http import HttpResponse
from django.test import SimpleTestCase
from django.test.utils import freeze_time
from django.utils.http import http_date
from django.utils.timezone import utc
class SetCookieTests(SimpleTestCase):
def test_near_expiration(self):
"""Cookie will expire when a near expiration time is provided."""
response = HttpResponse()
        # There's a timing weakness in this test; the expected result for
        # max-age requires a very slight difference between the evaluated
        # expiration time and the time evaluated in set_cookie(). If this
        # difference doesn't exist, the cookie time will be one second larger.
        # The sleep guarantees that there will be a time difference.
expires = datetime.now(tz=utc).replace(tzinfo=None) + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_aware_expiration(self):
"""set_cookie() accepts an aware datetime as expiration time."""
response = HttpResponse()
expires = datetime.now(tz=utc) + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_create_cookie_after_deleting_cookie(self):
"""Setting a cookie after deletion clears the expiry date."""
response = HttpResponse()
response.set_cookie('c', 'old-value')
self.assertEqual(response.cookies['c']['expires'], '')
response.delete_cookie('c')
self.assertEqual(response.cookies['c']['expires'], 'Thu, 01 Jan 1970 00:00:00 GMT')
response.set_cookie('c', 'new-value')
self.assertEqual(response.cookies['c']['expires'], '')
def test_far_expiration(self):
"""Cookie will expire when a distant expiration time is provided."""
response = HttpResponse()
response.set_cookie('datetime', expires=datetime(2038, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertIn(
datetime_cookie['expires'],
# assertIn accounts for slight time dependency (#23450)
('Fri, 01 Jan 2038 04:05:06 GMT', 'Fri, 01 Jan 2038 04:05:07 GMT')
)
def test_max_age_expiration(self):
"""Cookie will expire if max_age is provided."""
response = HttpResponse()
set_cookie_time = time.time()
with freeze_time(set_cookie_time):
response.set_cookie('max_age', max_age=10)
max_age_cookie = response.cookies['max_age']
self.assertEqual(max_age_cookie['max-age'], 10)
self.assertEqual(max_age_cookie['expires'], http_date(set_cookie_time + 10))
def test_max_age_int(self):
response = HttpResponse()
response.set_cookie('max_age', max_age=10.6)
self.assertEqual(response.cookies['max_age']['max-age'], 10)
def test_httponly_cookie(self):
response = HttpResponse()
response.set_cookie('example', httponly=True)
example_cookie = response.cookies['example']
self.assertIn('; %s' % cookies.Morsel._reserved['httponly'], str(example_cookie))
self.assertIs(example_cookie['httponly'], True)
def test_unicode_cookie(self):
"""HttpResponse.set_cookie() works with Unicode data."""
response = HttpResponse()
cookie_value = '清風'
response.set_cookie('test', cookie_value)
self.assertEqual(response.cookies['test'].value, cookie_value)
def test_samesite(self):
response = HttpResponse()
response.set_cookie('example', samesite='None')
self.assertEqual(response.cookies['example']['samesite'], 'None')
response.set_cookie('example', samesite='Lax')
self.assertEqual(response.cookies['example']['samesite'], 'Lax')
response.set_cookie('example', samesite='strict')
self.assertEqual(response.cookies['example']['samesite'], 'strict')
def test_invalid_samesite(self):
msg = 'samesite must be "lax", "none", or "strict".'
with self.assertRaisesMessage(ValueError, msg):
HttpResponse().set_cookie('example', samesite='invalid')
class DeleteCookieTests(SimpleTestCase):
def test_default(self):
response = HttpResponse()
response.delete_cookie('c')
cookie = response.cookies['c']
self.assertEqual(cookie['expires'], 'Thu, 01 Jan 1970 00:00:00 GMT')
self.assertEqual(cookie['max-age'], 0)
self.assertEqual(cookie['path'], '/')
self.assertEqual(cookie['secure'], '')
self.assertEqual(cookie['domain'], '')
self.assertEqual(cookie['samesite'], '')
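        # The resulting header carries an empty value plus the attributes
        # asserted above, roughly:
        #   Set-Cookie: c=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/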
def test_delete_cookie_secure_prefix(self):
"""
delete_cookie() sets the secure flag if the cookie name starts with
__Host- or __Secure- (without that, browsers ignore cookies with those
prefixes).
"""
response = HttpResponse()
for prefix in ('Secure', 'Host'):
with self.subTest(prefix=prefix):
cookie_name = '__%s-c' % prefix
response.delete_cookie(cookie_name)
self.assertIs(response.cookies[cookie_name]['secure'], True)
def test_delete_cookie_secure_samesite_none(self):
# delete_cookie() sets the secure flag if samesite='none'.
response = HttpResponse()
response.delete_cookie('c', samesite='none')
self.assertIs(response.cookies['c']['secure'], True)
def test_delete_cookie_samesite(self):
response = HttpResponse()
response.delete_cookie('c', samesite='lax')
self.assertEqual(response.cookies['c']['samesite'], 'lax')
|
03d277ee194f69797ce094ebb47b026fb2ca362e48295f308e496e13b8ddff0a | import io
import os
import sys
import tempfile
from unittest import skipIf
from django.core.files.base import ContentFile
from django.http import FileResponse
from django.test import SimpleTestCase
class FileResponseTests(SimpleTestCase):
def test_file_from_disk_response(self):
response = FileResponse(open(__file__, 'rb'))
self.assertEqual(response.headers['Content-Length'], str(os.path.getsize(__file__)))
self.assertIn(response.headers['Content-Type'], ['text/x-python', 'text/plain'])
self.assertEqual(
response.headers['Content-Disposition'],
'inline; filename="test_fileresponse.py"',
)
response.close()
def test_file_from_buffer_response(self):
response = FileResponse(io.BytesIO(b'binary content'))
self.assertEqual(response.headers['Content-Length'], '14')
self.assertEqual(response.headers['Content-Type'], 'application/octet-stream')
self.assertFalse(response.has_header('Content-Disposition'))
self.assertEqual(list(response), [b'binary content'])
def test_file_from_buffer_unnamed_attachment(self):
response = FileResponse(io.BytesIO(b'binary content'), as_attachment=True)
self.assertEqual(response.headers['Content-Length'], '14')
self.assertEqual(response.headers['Content-Type'], 'application/octet-stream')
self.assertEqual(response.headers['Content-Disposition'], 'attachment')
self.assertEqual(list(response), [b'binary content'])
@skipIf(sys.platform == 'win32', "Named pipes are Unix-only.")
def test_file_from_named_pipe_response(self):
with tempfile.TemporaryDirectory() as temp_dir:
pipe_file = os.path.join(temp_dir, 'named_pipe')
os.mkfifo(pipe_file)
pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK)
with open(pipe_file, 'wb') as pipe_for_write:
pipe_for_write.write(b'binary content')
response = FileResponse(os.fdopen(pipe_for_read, mode='rb'))
self.assertEqual(list(response), [b'binary content'])
response.close()
self.assertFalse(response.has_header('Content-Length'))
def test_file_from_disk_as_attachment(self):
response = FileResponse(open(__file__, 'rb'), as_attachment=True)
self.assertEqual(response.headers['Content-Length'], str(os.path.getsize(__file__)))
self.assertIn(response.headers['Content-Type'], ['text/x-python', 'text/plain'])
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename="test_fileresponse.py"',
)
response.close()
def test_compressed_response(self):
"""
If compressed responses are served with the uncompressed Content-Type
and a compression Content-Encoding, browsers might automatically
        uncompress the file, which is usually not wanted.
"""
test_tuples = (
('.tar.gz', 'application/gzip'),
('.tar.bz2', 'application/x-bzip'),
('.tar.xz', 'application/x-xz'),
)
for extension, mimetype in test_tuples:
with self.subTest(ext=extension):
with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
response = FileResponse(tmp)
self.assertEqual(response.headers['Content-Type'], mimetype)
self.assertFalse(response.has_header('Content-Encoding'))
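    # In other words, a '.tar.gz' download should be served roughly as:
    #   Content-Type: application/gzip          (and no Content-Encoding header)
    # rather than:
    #   Content-Type: application/x-tar
    #   Content-Encoding: gzip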
def test_unicode_attachment(self):
response = FileResponse(
ContentFile(b'binary content', name="祝您平安.odt"), as_attachment=True,
content_type='application/vnd.oasis.opendocument.text',
)
self.assertEqual(
response.headers['Content-Type'],
'application/vnd.oasis.opendocument.text',
)
self.assertEqual(
response.headers['Content-Disposition'],
"attachment; filename*=utf-8''%E7%A5%9D%E6%82%A8%E5%B9%B3%E5%AE%89.odt"
)
def test_repr(self):
response = FileResponse(io.BytesIO(b'binary content'))
self.assertEqual(
repr(response),
'<FileResponse status_code=200, "application/octet-stream">',
)
|
a8938cd58c81308be483afa7c2ede6120ee95a6d0c7ef74a572c6e648b14b20a | from django.apps import apps
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_lru_cache
class FieldDeconstructionTests(SimpleTestCase):
"""
Tests the deconstruct() method on all core fields.
"""
def test_name(self):
"""
Tests the outputting of the correct name if assigned one.
"""
# First try using a "normal" field
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("is_awesome_test")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "is_awesome_test")
# Now try with a ForeignKey
field = models.ForeignKey("some_fake.ModelName", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertIsNone(name)
field.set_attributes_from_name("author")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(name, "author")
def test_db_tablespace(self):
field = models.Field()
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
# With a DEFAULT_DB_TABLESPACE.
with self.settings(DEFAULT_DB_TABLESPACE='foo'):
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
# With a db_tablespace.
field = models.Field(db_tablespace='foo')
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'db_tablespace': 'foo'})
# With a db_tablespace equal to DEFAULT_DB_TABLESPACE.
with self.settings(DEFAULT_DB_TABLESPACE='foo'):
_, _, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'db_tablespace': 'foo'})
def test_auto_field(self):
field = models.AutoField(primary_key=True)
field.set_attributes_from_name("id")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.AutoField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"primary_key": True})
def test_big_integer_field(self):
field = models.BigIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BigIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_boolean_field(self):
field = models.BooleanField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.BooleanField(default=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BooleanField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"default": True})
def test_char_field(self):
field = models.CharField(max_length=65)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65})
field = models.CharField(max_length=65, null=True, blank=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})
def test_char_field_choices(self):
field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two")))
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CharField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1})
def test_csi_field(self):
field = models.CommaSeparatedIntegerField(max_length=100)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 100})
def test_date_field(self):
field = models.DateField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now": True})
def test_datetime_field(self):
field = models.DateTimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.DateTimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True})
# Bug #21785
field = models.DateTimeField(auto_now=True, auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DateTimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True})
def test_decimal_field(self):
field = models.DecimalField(max_digits=5, decimal_places=2)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2})
def test_decimal_field_0_decimal_places(self):
"""
A DecimalField with decimal_places=0 should work (#22272).
"""
field = models.DecimalField(max_digits=5, decimal_places=0)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.DecimalField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0})
def test_email_field(self):
field = models.EmailField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 254})
field = models.EmailField(max_length=255)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.EmailField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 255})
def test_file_field(self):
field = models.FileField(upload_to="foo/bar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar"})
# Test max_length
field = models.FileField(upload_to="foo/bar", max_length=200)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FileField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200})
def test_file_path_field(self):
field = models.FilePathField(match=r".*\.txt$")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"match": r".*\.txt$"})
field = models.FilePathField(recursive=True, allow_folders=True, max_length=123)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FilePathField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123})
def test_float_field(self):
field = models.FloatField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.FloatField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_foreign_key(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.ForeignKey("auth.Permission", models.CASCADE)
field.remote_field.model = Permission
field.remote_field.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.ForeignKey("auth.User", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test nonexistent (for now) model
field = models.ForeignKey("something.Else", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE})
# Test on_delete
field = models.ForeignKey("auth.User", models.SET_NULL)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL})
# Test to_field preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE})
# Test related_name preservation
field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE})
# Test related_query_name
field = models.ForeignKey("auth.Permission", models.CASCADE, related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE}
)
# Test limit_choices_to
field = models.ForeignKey("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE}
)
# Test unique
field = models.ForeignKey("auth.Permission", models.CASCADE, unique=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "unique": True, "on_delete": models.CASCADE})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_foreign_key_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
            # there's no validation. We just want to check that the swappable
            # setting is tracked correctly.
field = models.ForeignKey("auth.Permission", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ForeignKey")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_one_to_one(self):
# Test basic pointing
from django.contrib.auth.models import Permission
field = models.OneToOneField("auth.Permission", models.CASCADE)
field.remote_field.model = Permission
field.remote_field.field_name = "id"
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swap detection for swappable model
field = models.OneToOneField("auth.User", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.CASCADE})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test nonexistent (for now) model
field = models.OneToOneField("something.Else", models.CASCADE)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "something.else", "on_delete": models.CASCADE})
# Test on_delete
field = models.OneToOneField("auth.User", models.SET_NULL)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.user", "on_delete": models.SET_NULL})
# Test to_field
field = models.OneToOneField("auth.Permission", models.CASCADE, to_field="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "to_field": "foobar", "on_delete": models.CASCADE})
# Test related_name
field = models.OneToOneField("auth.Permission", models.CASCADE, related_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "related_name": "foobar", "on_delete": models.CASCADE})
# Test related_query_name
field = models.OneToOneField("auth.Permission", models.CASCADE, related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.permission", "related_query_name": "foobar", "on_delete": models.CASCADE}
)
# Test limit_choices_to
field = models.OneToOneField("auth.Permission", models.CASCADE, limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{"to": "auth.permission", "limit_choices_to": {'foo': 'bar'}, "on_delete": models.CASCADE}
)
# Test unique
field = models.OneToOneField("auth.Permission", models.CASCADE, unique=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.OneToOneField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.permission", "on_delete": models.CASCADE})
def test_image_field(self):
field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ImageField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"})
def test_integer_field(self):
field = models.IntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_ip_address_field(self):
field = models.IPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.IPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_generic_ip_address_field(self):
field = models.GenericIPAddressField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.GenericIPAddressField(protocol="IPv6")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.GenericIPAddressField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"protocol": "IPv6"})
def test_many_to_many_field(self):
# Test normal
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertFalse(hasattr(kwargs['to'], "setting_name"))
# Test swappable
field = models.ManyToManyField("auth.User")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.User"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
# Test through
field = models.ManyToManyField("auth.Permission", through="auth.Group")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"})
# Test custom db_table
field = models.ManyToManyField("auth.Permission", db_table="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"})
# Test related_name
field = models.ManyToManyField("auth.Permission", related_name="custom_table")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"})
# Test related_query_name
field = models.ManyToManyField("auth.Permission", related_query_name="foobar")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "related_query_name": "foobar"})
# Test limit_choices_to
field = models.ManyToManyField("auth.Permission", limit_choices_to={'foo': 'bar'})
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission", "limit_choices_to": {'foo': 'bar'}})
@override_settings(AUTH_USER_MODEL="auth.Permission")
def test_many_to_many_field_swapped(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
# It doesn't matter that we swapped out user for permission;
            # there's no validation. We just want to check that the swappable
            # setting is tracked correctly.
field = models.ManyToManyField("auth.Permission")
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.ManyToManyField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"to": "auth.Permission"})
self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
def test_many_to_many_field_related_name(self):
class MyModel(models.Model):
flag = models.BooleanField(default=True)
m2m = models.ManyToManyField('self')
m2m_related_name = models.ManyToManyField(
'self',
related_name='custom_name',
related_query_name='custom_query_name',
limit_choices_to={'flag': True},
)
name, path, args, kwargs = MyModel.m2m.field.deconstruct()
self.assertEqual(path, 'django.db.models.ManyToManyField')
self.assertEqual(args, [])
# deconstruct() should not include attributes which were not passed to
# the field during initialization.
self.assertEqual(kwargs, {'to': 'field_deconstruction.MyModel'})
# Passed attributes.
name, path, args, kwargs = MyModel.m2m_related_name.field.deconstruct()
self.assertEqual(path, 'django.db.models.ManyToManyField')
self.assertEqual(args, [])
self.assertEqual(kwargs, {
'to': 'field_deconstruction.MyModel',
'related_name': 'custom_name',
'related_query_name': 'custom_query_name',
'limit_choices_to': {'flag': True},
})
def test_positive_integer_field(self):
field = models.PositiveIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_small_integer_field(self):
field = models.PositiveSmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.PositiveSmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_positive_big_integer_field(self):
field = models.PositiveBigIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'django.db.models.PositiveBigIntegerField')
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_slug_field(self):
field = models.SlugField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.SlugField(db_index=False, max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SlugField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"db_index": False, "max_length": 231})
def test_small_integer_field(self):
field = models.SmallIntegerField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.SmallIntegerField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_text_field(self):
field = models.TextField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TextField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_time_field(self):
field = models.TimeField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.TimeField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.TimeField(auto_now=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now': True})
field = models.TimeField(auto_now_add=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'auto_now_add': True})
def test_url_field(self):
field = models.URLField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.URLField(max_length=231)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.URLField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"max_length": 231})
def test_binary_field(self):
field = models.BinaryField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.BinaryField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
field = models.BinaryField(editable=True)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(args, [])
self.assertEqual(kwargs, {'editable': True})
|
c1c74f77a7e50c934c5e7f9259378986f3a9079816da985f39b2d680f6cb5ae5 | import unittest
from django.core.exceptions import FieldError
from django.db import IntegrityError, connection, transaction
from django.db.models import CharField, Count, F, IntegerField, Max
from django.db.models.functions import Abs, Concat, Lower
from django.test import TestCase
from django.test.utils import register_lookup
from .models import (
A, B, Bar, D, DataPoint, Foo, RelatedPoint, UniqueNumber,
UniqueNumberChild,
)
class SimpleTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = A.objects.create()
cls.a2 = A.objects.create()
for x in range(20):
B.objects.create(a=cls.a1)
D.objects.create(a=cls.a1)
def test_nonempty_update(self):
"""
Update changes the right number of rows for a nonempty queryset
"""
num_updated = self.a1.b_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update(self):
"""
Update changes the right number of rows for an empty queryset
"""
num_updated = self.a2.b_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_nonempty_update_with_inheritance(self):
"""
        Update changes the right number of rows for a nonempty queryset
        when the update affects only a base table.
"""
num_updated = self.a1.d_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update_with_inheritance(self):
"""
Update changes the right number of rows for an empty queryset
when the update affects only a base table
"""
num_updated = self.a2.d_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_foreign_key_update_with_id(self):
"""
Update works using <field>_id for foreign keys
"""
num_updated = self.a1.d_set.update(a_id=self.a2)
self.assertEqual(num_updated, 20)
self.assertEqual(self.a2.d_set.count(), 20)
class AdvancedTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.d0 = DataPoint.objects.create(name="d0", value="apple")
cls.d2 = DataPoint.objects.create(name="d2", value="banana")
cls.d3 = DataPoint.objects.create(name="d3", value="banana")
cls.r1 = RelatedPoint.objects.create(name="r1", data=cls.d3)
def test_update(self):
"""
Objects are updated by first filtering the candidates into a queryset
        and then calling the update() method. It executes immediately and
        returns the number of rows matched.
"""
resp = DataPoint.objects.filter(value="apple").update(name="d1")
self.assertEqual(resp, 1)
resp = DataPoint.objects.filter(value="apple")
self.assertEqual(list(resp), [self.d0])
def test_update_multiple_objects(self):
"""
We can update multiple objects at once.
"""
resp = DataPoint.objects.filter(value='banana').update(value='pineapple')
self.assertEqual(resp, 2)
self.assertEqual(DataPoint.objects.get(name="d2").value, 'pineapple')
def test_update_fk(self):
"""
Foreign key fields can also be updated, although you can only update
the object referred to, not anything inside the related object.
"""
resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
self.assertEqual(resp, 1)
resp = RelatedPoint.objects.filter(data__name="d0")
self.assertEqual(list(resp), [self.r1])
def test_update_multiple_fields(self):
"""
Multiple fields can be updated at once
"""
resp = DataPoint.objects.filter(value="apple").update(
value="fruit", another_value="peach")
self.assertEqual(resp, 1)
d = DataPoint.objects.get(name="d0")
self.assertEqual(d.value, 'fruit')
self.assertEqual(d.another_value, 'peach')
def test_update_all(self):
"""
In the rare case you want to update every instance of a model, update()
is also a manager method.
"""
self.assertEqual(DataPoint.objects.update(value='thing'), 3)
resp = DataPoint.objects.values('value').distinct()
self.assertEqual(list(resp), [{'value': 'thing'}])
def test_update_slice_fail(self):
"""
We do not support update on already sliced query sets.
"""
method = DataPoint.objects.all()[:2].update
msg = 'Cannot update a query once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
method(another_value='another thing')
def test_update_respects_to_field(self):
"""
Update of an FK field which specifies a to_field works.
"""
a_foo = Foo.objects.create(target='aaa')
b_foo = Foo.objects.create(target='bbb')
bar = Bar.objects.create(foo=a_foo)
self.assertEqual(bar.foo_id, a_foo.target)
bar_qs = Bar.objects.filter(pk=bar.pk)
self.assertEqual(bar_qs[0].foo_id, a_foo.target)
bar_qs.update(foo=b_foo)
self.assertEqual(bar_qs[0].foo_id, b_foo.target)
def test_update_m2m_field(self):
msg = (
'Cannot update model field '
'<django.db.models.fields.related.ManyToManyField: m2m_foo> '
'(only non-relations and foreign keys permitted).'
)
with self.assertRaisesMessage(FieldError, msg):
Bar.objects.update(m2m_foo='whatever')
def test_update_transformed_field(self):
A.objects.create(x=5)
A.objects.create(x=-6)
with register_lookup(IntegerField, Abs):
A.objects.update(x=F('x__abs'))
self.assertCountEqual(A.objects.values_list('x', flat=True), [5, 6])
def test_update_annotated_queryset(self):
"""
Update of a queryset that's been annotated.
"""
# Trivial annotated update
qs = DataPoint.objects.annotate(alias=F('value'))
self.assertEqual(qs.update(another_value='foo'), 3)
# Update where annotation is used for filtering
qs = DataPoint.objects.annotate(alias=F('value')).filter(alias='apple')
self.assertEqual(qs.update(another_value='foo'), 1)
# Update where annotation is used in update parameters
qs = DataPoint.objects.annotate(alias=F('value'))
self.assertEqual(qs.update(another_value=F('alias')), 3)
# Update where aggregation annotation is used in update parameters
qs = DataPoint.objects.annotate(max=Max('value'))
msg = (
'Aggregate functions are not allowed in this query '
'(another_value=Max(Col(update_datapoint, update.DataPoint.value))).'
)
with self.assertRaisesMessage(FieldError, msg):
qs.update(another_value=F('max'))
def test_update_annotated_multi_table_queryset(self):
"""
Update of a queryset that's been annotated and involves multiple tables.
"""
# Trivial annotated update
qs = DataPoint.objects.annotate(related_count=Count('relatedpoint'))
self.assertEqual(qs.update(value='Foo'), 3)
# Update where annotation is used for filtering
qs = DataPoint.objects.annotate(related_count=Count('relatedpoint'))
self.assertEqual(qs.filter(related_count=1).update(value='Foo'), 1)
# Update where aggregation annotation is used in update parameters
qs = RelatedPoint.objects.annotate(max=Max('data__value'))
msg = 'Joined field references are not permitted in this query'
with self.assertRaisesMessage(FieldError, msg):
qs.update(name=F('max'))
def test_update_with_joined_field_annotation(self):
msg = 'Joined field references are not permitted in this query'
with register_lookup(CharField, Lower):
for annotation in (
F('data__name'),
F('data__name__lower'),
Lower('data__name'),
Concat('data__name', 'data__value'),
):
with self.subTest(annotation=annotation):
with self.assertRaisesMessage(FieldError, msg):
RelatedPoint.objects.annotate(
new_name=annotation,
).update(name=F('new_name'))
@unittest.skipUnless(
connection.vendor == 'mysql',
'UPDATE...ORDER BY syntax is supported on MySQL/MariaDB',
)
class MySQLUpdateOrderByTest(TestCase):
"""Update field with a unique constraint using an ordered queryset."""
@classmethod
def setUpTestData(cls):
UniqueNumber.objects.create(number=1)
UniqueNumber.objects.create(number=2)
def test_order_by_update_on_unique_constraint(self):
tests = [
('-number', 'id'),
(F('number').desc(), 'id'),
(F('number') * -1, 'id'),
]
for ordering in tests:
with self.subTest(ordering=ordering), transaction.atomic():
updated = UniqueNumber.objects.order_by(*ordering).update(
number=F('number') + 1,
)
self.assertEqual(updated, 2)
def test_order_by_update_on_unique_constraint_annotation(self):
# Ordering by annotations is omitted because they cannot be resolved in
# .update().
with self.assertRaises(IntegrityError):
UniqueNumber.objects.annotate(
number_inverse=F('number').desc(),
).order_by('number_inverse').update(
number=F('number') + 1,
)
def test_order_by_update_on_parent_unique_constraint(self):
# Ordering by inherited fields is omitted because joined fields cannot
# be used in the ORDER BY clause.
UniqueNumberChild.objects.create(number=3)
UniqueNumberChild.objects.create(number=4)
with self.assertRaises(IntegrityError):
UniqueNumberChild.objects.order_by('number').update(
number=F('number') + 1,
)
def test_order_by_update_on_related_field(self):
# Ordering by related fields is omitted because joined fields cannot be
# used in the ORDER BY clause.
data = DataPoint.objects.create(name='d0', value='apple')
related = RelatedPoint.objects.create(name='r0', data=data)
with self.assertNumQueries(1) as ctx:
updated = RelatedPoint.objects.order_by('data__name').update(name='new')
sql = ctx.captured_queries[0]['sql']
self.assertNotIn('ORDER BY', sql)
self.assertEqual(updated, 1)
related.refresh_from_db()
self.assertEqual(related.name, 'new')
|
041cb50dc497ea31a90f2c0af209dd084519b4b1a686f0c59ecc7d4c27a0c379 | """
Tests for the update() queryset method that allows in-place, multi-object
updates.
"""
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=20)
value = models.CharField(max_length=20)
another_value = models.CharField(max_length=20, blank=True)
class RelatedPoint(models.Model):
name = models.CharField(max_length=20)
data = models.ForeignKey(DataPoint, models.CASCADE)
class A(models.Model):
x = models.IntegerField(default=10)
class B(models.Model):
a = models.ForeignKey(A, models.CASCADE)
y = models.IntegerField(default=10)
class C(models.Model):
y = models.IntegerField(default=10)
class D(C):
a = models.ForeignKey(A, models.CASCADE)
class Foo(models.Model):
target = models.CharField(max_length=10, unique=True)
class Bar(models.Model):
foo = models.ForeignKey(Foo, models.CASCADE, to_field='target')
m2m_foo = models.ManyToManyField(Foo, related_name='m2m_foo')
class UniqueNumber(models.Model):
number = models.IntegerField(unique=True)
class UniqueNumberChild(UniqueNumber):
pass
|
bfaa4cdf513ca41093f6b9cd85a418187438aed34937f27a064141deec77affc | import sys
import threading
import time
from unittest import skipIf, skipUnless
from django.db import (
DatabaseError, Error, IntegrityError, OperationalError, connection,
transaction,
)
from django.test import (
TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from .models import Reporter
@skipUnlessDBFeature('uses_savepoints')
class AtomicTests(TransactionTestCase):
"""
Tests for the atomic decorator and context manager.
The tests make assertions on internal attributes because there isn't a
robust way to ask the database for its current transaction state.
Since the decorator syntax is converted into a context manager (see the
implementation), there are only a few basic tests with the decorator
syntax and the bulk of the tests use the context manager syntax.
"""
available_apps = ['transactions']
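    # Both spellings exercised below are equivalent; the decorator is turned
    # into a context manager internally (sketch):
    #
    #   @transaction.atomic              # decorator form
    #   def make_reporter(): ...
    #
    #   with transaction.atomic():       # context-manager form
    #       ...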
def test_decorator_syntax_commit(self):
@transaction.atomic
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_decorator_syntax_rollback(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_alternate_decorator_syntax_commit(self):
@transaction.atomic()
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_alternate_decorator_syntax_rollback(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_commit(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_commit_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_nested_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic():
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_merged_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
# Writes in the outer block are rolled back too.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_commit_commit(self):
atomic = transaction.atomic()
with atomic:
reporter1 = Reporter.objects.create(first_name="Tintin")
with atomic:
reporter2 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_reuse_commit_rollback(self):
atomic = transaction.atomic()
with atomic:
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_reuse_rollback_commit(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with atomic:
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_rollback_rollback(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_force_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
            # The atomic block shouldn't roll back, but force it.
self.assertFalse(transaction.get_rollback())
transaction.set_rollback(True)
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_prevent_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
sid = transaction.savepoint()
# trigger a database error inside an inner atomic without savepoint
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
with connection.cursor() as cursor:
cursor.execute(
"SELECT no_such_col FROM transactions_reporter")
# prevent atomic from rolling back since we're recovering manually
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
transaction.savepoint_rollback(sid)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
class AtomicInsideTransactionTests(AtomicTests):
"""All basic tests for atomic should also pass within an existing transaction."""
def setUp(self):
self.atomic = transaction.atomic()
self.atomic.__enter__()
def tearDown(self):
self.atomic.__exit__(*sys.exc_info())
class AtomicWithoutAutocommitTests(AtomicTests):
"""All basic tests for atomic should also pass when autocommit is turned off."""
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
# The tests access the database after exercising 'atomic', initiating
# a transaction; a rollback is required before restoring autocommit.
transaction.rollback()
transaction.set_autocommit(True)
@skipUnlessDBFeature('uses_savepoints')
class AtomicMergeTests(TransactionTestCase):
"""Test merging transactions with savepoint=False."""
available_apps = ['transactions']
def test_merged_outer_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The first block has a savepoint and must roll back.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_inner_savepoint_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second block has a savepoint and must roll back.
self.assertEqual(Reporter.objects.count(), 1)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
@skipUnlessDBFeature('uses_savepoints')
class AtomicErrorsTests(TransactionTestCase):
available_apps = ['transactions']
forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active."
def test_atomic_prevents_setting_autocommit(self):
autocommit = transaction.get_autocommit()
with transaction.atomic():
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.set_autocommit(not autocommit)
# Make sure autocommit wasn't changed.
self.assertEqual(connection.autocommit, autocommit)
def test_atomic_prevents_calling_transaction_methods(self):
with transaction.atomic():
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.commit()
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.rollback()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# The transaction is marked as needing rollback.
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock")
@skipIfDBFeature('atomic_transactions')
def test_atomic_allows_queries_after_fixing_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# Mark the transaction as no longer needing rollback.
transaction.set_rollback(False)
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus")
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self):
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
connection.close()
# The connection is closed and the transaction is marked as
# needing rollback. This will raise an InterfaceError on databases
# that refuse to create cursors on closed connections (PostgreSQL)
# and a TransactionManagementError on other databases.
with self.assertRaises(Error):
Reporter.objects.create(first_name="Cuthbert", last_name="Calculus")
# The connection is usable again.
self.assertEqual(Reporter.objects.count(), 0)
@skipUnless(connection.vendor == 'mysql', "MySQL-specific behaviors")
class AtomicMySQLTests(TransactionTestCase):
available_apps = ['transactions']
@skipIf(threading is None, "Test requires threading")
def test_implicit_savepoint_rollback(self):
"""MySQL implicitly rolls back savepoints when it deadlocks (#22291)."""
Reporter.objects.create(id=1)
Reporter.objects.create(id=2)
main_thread_ready = threading.Event()
def other_thread():
try:
with transaction.atomic():
Reporter.objects.select_for_update().get(id=1)
main_thread_ready.wait()
# 1) This line locks... (see below for 2)
Reporter.objects.exclude(id=1).update(id=2)
finally:
# This is the thread-local connection, not the main connection.
connection.close()
other_thread = threading.Thread(target=other_thread)
other_thread.start()
with self.assertRaisesMessage(OperationalError, 'Deadlock found'):
# Double atomic to enter a transaction and create a savepoint.
with transaction.atomic():
with transaction.atomic():
Reporter.objects.select_for_update().get(id=2)
main_thread_ready.set()
# The two threads can't be synchronized with an event here
# because the other thread locks. Sleep for a little while.
time.sleep(1)
# 2) ... and this line deadlocks. (see above for 1)
Reporter.objects.exclude(id=2).update(id=1)
other_thread.join()
class AtomicMiscTests(TransactionTestCase):
available_apps = ['transactions']
def test_wrap_callable_instance(self):
"""#20028 -- Atomic must support wrapping callable instances."""
class Callable:
def __call__(self):
pass
# Must not raise an exception
transaction.atomic(Callable())
@skipUnlessDBFeature('can_release_savepoints')
def test_atomic_does_not_leak_savepoints_on_failure(self):
"""#23074 -- Savepoints must be released after rollback."""
# Expect an error when rolling back a savepoint that doesn't exist.
# Done outside of the transaction block to ensure proper recovery.
with self.assertRaises(Error):
# Start a plain transaction.
with transaction.atomic():
# Swallow the intentional error raised in the sub-transaction.
with self.assertRaisesMessage(Exception, "Oops"):
# Start a sub-transaction with a savepoint.
with transaction.atomic():
sid = connection.savepoint_ids[-1]
raise Exception("Oops")
# This is expected to fail because the savepoint no longer exists.
connection.savepoint_rollback(sid)
def test_mark_for_rollback_on_error_in_transaction(self):
with transaction.atomic(savepoint=False):
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_rollback())
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` marked the transaction as broken …
self.assertTrue(transaction.get_rollback())
# … and further queries fail.
msg = "You can't execute queries until the end of the 'atomic' block."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
Reporter.objects.create()
# Transaction errors are reset at the end of a transaction, so this should just work.
Reporter.objects.create()
def test_mark_for_rollback_on_error_in_autocommit(self):
self.assertTrue(transaction.get_autocommit())
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_connection().needs_rollback)
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` did not mark the transaction
# as broken, since we are in autocommit mode …
self.assertFalse(transaction.get_connection().needs_rollback)
# … and further queries work nicely.
Reporter.objects.create()
class NonAutocommitTests(TransactionTestCase):
available_apps = []
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
transaction.rollback()
transaction.set_autocommit(True)
def test_orm_query_after_error_and_rollback(self):
"""
ORM queries are allowed after an error and a rollback in non-autocommit
mode (#27504).
"""
r1 = Reporter.objects.create(first_name='Archibald', last_name='Haddock')
r2 = Reporter(first_name='Cuthbert', last_name='Calculus', id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
transaction.rollback()
Reporter.objects.last()
def test_orm_query_without_autocommit(self):
"""#24921 -- ORM queries must be possible after set_autocommit(False)."""
Reporter.objects.create(first_name="Tintin")
class DurableTests(TransactionTestCase):
available_apps = ['transactions']
def test_commit(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name='Tintin')
self.assertEqual(Reporter.objects.get(), reporter)
def test_nested_outer_durable(self):
with transaction.atomic(durable=True):
reporter1 = Reporter.objects.create(first_name='Tintin')
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name='Archibald',
last_name='Haddock',
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_both_durable(self):
msg = 'A durable atomic block cannot be nested within another atomic block.'
with transaction.atomic(durable=True):
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
def test_nested_inner_durable(self):
msg = 'A durable atomic block cannot be nested within another atomic block.'
with transaction.atomic():
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
class DisableDurabilityCheckTests(TestCase):
"""
TestCase runs all tests in a transaction by default. Code using
durable=True would always fail when run from TestCase. This would mean
these tests would be forced to use the slower TransactionTestCase even when
not testing durability. For this reason, TestCase disables the durability
check.
"""
available_apps = ['transactions']
def test_commit(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name='Tintin')
self.assertEqual(Reporter.objects.get(), reporter)
def test_nested_outer_durable(self):
with transaction.atomic(durable=True):
reporter1 = Reporter.objects.create(first_name='Tintin')
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name='Archibald',
last_name='Haddock',
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_both_durable(self):
with transaction.atomic(durable=True):
# Error is not raised.
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name='Tintin')
self.assertEqual(Reporter.objects.get(), reporter)
def test_nested_inner_durable(self):
with transaction.atomic():
# Error is not raised.
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name='Tintin')
self.assertEqual(Reporter.objects.get(), reporter)
|
646f49dba387fea1aa9800a4eab7428d9aac178469a0e1cee28f964f3536d019 | # Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import copy
import io
import os
import pickle
import re
import shutil
import sys
import tempfile
import threading
import time
import unittest
import warnings
from pathlib import Path
from unittest import mock, skipIf
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS, CacheHandler, CacheKeyWarning, InvalidCacheKey, cache,
caches,
)
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.cache.utils import make_template_fragment_key
from django.db import close_old_connections, connection, connections
from django.db.backends.utils import CursorWrapper
from django.http import (
HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse,
)
from django.middleware.cache import (
CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory, SimpleTestCase, TestCase, TransactionTestCase,
ignore_warnings, override_settings,
)
from django.test.signals import setting_changed
from django.test.utils import CaptureQueriesContext
from django.utils import timezone, translation
from django.utils.cache import (
get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers,
)
from django.utils.deprecation import RemovedInDjango41Warning
from django.views.decorators.cache import cache_control, cache_page
from .models import Poll, expensive_calculation
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class Unpicklable:
def __getstate__(self):
raise pickle.PickleError()
def empty_response(request):
return HttpResponse()
KEY_ERRORS_WITH_MEMCACHED_MSG = (
'Cache key contains characters that will cause errors if used with '
'memcached: %r'
)
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
})
class DummyCacheTests(SimpleTestCase):
# The Dummy cache backend doesn't really behave like a test backend,
# so it has its own test case.
def test_simple(self):
"Dummy cache backend ignores cache set calls"
cache.set("key", "value")
self.assertIsNone(cache.get("key"))
def test_add(self):
"Add doesn't do anything in dummy cache backend"
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), True)
self.assertIsNone(cache.get("addkey1"))
def test_non_existent(self):
"Nonexistent keys aren't found in the dummy cache backend"
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
"get_many returns nothing for the dummy cache backend"
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {})
def test_get_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.get_many(['key with spaces'])
def test_delete(self):
"Cache deletion is transparently ignored on the dummy cache backend"
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertIsNone(cache.get("key1"))
self.assertIs(cache.delete("key1"), False)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_has_key(self):
"The has_key method doesn't ever return True for the dummy cache backend"
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), False)
self.assertIs(cache.has_key("goodbye1"), False)
def test_in(self):
"The in operator doesn't ever return True for the dummy cache backend"
cache.set("hello2", "goodbye2")
self.assertNotIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
def test_incr(self):
"Dummy cache values can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr('answer')
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
def test_decr(self):
"Dummy cache values can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr('answer')
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
def test_touch(self):
"""Dummy cache can't do touch()."""
self.assertIs(cache.touch('whatever'), False)
def test_data_types(self):
"All data types are ignored equally by the dummy cache"
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertIsNone(cache.get("stuff"))
def test_expiration(self):
"Expiration has no effect on the dummy cache"
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertIsNone(cache.get("expire2"))
self.assertIs(cache.has_key("expire3"), False)
def test_unicode(self):
"Unicode values are ignored by the dummy cache"
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertIsNone(cache.get(key))
def test_set_many(self):
"set_many does nothing for the dummy cache backend"
self.assertEqual(cache.set_many({'a': 1, 'b': 2}), [])
self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), [])
def test_set_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.set_many({'key with spaces': 'foo'})
def test_delete_many(self):
"delete_many does nothing for the dummy cache backend"
cache.delete_many(['a', 'b'])
def test_delete_many_invalid_key(self):
msg = KEY_ERRORS_WITH_MEMCACHED_MSG % ':1:key with spaces'
with self.assertWarnsMessage(CacheKeyWarning, msg):
cache.delete_many({'key with spaces': 'foo'})
def test_clear(self):
"clear does nothing for the dummy cache backend"
cache.clear()
def test_incr_version(self):
"Dummy cache versions can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr_version('answer')
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
def test_decr_version(self):
"Dummy cache versions can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr_version('answer')
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist')
def test_get_or_set(self):
self.assertEqual(cache.get_or_set('mykey', 'default'), 'default')
self.assertIsNone(cache.get_or_set('mykey', None))
def test_get_or_set_callable(self):
def my_callable():
return 'default'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'default')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default')
def custom_key_func(key, key_prefix, version):
"A customized cache key function"
return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
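# For illustration, the function above builds keys of the form
# 'CUSTOM-<key_prefix>-<version>-<key>', e.g.:
#   custom_key_func('answer', 'cacheprefix', 2) == 'CUSTOM-cacheprefix-2-answer'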
_caches_setting_base = {
'default': {},
'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
'v2': {'VERSION': 2},
'custom_key': {'KEY_FUNCTION': custom_key_func},
'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}
def caches_setting_for_tests(base=None, exclude=None, **params):
# `base` is used to pull in the memcached config from the original settings,
# `exclude` is a set of cache names denoting which `_caches_setting_base` keys
# should be omitted.
# `params` are test-specific overrides and `_caches_setting_base` is the
# base config for the tests.
# This results in the following search order:
# params -> _caches_setting_base -> base
base = base or {}
exclude = exclude or set()
setting = {k: base.copy() for k in _caches_setting_base if k not in exclude}
for key, cache_params in setting.items():
cache_params.update(_caches_setting_base[key])
cache_params.update(params)
return setting
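# A minimal sketch of the merge order (the backend below is only an example of
# how the helper is used further down in this module):
#   caches_setting_for_tests(BACKEND='django.core.cache.backends.locmem.LocMemCache')
# yields, for the 'v2' alias,
#   {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'VERSION': 2}
# because `params` overrides `_caches_setting_base`, which overrides `base`.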
class BaseCacheTests:
# A common set of tests to apply to all cache backends
factory = RequestFactory()
# RemovedInDjango41Warning: python-memcached doesn't support .get() with
# default.
supports_get_with_default = True
# Some clients raise custom exceptions when .incr() or .decr() are called
# with a non-integer value.
incr_decr_type_error = TypeError
def tearDown(self):
cache.clear()
def test_simple(self):
# Simple cache set/get works
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
def test_default_used_when_none_is_set(self):
"""If None is cached, get() returns it instead of the default."""
cache.set('key_default_none', None)
self.assertIsNone(cache.get('key_default_none', default='default'))
def test_add(self):
# A key can be added to a cache
self.assertIs(cache.add("addkey1", "value"), True)
self.assertIs(cache.add("addkey1", "newvalue"), False)
self.assertEqual(cache.get("addkey1"), "value")
def test_prefix(self):
# Test that identical cache keys don't conflict between caches sharing a backend
cache.set('somekey', 'value')
# The key should not be visible in the prefixed cache
self.assertIs(caches['prefix'].has_key('somekey'), False)
caches['prefix'].set('somekey', 'value2')
self.assertEqual(cache.get('somekey'), 'value')
self.assertEqual(caches['prefix'].get('somekey'), 'value2')
def test_non_existent(self):
"""Nonexistent cache keys return as None/default."""
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# Multiple cache keys can be returned using get_many
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'})
cache.set_many({'x': None, 'y': 1})
self.assertEqual(cache.get_many(['x', 'y']), {'x': None, 'y': 1})
def test_delete(self):
# Cache keys can be deleted
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(cache.get("key1"), "spam")
self.assertIs(cache.delete("key1"), True)
self.assertIsNone(cache.get("key1"))
self.assertEqual(cache.get("key2"), "eggs")
def test_delete_nonexistent(self):
self.assertIs(cache.delete('nonexistent_key'), False)
def test_has_key(self):
# The cache can be inspected for cache keys
cache.set("hello1", "goodbye1")
self.assertIs(cache.has_key("hello1"), True)
self.assertIs(cache.has_key("goodbye1"), False)
cache.set("no_expiry", "here", None)
self.assertIs(cache.has_key("no_expiry"), True)
cache.set('null', None)
self.assertIs(
cache.has_key('null'),
True if self.supports_get_with_default else False,
)
def test_in(self):
# The in operator can be used to inspect cache contents
cache.set("hello2", "goodbye2")
self.assertIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
cache.set('null', None)
if self.supports_get_with_default:
self.assertIn('null', cache)
else:
self.assertNotIn('null', cache)
def test_incr(self):
# Cache values can be incremented
cache.set('answer', 41)
self.assertEqual(cache.incr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.incr('answer', 10), 52)
self.assertEqual(cache.get('answer'), 52)
self.assertEqual(cache.incr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
cache.set('null', None)
with self.assertRaises(self.incr_decr_type_error):
cache.incr('null')
def test_decr(self):
# Cache values can be decremented
cache.set('answer', 43)
self.assertEqual(cache.decr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.decr('answer', 10), 32)
self.assertEqual(cache.get('answer'), 32)
self.assertEqual(cache.decr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
cache.set('null', None)
with self.assertRaises(self.incr_decr_type_error):
cache.decr('null')
def test_close(self):
self.assertTrue(hasattr(cache, 'close'))
cache.close()
def test_data_types(self):
# Many different data types can be cached
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertEqual(cache.get("stuff"), stuff)
def test_cache_read_for_model_instance(self):
# Fields with a callable default shouldn't be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="Well?")
self.assertEqual(Poll.objects.count(), 1)
pub_date = my_poll.pub_date
cache.set('question', my_poll)
cached_poll = cache.get('question')
self.assertEqual(cached_poll.pub_date, pub_date)
# We only want the default expensive calculation run once
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_write_for_model_instance_with_deferred(self):
# Fields with a callable default shouldn't be called on cache write
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
self.assertEqual(expensive_calculation.num_runs, 1)
cache.set('deferred_queryset', defer_qs)
# cache set should not re-evaluate default functions
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_read_for_model_instance_with_deferred(self):
# Fields with a callable default shouldn't be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
cache.set('deferred_queryset', defer_qs)
self.assertEqual(expensive_calculation.num_runs, 1)
runs_before_cache_read = expensive_calculation.num_runs
cache.get('deferred_queryset')
# We only want the default expensive calculation run on creation and set
self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
def test_expiration(self):
# Cache values can be set to expire
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
self.assertIs(cache.add("expire2", "newvalue"), True)
self.assertEqual(cache.get("expire2"), "newvalue")
self.assertIs(cache.has_key("expire3"), False)
def test_touch(self):
# cache.touch() updates the timeout.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1', timeout=4), True)
time.sleep(2)
self.assertIs(cache.has_key('expire1'), True)
time.sleep(3)
self.assertIs(cache.has_key('expire1'), False)
# cache.touch() works without the timeout argument.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1'), True)
time.sleep(2)
self.assertIs(cache.has_key('expire1'), True)
self.assertIs(cache.touch('nonexistent'), False)
def test_unicode(self):
# Unicode values can be cached
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
# Test `set`
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertEqual(cache.get(key), value)
# Test `add`
for (key, value) in stuff.items():
with self.subTest(key=key):
self.assertIs(cache.delete(key), True)
self.assertIs(cache.add(key, value), True)
self.assertEqual(cache.get(key), value)
# Test `set_many`
for (key, value) in stuff.items():
self.assertIs(cache.delete(key), True)
cache.set_many(stuff)
for (key, value) in stuff.items():
with self.subTest(key=key):
self.assertEqual(cache.get(key), value)
def test_binary_string(self):
# Binary strings should be cacheable
from zlib import compress, decompress
value = 'value_to_be_compressed'
compressed_value = compress(value.encode())
# Test set
cache.set('binary1', compressed_value)
compressed_result = cache.get('binary1')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test add
self.assertIs(cache.add('binary1-add', compressed_value), True)
compressed_result = cache.get('binary1-add')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test set_many
cache.set_many({'binary1-set_many': compressed_value})
compressed_result = cache.get('binary1-set_many')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
def test_set_many(self):
# Multiple keys can be set using set_many
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(cache.get("key1"), "spam")
self.assertEqual(cache.get("key2"), "eggs")
def test_set_many_returns_empty_list_on_success(self):
"""set_many() returns an empty list when all keys are inserted."""
failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(failing_keys, [])
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'})
cache.delete_many(["key1", "key2"])
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
self.assertEqual(cache.get("key3"), "ham")
def test_clear(self):
# The cache can be emptied using clear
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
cache.clear()
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_long_timeout(self):
"""
Follow memcached's convention where a timeout greater than 30 days is
treated as an absolute expiration timestamp instead of a relative
offset (#12399).
"""
cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second
self.assertEqual(cache.get('key1'), 'eggs')
self.assertIs(cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1), True)
self.assertEqual(cache.get('key2'), 'ham')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
def test_forever_timeout(self):
"""
Passing None as the timeout results in a value that is cached forever
"""
cache.set('key1', 'eggs', None)
self.assertEqual(cache.get('key1'), 'eggs')
self.assertIs(cache.add('key2', 'ham', None), True)
self.assertEqual(cache.get('key2'), 'ham')
self.assertIs(cache.add('key1', 'new eggs', None), False)
self.assertEqual(cache.get('key1'), 'eggs')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
cache.set('key5', 'belgian fries', timeout=1)
self.assertIs(cache.touch('key5', timeout=None), True)
time.sleep(2)
self.assertEqual(cache.get('key5'), 'belgian fries')
def test_zero_timeout(self):
"""
Passing zero as the timeout results in a value that is not cached
"""
cache.set('key1', 'eggs', 0)
self.assertIsNone(cache.get('key1'))
self.assertIs(cache.add('key2', 'ham', 0), True)
self.assertIsNone(cache.get('key2'))
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
self.assertIsNone(cache.get('key3'))
self.assertIsNone(cache.get('key4'))
cache.set('key5', 'belgian fries', timeout=5)
self.assertIs(cache.touch('key5', timeout=0), True)
self.assertIsNone(cache.get('key5'))
def test_float_timeout(self):
# Make sure a timeout given as a float doesn't crash anything.
cache.set("key1", "spam", 100.2)
self.assertEqual(cache.get("key1"), "spam")
def _perform_cull_test(self, cull_cache_name, initial_count, final_count):
try:
cull_cache = caches[cull_cache_name]
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
# Create initial cache key entries. This will overflow the cache,
# causing a cull.
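# (With the default CULL_FREQUENCY of 3, roughly a third of the entries is
# removed whenever MAX_ENTRIES is exceeded; CULL_FREQUENCY=0, as in the
# 'zero_cull' alias above, clears the whole cache instead.)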
for i in range(1, initial_count):
cull_cache.set('cull%d' % i, 'value', 1000)
count = 0
# Count how many keys are left in the cache.
for i in range(1, initial_count):
if cull_cache.has_key('cull%d' % i):
count += 1
self.assertEqual(count, final_count)
def test_cull(self):
self._perform_cull_test('cull', 50, 29)
def test_zero_cull(self):
self._perform_cull_test('zero_cull', 50, 19)
def test_cull_delete_when_store_empty(self):
try:
cull_cache = caches['cull']
except InvalidCacheBackendError:
self.skipTest("Culling isn't implemented.")
old_max_entries = cull_cache._max_entries
# Force _cull to delete on first cached record.
cull_cache._max_entries = -1
try:
cull_cache.set('force_cull_delete', 'value', 1000)
self.assertIs(cull_cache.has_key('force_cull_delete'), True)
finally:
cull_cache._max_entries = old_max_entries
def _perform_invalid_key_test(self, key, expected_warning):
"""
All the built-in backends should warn (except memcached, which should
error) on keys that would be refused by memcached. This encourages
portable caching code without making it too difficult to use production
backends with more liberal key rules. Refs #6447.
"""
# Mimic a custom ``make_key`` method being defined, since the default one
# will never trigger the warnings below.
def func(key, *args):
return key
old_func = cache.key_func
cache.key_func = func
tests = [
('add', [key, 1]),
('get', [key]),
('set', [key, 1]),
('incr', [key]),
('decr', [key]),
('touch', [key]),
('delete', [key]),
('get_many', [[key, 'b']]),
('set_many', [{key: 1, 'b': 2}]),
('delete_many', [{key: 1, 'b': 2}]),
]
try:
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertWarns(CacheKeyWarning) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.warning), expected_warning)
finally:
cache.key_func = old_func
def test_invalid_key_characters(self):
# memcached doesn't allow whitespace or control characters in keys.
key = 'key with spaces and 清'
self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key)
def test_invalid_key_length(self):
# memcached limits key length to 250 bytes.
key = ('a' * 250) + '清'
expected_warning = (
'Cache key will cause errors if used with memcached: '
'%r (longer than %s)' % (key, 250)
)
self._perform_invalid_key_test(key, expected_warning)
def test_cache_versioning_get_set(self):
# set, using default version = 1
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertEqual(cache.get('answer1', version=1), 42)
self.assertIsNone(cache.get('answer1', version=2))
self.assertIsNone(caches['v2'].get('answer1'))
self.assertEqual(caches['v2'].get('answer1', version=1), 42)
self.assertIsNone(caches['v2'].get('answer1', version=2))
# set, default version = 1, but manually override version = 2
cache.set('answer2', 42, version=2)
self.assertIsNone(cache.get('answer2'))
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
# v2 set, using default version = 2
caches['v2'].set('answer3', 42)
self.assertIsNone(cache.get('answer3'))
self.assertIsNone(cache.get('answer3', version=1))
self.assertEqual(cache.get('answer3', version=2), 42)
self.assertEqual(caches['v2'].get('answer3'), 42)
self.assertIsNone(caches['v2'].get('answer3', version=1))
self.assertEqual(caches['v2'].get('answer3', version=2), 42)
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set('answer4', 42, version=1)
self.assertEqual(cache.get('answer4'), 42)
self.assertEqual(cache.get('answer4', version=1), 42)
self.assertIsNone(cache.get('answer4', version=2))
self.assertIsNone(caches['v2'].get('answer4'))
self.assertEqual(caches['v2'].get('answer4', version=1), 42)
self.assertIsNone(caches['v2'].get('answer4', version=2))
def test_cache_versioning_add(self):
# add, default version = 1, but manually override version = 2
self.assertIs(cache.add('answer1', 42, version=2), True)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertIs(cache.add('answer1', 37, version=2), False)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertIs(cache.add('answer1', 37, version=1), True)
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
# v2 add, using default version = 2
self.assertIs(caches['v2'].add('answer2', 42), True)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertIs(caches['v2'].add('answer2', 37), False)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertIs(caches['v2'].add('answer2', 37, version=1), True)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
# v2 add, default version = 2, but manually override version = 1
self.assertIs(caches['v2'].add('answer3', 42, version=1), True)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
self.assertIs(caches['v2'].add('answer3', 37, version=1), False)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
self.assertIs(caches['v2'].add('answer3', 37), True)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertEqual(cache.get('answer3', version=2), 37)
def test_cache_versioning_has_key(self):
cache.set('answer1', 42)
# has_key
self.assertIs(cache.has_key('answer1'), True)
self.assertIs(cache.has_key('answer1', version=1), True)
self.assertIs(cache.has_key('answer1', version=2), False)
self.assertIs(caches['v2'].has_key('answer1'), False)
self.assertIs(caches['v2'].has_key('answer1', version=1), True)
self.assertIs(caches['v2'].has_key('answer1', version=2), False)
def test_cache_versioning_delete(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
self.assertIs(cache.delete('answer1'), True)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
self.assertIs(cache.delete('answer2', version=2), True)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertIsNone(cache.get('answer2', version=2))
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
self.assertIs(caches['v2'].delete('answer3'), True)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertIsNone(cache.get('answer3', version=2))
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
self.assertIs(caches['v2'].delete('answer4', version=1), True)
self.assertIsNone(cache.get('answer4', version=1))
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_incr_decr(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
self.assertEqual(cache.incr('answer1'), 38)
self.assertEqual(cache.get('answer1', version=1), 38)
self.assertEqual(cache.get('answer1', version=2), 42)
self.assertEqual(cache.decr('answer1'), 37)
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
self.assertEqual(cache.incr('answer2', version=2), 43)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 43)
self.assertEqual(cache.decr('answer2', version=2), 42)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
self.assertEqual(caches['v2'].incr('answer3'), 43)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 43)
self.assertEqual(caches['v2'].decr('answer3'), 42)
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 42)
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
self.assertEqual(caches['v2'].incr('answer4', version=1), 38)
self.assertEqual(cache.get('answer4', version=1), 38)
self.assertEqual(cache.get('answer4', version=2), 42)
self.assertEqual(caches['v2'].decr('answer4', version=1), 37)
self.assertEqual(cache.get('answer4', version=1), 37)
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
cache.set_many({'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
# set, default version = 1, but manually override version = 2
cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
self.assertEqual(cache.get_many(['ford2', 'arthur2']), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
# v2 set, using default version = 2
caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
self.assertEqual(cache.get_many(['ford3', 'arthur3']), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
def test_incr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertIsNone(cache.get('answer', version=3))
self.assertEqual(cache.incr_version('answer', version=2), 3)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertIsNone(cache.get('answer', version=2))
self.assertEqual(cache.get('answer', version=3), 42)
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertIsNone(caches['v2'].get('answer2', version=3))
self.assertEqual(caches['v2'].incr_version('answer2'), 3)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertIsNone(caches['v2'].get('answer2', version=2))
self.assertEqual(caches['v2'].get('answer2', version=3), 42)
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
cache.set('null', None)
if self.supports_get_with_default:
self.assertEqual(cache.incr_version('null'), 2)
else:
with self.assertRaises(self.incr_decr_type_error):
cache.incr_version('null')
def test_decr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertEqual(cache.decr_version('answer', version=2), 1)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.get('answer', version=1), 42)
self.assertIsNone(cache.get('answer', version=2))
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertEqual(caches['v2'].decr_version('answer2'), 1)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertEqual(caches['v2'].get('answer2', version=1), 42)
self.assertIsNone(caches['v2'].get('answer2', version=2))
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist', version=2)
cache.set('null', None, version=2)
if self.supports_get_with_default:
self.assertEqual(cache.decr_version('null', version=2), 1)
else:
with self.assertRaises(self.incr_decr_type_error):
cache.decr_version('null', version=2)
def test_custom_key_func(self):
# Two caches with different key functions aren't visible to each other
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertIsNone(caches['custom_key'].get('answer1'))
self.assertIsNone(caches['custom_key2'].get('answer1'))
caches['custom_key'].set('answer2', 42)
self.assertIsNone(cache.get('answer2'))
self.assertEqual(caches['custom_key'].get('answer2'), 42)
self.assertEqual(caches['custom_key2'].get('answer2'), 42)
def test_cache_write_unpicklable_object(self):
fetch_middleware = FetchFromCacheMiddleware(empty_response)
fetch_middleware.cache = cache
request = self.factory.get('/cache/test')
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
content = 'Testing cookie serialization.'
def get_response(req):
response = HttpResponse(content)
response.set_cookie('foo', 'bar')
return response
update_middleware = UpdateCacheMiddleware(get_response)
update_middleware.cache = cache
response = update_middleware(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
UpdateCacheMiddleware(lambda req: get_cache_data)(request)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
# Shouldn't fail silently if trying to cache an unpicklable type.
with self.assertRaises(pickle.PickleError):
cache.add('unpicklable', Unpicklable())
def test_set_fail_on_pickleerror(self):
with self.assertRaises(pickle.PickleError):
cache.set('unpicklable', Unpicklable())
def test_get_or_set(self):
self.assertIsNone(cache.get('projector'))
self.assertEqual(cache.get_or_set('projector', 42), 42)
self.assertEqual(cache.get('projector'), 42)
self.assertIsNone(cache.get_or_set('null', None))
if self.supports_get_with_default:
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get('null', 'default'))
else:
self.assertEqual(cache.get('null', 'default'), 'default')
def test_get_or_set_callable(self):
def my_callable():
return 'value'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'value')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value')
self.assertIsNone(cache.get_or_set('null', lambda: None))
if self.supports_get_with_default:
# Previous get_or_set() stores None in the cache.
self.assertIsNone(cache.get('null', 'default'))
else:
self.assertEqual(cache.get('null', 'default'), 'default')
def test_get_or_set_version(self):
msg = "get_or_set() missing 1 required positional argument: 'default'"
self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian')
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian', version=1)
self.assertIsNone(cache.get('brian', version=1))
self.assertEqual(cache.get_or_set('brian', 42, version=1), 42)
self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
self.assertIsNone(cache.get('brian', version=3))
def test_get_or_set_racing(self):
with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add:
# Simulate cache.add() failing to add a value. In that case, the
# default value should be returned.
cache_add.return_value = False
self.assertEqual(cache.get_or_set('key', 'default'), 'default')
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Spaces are used in the table name to ensure quoting/escaping is working
LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):
available_apps = ['cache']
def setUp(self):
# The super call needs to happen first for the settings override.
super().setUp()
self.create_table()
def tearDown(self):
# The super call needs to happen first because it uses the database.
super().tearDown()
self.drop_table()
def create_table(self):
management.call_command('createcachetable', verbosity=0)
def drop_table(self):
with connection.cursor() as cursor:
table_name = connection.ops.quote_name('test cache table')
cursor.execute('DROP TABLE %s' % table_name)
def test_get_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2})
cache.set('expired', 'expired', 0.01)
with self.assertNumQueries(1):
self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2})
time.sleep(0.02)
with self.assertNumQueries(2):
self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2})
def test_delete_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2, 'c': 3})
with self.assertNumQueries(1):
cache.delete_many(['a', 'b', 'c'])
def test_cull_count_queries(self):
old_max_entries = cache._max_entries
# Force _cull to delete on first cached record.
cache._max_entries = -1
with CaptureQueriesContext(connection) as captured_queries:
try:
cache.set('force_cull', 'value', 1000)
finally:
cache._max_entries = old_max_entries
num_count_queries = sum('COUNT' in query['sql'] for query in captured_queries)
self.assertEqual(num_count_queries, 1)
def test_delete_cursor_rowcount(self):
"""
The rowcount attribute should not be checked on a closed cursor.
"""
class MockedCursorWrapper(CursorWrapper):
is_closed = False
def close(self):
self.cursor.close()
self.is_closed = True
@property
def rowcount(self):
if self.is_closed:
raise Exception('Cursor is closed.')
return self.cursor.rowcount
cache.set_many({'a': 1, 'b': 2})
with mock.patch('django.db.backends.utils.CursorWrapper', MockedCursorWrapper):
self.assertIs(cache.delete('a'), True)
def test_zero_cull(self):
self._perform_cull_test('zero_cull', 50, 18)
def test_second_call_doesnt_crash(self):
out = io.StringIO()
management.call_command('createcachetable', stdout=out)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Use another table name to avoid the 'table already exists' message.
LOCATION='createcachetable_dry_run_mode'
))
def test_createcachetable_dry_run_mode(self):
out = io.StringIO()
management.call_command('createcachetable', dry_run=True, stdout=out)
output = out.getvalue()
self.assertTrue(output.startswith("CREATE TABLE"))
def test_createcachetable_with_table_argument(self):
"""
Delete and recreate cache table with legacy behavior (explicitly
specifying the table name).
"""
self.drop_table()
out = io.StringIO()
management.call_command(
'createcachetable',
'test cache table',
verbosity=2,
stdout=out,
)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n")
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
pass
class DBCacheRouter:
"""A router that puts the cache table on the 'other' database."""
def db_for_read(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def allow_migrate(self, db, app_label, **hints):
if app_label == 'django_cache':
return db == 'other'
return None
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'my_cache_table',
},
},
)
class CreateCacheTableForDBCacheTests(TestCase):
databases = {'default', 'other'}
@override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
def test_createcachetable_observes_database_router(self):
# cache table should not be created on 'default'
with self.assertNumQueries(0, using='default'):
management.call_command('createcachetable', database='default', verbosity=0)
# cache table should be created on 'other'
# Queries:
# 1: check table doesn't already exist
# 2: create savepoint (if transactional DDL is supported)
# 3: create the table
# 4: create the index
# 5: release savepoint (if transactional DDL is supported)
num = 5 if connections['other'].features.can_rollback_ddl else 3
with self.assertNumQueries(num, using='other'):
management.call_command('createcachetable', database='other', verbosity=0)
class PicklingSideEffect:
def __init__(self, cache):
self.cache = cache
self.locked = False
def __getstate__(self):
self.locked = self.cache._lock.locked()
return {}
limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
OPTIONS={'MAX_ENTRIES': 9},
))
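# With MAX_ENTRIES=9 and the default CULL_FREQUENCY of 3, adding a tenth key
# culls the three least-recently-used entries, which is what the LRU tests
# below rely on.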
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
))
class LocMemCacheTests(BaseCacheTests, TestCase):
def setUp(self):
super().setUp()
# LocMem requires a hack to make the other caches
# share a data store with the 'normal' cache.
caches['prefix']._cache = cache._cache
caches['prefix']._expire_info = cache._expire_info
caches['v2']._cache = cache._cache
caches['v2']._expire_info = cache._expire_info
caches['custom_key']._cache = cache._cache
caches['custom_key']._expire_info = cache._expire_info
caches['custom_key2']._cache = cache._cache
caches['custom_key2']._expire_info = cache._expire_info
@override_settings(CACHES={
'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other'
},
})
def test_multiple_caches(self):
"Multiple locmem caches are isolated"
cache.set('value', 42)
self.assertEqual(caches['default'].get('value'), 42)
self.assertIsNone(caches['other'].get('value'))
def test_locking_on_pickle(self):
"""#20613/#18541 -- Ensures pickling is done outside of the lock."""
bad_obj = PicklingSideEffect(cache)
cache.set('set', bad_obj)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
self.assertIs(cache.add('add', bad_obj), True)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
def test_incr_decr_timeout(self):
"""incr/decr does not modify expiry time (matches memcached behavior)"""
key = 'value'
_key = cache.make_key(key)
cache.set(key, 1, timeout=cache.default_timeout * 10)
expire = cache._expire_info[_key]
self.assertEqual(cache.incr(key), 2)
self.assertEqual(expire, cache._expire_info[_key])
self.assertEqual(cache.decr(key), 1)
self.assertEqual(expire, cache._expire_info[_key])
@limit_locmem_entries
def test_lru_get(self):
"""get() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
@limit_locmem_entries
def test_lru_set(self):
"""set() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(3, 9):
cache.set(key, key, timeout=None)
cache.set(9, 9, timeout=None)
for key in range(3, 10):
self.assertEqual(cache.get(key), key)
for key in range(3):
self.assertIsNone(cache.get(key))
@limit_locmem_entries
def test_lru_incr(self):
"""incr() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.incr(key), key + 1)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key + 1)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
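# Illustrative sketch (not one of Django's tests) of the LRU culling that the
# tests above rely on: with MAX_ENTRIES set and the default CULL_FREQUENCY,
# locmem evicts entries from the least-recently-used end once the limit is
# reached, and get()/set()/incr() move a key back to the most-recent end.
# The cache name 'lru_demo' is made up for the example.
def _locmem_lru_sketch():
    from django.core.cache.backends.locmem import LocMemCache
    demo = LocMemCache('lru_demo', {'OPTIONS': {'MAX_ENTRIES': 3, 'CULL_FREQUENCY': 3}})
    for i in range(3):
        demo.set(i, i, timeout=None)
    demo.get(0)             # Touching key 0 marks it as recently used.
    demo.set('extra', 'x')  # Exceeding MAX_ENTRIES culls from the LRU end (key 1).
    return [demo.get(i) for i in range(3)]  # Expected roughly [0, None, 2].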
# The memcached backends aren't guaranteed to be available.
# To test them, the test settings file needs to contain at least one cache
# backend setting that points at your memcached server.
configured_caches = {}
for _cache_params in settings.CACHES.values():
configured_caches[_cache_params['BACKEND']] = _cache_params
PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache')
PyMemcacheCache_params = configured_caches.get('django.core.cache.backends.memcached.PyMemcacheCache')
# The memcached backends don't support cull-related options like `MAX_ENTRIES`.
memcached_excluded_caches = {'cull', 'zero_cull'}
class BaseMemcachedTests(BaseCacheTests):
# By default it's assumed that the client doesn't clean up connections
# properly, in which case the backend must do so after each request.
should_disconnect_on_close = True
def test_location_multiple_servers(self):
locations = [
['server1.tld', 'server2:11211'],
'server1.tld;server2:11211',
'server1.tld,server2:11211',
]
for location in locations:
with self.subTest(location=location):
params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location}
with self.settings(CACHES={'default': params}):
self.assertEqual(cache._servers, ['server1.tld', 'server2:11211'])
def _perform_invalid_key_test(self, key, expected_warning):
"""
While other backends merely warn, memcached should raise for an invalid
key.
"""
msg = expected_warning.replace(key, cache.make_key(key))
tests = [
('add', [key, 1]),
('get', [key]),
('set', [key, 1]),
('incr', [key]),
('decr', [key]),
('touch', [key]),
('delete', [key]),
('get_many', [[key, 'b']]),
('set_many', [{key: 1, 'b': 2}]),
('delete_many', [{key: 1, 'b': 2}]),
]
for operation, args in tests:
with self.subTest(operation=operation):
with self.assertRaises(InvalidCacheKey) as cm:
getattr(cache, operation)(*args)
self.assertEqual(str(cm.exception), msg)
def test_default_never_expiring_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
TIMEOUT=None)):
cache.set('infinite_foo', 'bar')
self.assertEqual(cache.get('infinite_foo'), 'bar')
def test_default_far_future_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
# 60*60*24*365, 1 year
TIMEOUT=31536000)):
cache.set('future_foo', 'bar')
self.assertEqual(cache.get('future_foo'), 'bar')
def test_memcached_deletes_key_on_failed_set(self):
# By default memcached allows objects up to 1MB. For the cache_db session
# backend to always use the current session, memcached needs to delete
# the old key if it fails to set.
max_value_length = 2 ** 20
cache.set('small_value', 'a')
self.assertEqual(cache.get('small_value'), 'a')
large_value = 'a' * (max_value_length + 1)
try:
cache.set('small_value', large_value)
except Exception:
# Most clients (e.g. pymemcache or pylibmc) raise when the value is
# too large. This test is primarily checking that the key was
# deleted, so the return/exception behavior for the set() itself is
# not important.
pass
# small_value should be deleted, or set if configured to accept larger values
value = cache.get('small_value')
self.assertTrue(value is None or value == large_value)
def test_close(self):
# For clients that don't manage their connections properly, the
# connection is closed when the request is complete.
signals.request_finished.disconnect(close_old_connections)
try:
with mock.patch.object(cache._class, 'disconnect_all', autospec=True) as mock_disconnect:
signals.request_finished.send(self.__class__)
self.assertIs(mock_disconnect.called, self.should_disconnect_on_close)
finally:
signals.request_finished.connect(close_old_connections)
def test_set_many_returns_failing_keys(self):
def fail_set_multi(mapping, *args, **kwargs):
return mapping.keys()
with mock.patch.object(cache._class, 'set_multi', side_effect=fail_set_multi):
failing_keys = cache.set_many({'key': 'value'})
self.assertEqual(failing_keys, ['key'])
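# Sketch of the key validation that _perform_invalid_key_test() exercises above
# (hedged): the memcached backends feed each key through
# django.core.cache.backends.base.memcache_key_warnings() and raise
# InvalidCacheKey instead of merely warning when that helper reports a problem
# (keys longer than 250 characters, or containing spaces/control characters).
def _memcached_key_warning_sketch():
    from django.core.cache.backends.base import memcache_key_warnings
    return (
        list(memcache_key_warnings('a key with spaces')),
        list(memcache_key_warnings('x' * 251)),
    )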
# RemovedInDjango41Warning.
MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache')
@ignore_warnings(category=RemovedInDjango41Warning)
@unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
))
class MemcachedCacheTests(BaseMemcachedTests, TestCase):
base_params = MemcachedCache_params
supports_get_with_default = False
incr_decr_type_error = ValueError
def test_memcached_uses_highest_pickle_version(self):
# Regression test for #19810
for cache_key in settings.CACHES:
with self.subTest(cache_key=cache_key):
self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL)
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
OPTIONS={'server_max_value_length': 9999},
))
def test_memcached_options(self):
self.assertEqual(cache._cache.server_max_value_length, 9999)
def test_default_used_when_none_is_set(self):
"""
python-memcached doesn't support default in get() so this test
overrides the one in BaseCacheTests.
"""
cache.set('key_default_none', None)
self.assertEqual(cache.get('key_default_none', default='default'), 'default')
class MemcachedCacheDeprecationTests(SimpleTestCase):
def test_warning(self):
from django.core.cache.backends.memcached import MemcachedCache
# Remove warnings filter on MemcachedCache deprecation warning, added
# in runtests.py.
warnings.filterwarnings(
'error',
'MemcachedCache is deprecated',
category=RemovedInDjango41Warning,
)
try:
msg = (
'MemcachedCache is deprecated in favor of PyMemcacheCache and '
'PyLibMCCache.'
)
with self.assertRaisesMessage(RemovedInDjango41Warning, msg):
MemcachedCache('127.0.0.1:11211', {})
finally:
warnings.filterwarnings(
'ignore',
'MemcachedCache is deprecated',
category=RemovedInDjango41Warning,
)
@unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
))
class PyLibMCCacheTests(BaseMemcachedTests, TestCase):
base_params = PyLibMCCache_params
# libmemcached manages its own connections.
should_disconnect_on_close = False
@property
def incr_decr_type_error(self):
return cache._lib.ClientError
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
OPTIONS={
'binary': True,
'behaviors': {'tcp_nodelay': True},
},
))
def test_pylibmc_options(self):
self.assertTrue(cache._cache.binary)
self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True))
def test_pylibmc_client_servers(self):
backend = self.base_params['BACKEND']
tests = [
('unix:/run/memcached/socket', '/run/memcached/socket'),
('/run/memcached/socket', '/run/memcached/socket'),
('localhost', 'localhost'),
('localhost:11211', 'localhost:11211'),
('[::1]', '[::1]'),
('[::1]:11211', '[::1]:11211'),
('127.0.0.1', '127.0.0.1'),
('127.0.0.1:11211', '127.0.0.1:11211'),
]
for location, expected in tests:
settings = {'default': {'BACKEND': backend, 'LOCATION': location}}
with self.subTest(location), self.settings(CACHES=settings):
self.assertEqual(cache.client_servers, [expected])
@unittest.skipUnless(PyMemcacheCache_params, 'PyMemcacheCache backend not configured')
@override_settings(CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
))
class PyMemcacheCacheTests(BaseMemcachedTests, TestCase):
base_params = PyMemcacheCache_params
@property
def incr_decr_type_error(self):
return cache._lib.exceptions.MemcacheClientError
def test_pymemcache_highest_pickle_version(self):
self.assertEqual(
cache._cache.default_kwargs['serde']._serialize_func.keywords['pickle_version'],
pickle.HIGHEST_PROTOCOL,
)
for cache_key in settings.CACHES:
for client_key, client in caches[cache_key]._cache.clients.items():
with self.subTest(cache_key=cache_key, server=client_key):
self.assertEqual(
client.serde._serialize_func.keywords['pickle_version'],
pickle.HIGHEST_PROTOCOL,
)
@override_settings(CACHES=caches_setting_for_tests(
base=PyMemcacheCache_params,
exclude=memcached_excluded_caches,
OPTIONS={'no_delay': True},
))
def test_pymemcache_options(self):
self.assertIs(cache._cache.default_kwargs['no_delay'], True)
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.filebased.FileBasedCache',
))
class FileBasedCacheTests(BaseCacheTests, TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
super().setUp()
self.dirname = self.mkdtemp()
        # The cache LOCATION can't be modified through override_settings /
        # modify_settings, so the settings are manipulated directly here and
        # the setting_changed signal is sent manually.
for cache_params in settings.CACHES.values():
cache_params['LOCATION'] = self.dirname
setting_changed.send(self.__class__, setting='CACHES', enter=False)
def tearDown(self):
super().tearDown()
# Call parent first, as cache.clear() may recreate cache base directory
shutil.rmtree(self.dirname)
def mkdtemp(self):
return tempfile.mkdtemp()
def test_ignores_non_cache_files(self):
fname = os.path.join(self.dirname, 'not-a-cache-file')
with open(fname, 'w'):
os.utime(fname, None)
cache.clear()
self.assertTrue(os.path.exists(fname),
'Expected cache.clear to ignore non cache files')
os.remove(fname)
def test_clear_does_not_remove_cache_dir(self):
cache.clear()
self.assertTrue(os.path.exists(self.dirname),
'Expected cache.clear to keep the cache dir')
def test_creates_cache_dir_if_nonexistent(self):
os.rmdir(self.dirname)
cache.set('foo', 'bar')
self.assertTrue(os.path.exists(self.dirname))
def test_get_ignores_enoent(self):
cache.set('foo', 'bar')
os.unlink(cache._key_to_file('foo'))
# Returns the default instead of erroring.
self.assertEqual(cache.get('foo', 'baz'), 'baz')
@skipIf(
sys.platform == 'win32',
'Windows only partially supports umasks and chmod.',
)
def test_cache_dir_permissions(self):
os.rmdir(self.dirname)
dir_path = Path(self.dirname) / 'nested' / 'filebasedcache'
for cache_params in settings.CACHES.values():
cache_params['LOCATION'] = dir_path
setting_changed.send(self.__class__, setting='CACHES', enter=False)
cache.set('foo', 'bar')
self.assertIs(dir_path.exists(), True)
tests = [
dir_path,
dir_path.parent,
dir_path.parent.parent,
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o700)
def test_get_does_not_ignore_non_filenotfound_exceptions(self):
with mock.patch('builtins.open', side_effect=OSError):
with self.assertRaises(OSError):
cache.get('foo')
def test_empty_cache_file_considered_expired(self):
cache_file = cache._key_to_file('foo')
with open(cache_file, 'wb') as fh:
fh.write(b'')
with open(cache_file, 'rb') as fh:
self.assertIs(cache._is_expired(fh), True)
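# Rough sketch (hedged; see django.core.cache.backends.filebased for the real
# implementation) of the file layout the tests above poke at via
# cache._key_to_file(): each entry lives directly under LOCATION as
# '<md5-of-the-full-cache-key>.djcache', holding a compressed pickle of the value.
def _filebased_filename_sketch(location, full_key):
    import hashlib
    import os
    return os.path.join(location, hashlib.md5(full_key.encode()).hexdigest() + '.djcache')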
class FileBasedCachePathLibTests(FileBasedCacheTests):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
return Path(tmp_dir)
@override_settings(CACHES={
'default': {
'BACKEND': 'cache.liberal_backend.CacheClass',
},
})
class CustomCacheKeyValidationTests(SimpleTestCase):
"""
    Tests for the ability to mix in a custom ``validate_key`` method into a
    custom cache backend that otherwise inherits from a builtin backend, and
    override the default key validation. Refs #6447.
"""
def test_custom_key_validation(self):
# this key is both longer than 250 characters, and has spaces
key = 'some key with spaces' * 15
val = 'a value'
cache.set(key, val)
self.assertEqual(cache.get(key), val)
@override_settings(
CACHES={
'default': {
'BACKEND': 'cache.closeable_cache.CacheClass',
}
}
)
class CacheClosingTests(SimpleTestCase):
def test_close(self):
self.assertFalse(cache.closed)
signals.request_finished.send(self.__class__)
self.assertTrue(cache.closed)
def test_close_only_initialized(self):
with self.settings(CACHES={
'cache_1': {
'BACKEND': 'cache.closeable_cache.CacheClass',
},
'cache_2': {
'BACKEND': 'cache.closeable_cache.CacheClass',
},
}):
self.assertEqual(caches.all(initialized_only=True), [])
signals.request_finished.send(self.__class__)
self.assertEqual(caches.all(initialized_only=True), [])
DEFAULT_MEMORY_CACHES_SETTINGS = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
}
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None
class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
"""
    Caches configured with TIMEOUT=None set non-expiring keys.
"""
def setUp(self):
        # The default expiration time for keys, 5 minutes (300 seconds), is
        # defined in BaseCache.__init__().
self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout
def tearDown(self):
        del self.DEFAULT_TIMEOUT
def test_default_expiration_time_for_keys_is_5_minutes(self):
"""The default expiration time of a cache key is 5 minutes.
This value is defined in
django.core.cache.backends.base.BaseCache.__init__().
"""
self.assertEqual(300, self.DEFAULT_TIMEOUT)
def test_caches_with_unset_timeout_has_correct_default_timeout(self):
"""Caches that have the TIMEOUT parameter undefined in the default
settings will use the default 5 minute timeout.
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
"""Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings will have `None` as the default timeout.
This means "no timeout".
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertIsNone(cache.default_timeout)
self.assertIsNone(cache.get_backend_timeout())
@override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
def test_caches_with_unset_timeout_set_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter unset will set cache
keys having the default 5 minute timeout.
"""
key = "my-key"
value = "my-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNotNone(cache._expire_info[cache_key])
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter set to `None` will set
        a non-expiring key by default.
"""
key = "another-key"
value = "another-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNone(cache._expire_info[cache_key])
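# Small sketch of the TIMEOUT semantics asserted above (cache names are made
# up): leaving TIMEOUT unset gives BaseCache's 300-second default, while
# TIMEOUT=None makes default_timeout None and get_backend_timeout() return
# None, i.e. entries never expire.
def _timeout_semantics_sketch():
    from django.core.cache.backends.locmem import LocMemCache
    five_minutes = LocMemCache('timeout-default', {})
    never_expiring = LocMemCache('timeout-none', {'TIMEOUT': None})
    return (
        five_minutes.default_timeout,          # 300
        never_expiring.default_timeout,        # None
        never_expiring.get_backend_timeout(),  # None -> no expiry
    )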
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
ALLOWED_HOSTS=['.example.com'],
)
class CacheUtils(SimpleTestCase):
"""TestCase for django.utils.cache functions."""
host = 'www.example.com'
path = '/cache/test/'
factory = RequestFactory(HTTP_HOST=host)
def tearDown(self):
cache.clear()
def _get_request_cache(self, method='GET', query_string=None, update_cache=None):
request = self._get_request(self.host, self.path,
method, query_string=query_string)
        request._cache_update_cache = update_cache if update_cache is not None else True
return request
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('*', ('Accept-Language', 'Cookie'), '*'),
('Accept-Language, Cookie', ('*',), '*'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
response = HttpResponse()
if initial_vary is not None:
response.headers['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
key_prefix = 'localprefix'
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
)
def test_cache_key_varies_by_url(self):
"""
get_cache_key keys differ by fully-qualified URL instead of path
"""
request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
learn_cache_key(request1, HttpResponse())
request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
learn_cache_key(request2, HttpResponse())
self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))
def test_learn_cache_key(self):
request = self.factory.head(self.path)
response = HttpResponse()
response.headers['Vary'] = 'Pony'
# Make sure that the Vary header is added to the key hash
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_patch_cache_control(self):
tests = (
# Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
(None, {'private': True}, {'private'}),
('', {'private': True}, {'private'}),
# no-cache.
('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}),
('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}),
('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}),
('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}),
('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}),
(
'no-cache=Set-Cookie,no-cache=Link',
{'no_cache': 'Custom'},
{'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'},
),
# Test whether private/public attributes are mutually exclusive
('private', {'private': True}, {'private'}),
('private', {'public': True}, {'public'}),
('public', {'public': True}, {'public'}),
('public', {'private': True}, {'private'}),
('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
)
cc_delim_re = re.compile(r'\s*,\s*')
for initial_cc, newheaders, expected_cc in tests:
with self.subTest(initial_cc=initial_cc, newheaders=newheaders):
response = HttpResponse()
if initial_cc is not None:
response.headers['Cache-Control'] = initial_cc
patch_cache_control(response, **newheaders)
parts = set(cc_delim_re.split(response.headers['Cache-Control']))
self.assertEqual(parts, expected_cc)
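# Sketch of the key layout asserted in the tests above (hedged; see
# django.utils.cache for the real construction): a fixed prefix, the
# CACHE_MIDDLEWARE_KEY_PREFIX / key_prefix, the request method, an MD5 of the
# absolute request URL, and an MD5 built from the request's Vary header values.
# 'd41d8cd98f00b204e9800998ecf8427e' is simply the MD5 of the empty string,
# i.e. no Vary headers were registered.
def _cache_page_key_sketch(key_prefix, method, url_hash, header_list_hash):
    return 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
        key_prefix, method, url_hash, header_list_hash,
    )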
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix',
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass
@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX='test',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
)
class CacheHEADTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def _set_cache(self, request, msg):
return UpdateCacheMiddleware(lambda req: HttpResponse(msg))(request)
def test_head_caches_correctly(self):
test_content = 'test content'
request = self.factory.head(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
def test_head_with_cached_get(self):
test_content = 'test content'
request = self.factory.get(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
LANGUAGES=[
('en', 'English'),
('es', 'Spanish'),
],
)
class CacheI18nTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation(self):
request = self.factory.get(self.path)
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
def check_accept_language_vary(self, accept_language, vary, reference_key):
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response.headers['Vary'] = vary
key = learn_cache_key(request, response)
key2 = get_cache_key(request)
self.assertEqual(key, reference_key)
self.assertEqual(key2, reference_key)
@override_settings(USE_I18N=True, USE_TZ=False)
def test_cache_key_i18n_translation_accept_language(self):
lang = translation.get_language()
self.assertEqual(lang, 'en')
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response.headers['Vary'] = 'accept-encoding'
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
self.check_accept_language_vary(
'en-us',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'en-US',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8',
'accept-encoding, accept-language, cookie',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8,ko;q=0.6',
'accept-language, cookie, accept-encoding',
key
)
self.check_accept_language_vary(
'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
'accept-encoding, cookie, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
'accept-language, accept-encoding, cookie',
key
)
self.check_accept_language_vary(
'ko;q=1.0,en;q=0.5',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'ko, en',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR, en-US',
'accept-encoding, accept-language, cookie',
key
)
@override_settings(USE_I18N=False, USE_TZ=True)
def test_cache_key_i18n_timezone(self):
request = self.factory.get(self.path)
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False)
def test_cache_key_no_i18n(self):
request = self.factory.get(self.path)
lang = translation.get_language()
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
def get_response(req):
return HttpResponse(msg)
translation.activate(lang)
return UpdateCacheMiddleware(get_response)(request)
        # cache with non-empty request.GET
request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# first access, cache must return None
self.assertIsNone(get_cache_data)
content = 'Check for cache with QUERY_STRING'
def get_response(req):
return HttpResponse(content)
UpdateCacheMiddleware(get_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# cache must return content
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
# i18n tests
en_message = "Hello world!"
es_message = "Hola mundo!"
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
# The cache can be recovered
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, en_message.encode())
# change the session language and set content
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'es', es_message)
        # change the language again
translation.activate('en')
# retrieve the content from cache
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertEqual(get_cache_data.content, en_message.encode())
        # change the language again
translation.activate('es')
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertEqual(get_cache_data.content, es_message.encode())
# reset the language
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self.factory.get(self.path)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
def get_stream_response(req):
return StreamingHttpResponse(['Check for cache with streaming content.'])
UpdateCacheMiddleware(get_stream_response)(request)
get_cache_data = FetchFromCacheMiddleware(empty_response).process_request(request)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse('Hello World %s' % value)
def csrf_view(request):
return HttpResponse(csrf(request)['csrf_token'])
@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(SimpleTestCase):
factory = RequestFactory()
def setUp(self):
self.default_cache = caches['default']
self.other_cache = caches['other']
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super().tearDown()
def test_constructor(self):
"""
        The constructor correctly distinguishes between CacheMiddleware used as
        middleware and CacheMiddleware used as a view decorator, and sets its
        attributes accordingly.
"""
# If only one argument is passed in construction, it's being used as
# middleware.
middleware = CacheMiddleware(empty_response)
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
# If more arguments are being passed in construction, it's being used
# as a decorator. First, test with "defaults":
as_view_decorator = CacheMiddleware(empty_response, cache_alias=None, key_prefix=None)
self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30
self.assertEqual(as_view_decorator.key_prefix, '')
# Value of DEFAULT_CACHE_ALIAS from django.core.cache
self.assertEqual(as_view_decorator.cache_alias, 'default')
self.assertEqual(as_view_decorator.cache, self.default_cache)
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(
hello_world_view, cache_timeout=60, cache_alias='other', key_prefix='foo'
)
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
self.assertEqual(as_view_decorator_with_custom.cache, self.other_cache)
def test_update_cache_middleware_constructor(self):
middleware = UpdateCacheMiddleware(empty_response)
self.assertEqual(middleware.cache_timeout, 30)
self.assertIsNone(middleware.page_timeout)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
def test_fetch_cache_middleware_constructor(self):
middleware = FetchFromCacheMiddleware(empty_response)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
self.assertEqual(middleware.cache, self.other_cache)
def test_middleware(self):
middleware = CacheMiddleware(hello_world_view)
prefix_middleware = CacheMiddleware(hello_world_view, key_prefix='prefix1')
timeout_middleware = CacheMiddleware(hello_world_view, cache_timeout=1)
request = self.factory.get('/view/')
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertIsNone(result)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertIsNone(result)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
explicit_default_view = cache_page(3, cache='default')(hello_world_view)
explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
other_view = cache_page(1, cache='other')(hello_world_view)
other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
request = self.factory.get('/view/')
# Request the view once
response = default_view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
# Request again -- hit the cache
response = default_view(request, '2')
self.assertEqual(response.content, b'Hello World 1')
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, '3')
self.assertEqual(response.content, b'Hello World 1')
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, '4')
self.assertEqual(response.content, b'Hello World 4')
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, '5')
self.assertEqual(response.content, b'Hello World 4')
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, '6')
self.assertEqual(response.content, b'Hello World 4')
# Requesting from an alternate cache won't hit cache
response = other_view(request, '7')
self.assertEqual(response.content, b'Hello World 7')
# But a repeated hit will hit cache
response = other_view(request, '8')
self.assertEqual(response.content, b'Hello World 7')
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, '9')
self.assertEqual(response.content, b'Hello World 9')
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
response = default_view(request, '11')
self.assertEqual(response.content, b'Hello World 1')
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, '12')
self.assertEqual(response.content, b'Hello World 4')
# ... the explicit default cache will still hit
response = explicit_default_view(request, '13')
self.assertEqual(response.content, b'Hello World 1')
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, '14')
self.assertEqual(response.content, b'Hello World 4')
        # ... but a rapidly expiring cache won't hit
response = other_view(request, '15')
self.assertEqual(response.content, b'Hello World 15')
        # ... even if it has a prefix
response = other_with_prefix_view(request, '16')
self.assertEqual(response.content, b'Hello World 16')
def test_cache_page_timeout(self):
# Page timeout takes precedence over the "max-age" section of the
# "Cache-Control".
tests = [
(1, 3), # max_age < page_timeout.
(3, 1), # max_age > page_timeout.
]
for max_age, page_timeout in tests:
with self.subTest(max_age=max_age, page_timeout=page_timeout):
view = cache_page(timeout=page_timeout)(
cache_control(max_age=max_age)(hello_world_view)
)
request = self.factory.get('/view/')
response = view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
time.sleep(1)
response = view(request, '2')
self.assertEqual(
response.content,
b'Hello World 1' if page_timeout > max_age else b'Hello World 2',
)
cache.clear()
def test_cached_control_private_not_cached(self):
"""Responses with 'Cache-Control: private' are not cached."""
view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view))
request = self.factory.get('/view/')
response = view_with_private_cache(request, '1')
self.assertEqual(response.content, b'Hello World 1')
response = view_with_private_cache(request, '2')
self.assertEqual(response.content, b'Hello World 2')
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
request = self.factory.get('/view/')
csrf_middleware = CsrfViewMiddleware(csrf_view)
csrf_middleware.process_view(request, csrf_view, (), {})
cache_middleware = CacheMiddleware(csrf_middleware)
self.assertIsNone(cache_middleware.process_request(request))
cache_middleware(request)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
def test_304_response_has_http_caching_headers_but_not_cached(self):
original_view = mock.Mock(return_value=HttpResponseNotModified())
view = cache_page(2)(original_view)
request = self.factory.get('/view/')
# The view shouldn't be cached on the second call.
view(request).close()
response = view(request)
response.close()
self.assertEqual(original_view.call_count, 2)
self.assertIsInstance(response, HttpResponseNotModified)
self.assertIn('Cache-Control', response)
self.assertIn('Expires', response)
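# Sketch (hypothetical view name) of the decorator composition exercised in
# test_cache_page_timeout() above: cache_page() stores the response in the
# cache for `timeout` seconds regardless of the max-age that cache_control()
# writes into the Cache-Control header sent to clients.
def _cache_page_timeout_sketch():
    from django.http import HttpResponse
    from django.views.decorators.cache import cache_control, cache_page
    @cache_page(timeout=3)
    @cache_control(max_age=1)
    def demo_view(request):
        return HttpResponse('cached for 3s, max-age=1')
    return demo_view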
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(SimpleTestCase):
"""
    Tests various headers with TemplateResponse.
    Most are probably redundant since they manipulate the same object anyway,
    but the ETag header is 'special' because it relies on the content being
    complete (which isn't necessarily always the case with a TemplateResponse).
"""
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
if initial_vary is not None:
response.headers['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response.headers['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
)
class TestMakeTemplateFragmentKey(SimpleTestCase):
def test_without_vary_on(self):
key = make_template_fragment_key('a.fragment')
self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')
def test_with_one_vary_on(self):
key = make_template_fragment_key('foo', ['abc'])
self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66')
def test_with_many_vary_on(self):
key = make_template_fragment_key('bar', ['abc', 'def'])
self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520')
def test_proper_escaping(self):
key = make_template_fragment_key('spam', ['abc:def%'])
self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc')
def test_with_ints_vary_on(self):
key = make_template_fragment_key('foo', [1, 2, 3, 4, 5])
self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461')
def test_with_unicode_vary_on(self):
key = make_template_fragment_key('foo', ['42º', '😀'])
self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237')
def test_long_vary_on(self):
key = make_template_fragment_key('foo', ['x' * 10000])
self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a')
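# Rough sketch of how the fragment keys asserted above are built (hedged; see
# django.core.cache.utils.make_template_fragment_key for the real code): the
# vary_on values are fed into a single MD5, each followed by ':', and the
# digest is appended to a 'template.cache.<fragment_name>.' prefix.
def _fragment_key_sketch(fragment_name, vary_on=()):
    import hashlib
    hasher = hashlib.md5()
    for arg in vary_on:
        hasher.update(str(arg).encode())
        hasher.update(b':')
    return 'template.cache.%s.%s' % (fragment_name, hasher.hexdigest())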
class CacheHandlerTest(SimpleTestCase):
def test_same_instance(self):
"""
Attempting to retrieve the same alias should yield the same instance.
"""
cache1 = caches['default']
cache2 = caches['default']
self.assertIs(cache1, cache2)
def test_per_thread(self):
"""
Requesting the same alias from separate threads should yield separate
instances.
"""
c = []
def runner():
c.append(caches['default'])
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertIsNot(c[0], c[1])
def test_nonexistent_alias(self):
msg = "The connection 'nonexistent' doesn't exist."
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
caches['nonexistent']
def test_nonexistent_backend(self):
test_caches = CacheHandler({
'invalid_backend': {
'BACKEND': 'django.nonexistent.NonexistentBackend',
},
})
msg = (
"Could not find backend 'django.nonexistent.NonexistentBackend': "
"No module named 'django.nonexistent'"
)
with self.assertRaisesMessage(InvalidCacheBackendError, msg):
test_caches['invalid_backend']
def test_all(self):
test_caches = CacheHandler({
'cache_1': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'cache_2': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
})
self.assertEqual(test_caches.all(initialized_only=True), [])
cache_1 = test_caches['cache_1']
self.assertEqual(test_caches.all(initialized_only=True), [cache_1])
self.assertEqual(len(test_caches.all()), 2)
# .all() initializes all caches.
self.assertEqual(len(test_caches.all(initialized_only=True)), 2)
self.assertEqual(test_caches.all(), test_caches.all(initialized_only=True))
|
d9ff22bd43d25c4c13f9ea5e170adb742402f486299eb208d0dbef9e6839c72b | from datetime import date, datetime, time, tzinfo
from django.test import SimpleTestCase, override_settings
from django.test.utils import TZ_SUPPORT, requires_tz_support
from django.utils import dateformat, translation
from django.utils.dateformat import format
from django.utils.timezone import (
get_default_timezone, get_fixed_timezone, make_aware, utc,
)
@override_settings(TIME_ZONE='Europe/Copenhagen')
class DateFormatTests(SimpleTestCase):
def setUp(self):
self._orig_lang = translation.get_language()
translation.activate('en-us')
def tearDown(self):
translation.activate(self._orig_lang)
def test_date(self):
d = date(2009, 5, 16)
self.assertEqual(date.fromtimestamp(int(format(d, 'U'))), d)
def test_naive_datetime(self):
dt = datetime(2009, 5, 16, 5, 30, 30)
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt)
def test_naive_ambiguous_datetime(self):
# dt is ambiguous in Europe/Copenhagen. pytz raises an exception for
# the ambiguity, which results in an empty string.
dt = datetime(2015, 10, 25, 2, 30, 0)
# Try all formatters that involve self.timezone.
self.assertEqual(format(dt, 'I'), '')
self.assertEqual(format(dt, 'O'), '')
self.assertEqual(format(dt, 'T'), '')
self.assertEqual(format(dt, 'Z'), '')
@requires_tz_support
def test_datetime_with_local_tzinfo(self):
ltz = get_default_timezone()
dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.replace(tzinfo=None))
@requires_tz_support
def test_datetime_with_tzinfo(self):
tz = get_fixed_timezone(-510)
ltz = get_default_timezone()
dt = make_aware(datetime(2009, 5, 16, 5, 30, 30), ltz)
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz), dt)
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz), dt)
# astimezone() is safe here because the target timezone doesn't have DST
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U'))), dt.astimezone(ltz).replace(tzinfo=None))
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), tz).timetuple(), dt.astimezone(tz).timetuple())
self.assertEqual(datetime.fromtimestamp(int(format(dt, 'U')), ltz).timetuple(), dt.astimezone(ltz).timetuple())
def test_epoch(self):
udt = datetime(1970, 1, 1, tzinfo=utc)
self.assertEqual(format(udt, 'U'), '0')
def test_empty_format(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
self.assertEqual(dateformat.format(my_birthday, ''), '')
def test_am_pm(self):
morning = time(7, 00)
evening = time(19, 00)
self.assertEqual(dateformat.format(morning, 'a'), 'a.m.')
self.assertEqual(dateformat.format(evening, 'a'), 'p.m.')
self.assertEqual(dateformat.format(morning, 'A'), 'AM')
self.assertEqual(dateformat.format(evening, 'A'), 'PM')
def test_microsecond(self):
# Regression test for #18951
dt = datetime(2009, 5, 16, microsecond=123)
self.assertEqual(dateformat.format(dt, 'u'), '000123')
def test_date_formats(self):
# Specifiers 'I', 'r', and 'U' are covered in test_timezones().
my_birthday = datetime(1979, 7, 8, 22, 00)
for specifier, expected in [
('b', 'jul'),
('d', '08'),
('D', 'Sun'),
('E', 'July'),
('F', 'July'),
('j', '8'),
('l', 'Sunday'),
('L', 'False'),
('m', '07'),
('M', 'Jul'),
('n', '7'),
('N', 'July'),
('o', '1979'),
('S', 'th'),
('t', '31'),
('w', '0'),
('W', '27'),
('y', '79'),
('Y', '1979'),
('z', '189'),
]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.format(my_birthday, specifier), expected)
def test_date_formats_c_format(self):
timestamp = datetime(2008, 5, 19, 11, 45, 23, 123456)
self.assertEqual(dateformat.format(timestamp, 'c'), '2008-05-19T11:45:23.123456')
def test_time_formats(self):
# Specifiers 'I', 'r', and 'U' are covered in test_timezones().
my_birthday = datetime(1979, 7, 8, 22, 00)
for specifier, expected in [
('a', 'p.m.'),
('A', 'PM'),
('f', '10'),
('g', '10'),
('G', '22'),
('h', '10'),
('H', '22'),
('i', '00'),
('P', '10 p.m.'),
('s', '00'),
('u', '000000'),
]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.format(my_birthday, specifier), expected)
def test_dateformat(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
self.assertEqual(dateformat.format(my_birthday, r'Y z \C\E\T'), '1979 189 CET')
self.assertEqual(dateformat.format(my_birthday, r'jS \o\f F'), '8th of July')
def test_futuredates(self):
the_future = datetime(2100, 10, 25, 0, 00)
self.assertEqual(dateformat.format(the_future, r'Y'), '2100')
def test_day_of_year_leap(self):
self.assertEqual(dateformat.format(datetime(2000, 12, 31), 'z'), '366')
def test_timezones(self):
my_birthday = datetime(1979, 7, 8, 22, 00)
summertime = datetime(2005, 10, 30, 1, 00)
wintertime = datetime(2005, 10, 30, 4, 00)
noon = time(12, 0, 0)
# 3h30m to the west of UTC
tz = get_fixed_timezone(-210)
aware_dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)
if TZ_SUPPORT:
for specifier, expected in [
('e', ''),
('O', '+0100'),
('r', 'Sun, 08 Jul 1979 22:00:00 +0100'),
('T', 'CET'),
('U', '300315600'),
('Z', '3600'),
]:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.format(my_birthday, specifier), expected)
self.assertEqual(dateformat.format(aware_dt, 'e'), '-0330')
self.assertEqual(
dateformat.format(aware_dt, 'r'),
'Sat, 16 May 2009 05:30:30 -0330',
)
self.assertEqual(dateformat.format(summertime, 'I'), '1')
self.assertEqual(dateformat.format(summertime, 'O'), '+0200')
self.assertEqual(dateformat.format(wintertime, 'I'), '0')
self.assertEqual(dateformat.format(wintertime, 'O'), '+0100')
for specifier in ['e', 'O', 'T', 'Z']:
with self.subTest(specifier=specifier):
self.assertEqual(dateformat.time_format(noon, specifier), '')
# Ticket #16924 -- We don't need timezone support to test this
self.assertEqual(dateformat.format(aware_dt, 'O'), '-0330')
def test_invalid_time_format_specifiers(self):
my_birthday = date(1984, 8, 7)
for specifier in ['a', 'A', 'f', 'g', 'G', 'h', 'H', 'i', 'P', 'r', 's', 'u']:
with self.subTest(specifier=specifier):
msg = (
'The format for date objects may not contain time-related '
f'format specifiers (found {specifier!r}).'
)
with self.assertRaisesMessage(TypeError, msg):
dateformat.format(my_birthday, specifier)
@requires_tz_support
def test_e_format_with_named_time_zone(self):
dt = datetime(1970, 1, 1, tzinfo=utc)
self.assertEqual(dateformat.format(dt, 'e'), 'UTC')
@requires_tz_support
def test_e_format_with_time_zone_with_unimplemented_tzname(self):
class NoNameTZ(tzinfo):
"""Time zone without .tzname() defined."""
def utcoffset(self, dt):
return None
dt = datetime(1970, 1, 1, tzinfo=NoNameTZ())
self.assertEqual(dateformat.format(dt, 'e'), '')
def test_P_format(self):
for expected, t in [
('midnight', time(0)),
('noon', time(12)),
('4 a.m.', time(4)),
('8:30 a.m.', time(8, 30)),
('4 p.m.', time(16)),
('8:30 p.m.', time(20, 30)),
]:
with self.subTest(time=t):
self.assertEqual(dateformat.time_format(t, 'P'), expected)
def test_r_format_with_non_en_locale(self):
# Changing the locale doesn't change the "r" format.
dt = datetime(1979, 7, 8, 22, 00)
with translation.override('fr'):
self.assertEqual(
dateformat.format(dt, 'r'),
'Sun, 08 Jul 1979 22:00:00 +0100',
)
def test_S_format(self):
for expected, days in [
('st', [1, 21, 31]),
('nd', [2, 22]),
('rd', [3, 23]),
('th', (n for n in range(4, 31) if n not in [21, 22, 23])),
]:
for day in days:
dt = date(1970, 1, day)
with self.subTest(day=day):
self.assertEqual(dateformat.format(dt, 'S'), expected)
def test_y_format_year_before_1000(self):
tests = [
(476, '76'),
(42, '42'),
(4, '04'),
]
for year, expected_date in tests:
with self.subTest(year=year):
self.assertEqual(
dateformat.format(datetime(year, 9, 8, 5, 0), 'y'),
expected_date,
)
def test_Y_format_year_before_1000(self):
self.assertEqual(dateformat.format(datetime(1, 1, 1), 'Y'), '0001')
self.assertEqual(dateformat.format(datetime(999, 1, 1), 'Y'), '0999')
def test_twelve_hour_format(self):
tests = [
(0, '12', '12'),
(1, '1', '01'),
(11, '11', '11'),
(12, '12', '12'),
(13, '1', '01'),
(23, '11', '11'),
]
for hour, g_expected, h_expected in tests:
dt = datetime(2000, 1, 1, hour)
with self.subTest(hour=hour):
self.assertEqual(dateformat.format(dt, 'g'), g_expected)
self.assertEqual(dateformat.format(dt, 'h'), h_expected)
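# Small sketch clarifying the fixed-offset values used in test_timezones() and
# test_datetime_with_tzinfo() above: get_fixed_timezone() takes an offset in
# minutes, so -210 is 3 hours 30 minutes west of UTC and formats as '-0330'
# with both the 'e' (name) and 'O' (offset) specifiers.
def _fixed_offset_sketch():
    tz = get_fixed_timezone(-210)
    dt = datetime(2009, 5, 16, 5, 30, 30, tzinfo=tz)
    return dateformat.format(dt, 'e'), dateformat.format(dt, 'O')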
|
1a3f681e8b27c408244a6c9c10b30dfe7bf8c2afad16016772a288203eae6c7e | import unittest
from datetime import date, datetime, time, timedelta
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from django.utils.timezone import get_fixed_timezone
class DateParseTests(unittest.TestCase):
def test_parse_date(self):
# Valid inputs
self.assertEqual(parse_date('2012-04-23'), date(2012, 4, 23))
self.assertEqual(parse_date('2012-4-9'), date(2012, 4, 9))
# Invalid inputs
self.assertIsNone(parse_date('20120423'))
with self.assertRaises(ValueError):
parse_date('2012-04-56')
def test_parse_time(self):
# Valid inputs
self.assertEqual(parse_time('09:15:00'), time(9, 15))
self.assertEqual(parse_time('10:10'), time(10, 10))
self.assertEqual(parse_time('10:20:30.400'), time(10, 20, 30, 400000))
self.assertEqual(parse_time('10:20:30,400'), time(10, 20, 30, 400000))
self.assertEqual(parse_time('4:8:16'), time(4, 8, 16))
# Time zone offset is ignored.
self.assertEqual(parse_time('00:05:23+04:00'), time(0, 5, 23))
# Invalid inputs
self.assertIsNone(parse_time('00:05:'))
self.assertIsNone(parse_time('00:05:23,'))
self.assertIsNone(parse_time('00:05:23+'))
self.assertIsNone(parse_time('00:05:23+25:00'))
self.assertIsNone(parse_time('4:18:101'))
self.assertIsNone(parse_time('091500'))
with self.assertRaises(ValueError):
parse_time('09:15:90')
def test_parse_datetime(self):
valid_inputs = (
('2012-04-23T09:15:00', datetime(2012, 4, 23, 9, 15)),
('2012-4-9 4:8:16', datetime(2012, 4, 9, 4, 8, 16)),
('2012-04-23T09:15:00Z', datetime(2012, 4, 23, 9, 15, 0, 0, get_fixed_timezone(0))),
('2012-4-9 4:8:16-0320', datetime(2012, 4, 9, 4, 8, 16, 0, get_fixed_timezone(-200))),
('2012-04-23T10:20:30.400+02:30', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150))),
('2012-04-23T10:20:30.400+02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(120))),
('2012-04-23T10:20:30.400-02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120))),
('2012-04-23T10:20:30,400-02', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(-120))),
('2012-04-23T10:20:30.400 +0230', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(150))),
('2012-04-23T10:20:30,400 +00', datetime(2012, 4, 23, 10, 20, 30, 400000, get_fixed_timezone(0))),
('2012-04-23T10:20:30 -02', datetime(2012, 4, 23, 10, 20, 30, 0, get_fixed_timezone(-120))),
)
for source, expected in valid_inputs:
with self.subTest(source=source):
self.assertEqual(parse_datetime(source), expected)
# Invalid inputs
self.assertIsNone(parse_datetime('20120423091500'))
with self.assertRaises(ValueError):
parse_datetime('2012-04-56T09:15:90')
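# Sketch of the return-value convention exercised above: the dateparse helpers
# return None for strings that don't match the expected format at all, but
# raise ValueError for well-formed strings whose values are out of range.
def _dateparse_convention_examples():
    return (
        parse_date('20120423'),                       # wrong format -> None
        parse_datetime('2012-04-23T09:15:00+02:00'),  # aware datetime with a fixed offset
    )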
class DurationParseTests(unittest.TestCase):
def test_parse_python_format(self):
timedeltas = [
timedelta(days=4, minutes=15, seconds=30, milliseconds=100), # fractions of seconds
timedelta(hours=10, minutes=15, seconds=30), # hours, minutes, seconds
timedelta(days=4, minutes=15, seconds=30), # multiple days
timedelta(days=1, minutes=00, seconds=00), # single day
timedelta(days=-4, minutes=15, seconds=30), # negative durations
timedelta(minutes=15, seconds=30), # minute & seconds
timedelta(seconds=30), # seconds
]
for delta in timedeltas:
with self.subTest(delta=delta):
self.assertEqual(parse_duration(format(delta)), delta)
def test_parse_postgresql_format(self):
test_values = (
('1 day', timedelta(1)),
('-1 day', timedelta(-1)),
('1 day 0:00:01', timedelta(days=1, seconds=1)),
('1 day -0:00:01', timedelta(days=1, seconds=-1)),
('-1 day -0:00:01', timedelta(days=-1, seconds=-1)),
('-1 day +0:00:01', timedelta(days=-1, seconds=1)),
('4 days 0:15:30.1', timedelta(days=4, minutes=15, seconds=30, milliseconds=100)),
('4 days 0:15:30.0001', timedelta(days=4, minutes=15, seconds=30, microseconds=100)),
('-4 days -15:00:30', timedelta(days=-4, hours=-15, seconds=-30)),
)
for source, expected in test_values:
with self.subTest(source=source):
self.assertEqual(parse_duration(source), expected)
def test_seconds(self):
self.assertEqual(parse_duration('30'), timedelta(seconds=30))
def test_minutes_seconds(self):
self.assertEqual(parse_duration('15:30'), timedelta(minutes=15, seconds=30))
self.assertEqual(parse_duration('5:30'), timedelta(minutes=5, seconds=30))
def test_hours_minutes_seconds(self):
self.assertEqual(parse_duration('10:15:30'), timedelta(hours=10, minutes=15, seconds=30))
self.assertEqual(parse_duration('1:15:30'), timedelta(hours=1, minutes=15, seconds=30))
self.assertEqual(parse_duration('100:200:300'), timedelta(hours=100, minutes=200, seconds=300))
def test_days(self):
self.assertEqual(parse_duration('4 15:30'), timedelta(days=4, minutes=15, seconds=30))
self.assertEqual(parse_duration('4 10:15:30'), timedelta(days=4, hours=10, minutes=15, seconds=30))
def test_fractions_of_seconds(self):
test_values = (
('15:30.1', timedelta(minutes=15, seconds=30, milliseconds=100)),
('15:30.01', timedelta(minutes=15, seconds=30, milliseconds=10)),
('15:30.001', timedelta(minutes=15, seconds=30, milliseconds=1)),
('15:30.0001', timedelta(minutes=15, seconds=30, microseconds=100)),
('15:30.00001', timedelta(minutes=15, seconds=30, microseconds=10)),
('15:30.000001', timedelta(minutes=15, seconds=30, microseconds=1)),
('15:30,000001', timedelta(minutes=15, seconds=30, microseconds=1)),
)
for source, expected in test_values:
with self.subTest(source=source):
self.assertEqual(parse_duration(source), expected)
def test_negative(self):
test_values = (
('-4 15:30', timedelta(days=-4, minutes=15, seconds=30)),
('-172800', timedelta(days=-2)),
('-15:30', timedelta(minutes=-15, seconds=-30)),
('-1:15:30', timedelta(hours=-1, minutes=-15, seconds=-30)),
('-30.1', timedelta(seconds=-30, milliseconds=-100)),
('-30,1', timedelta(seconds=-30, milliseconds=-100)),
('-00:01:01', timedelta(minutes=-1, seconds=-1)),
('-01:01', timedelta(seconds=-61)),
('-01:-01', None),
)
for source, expected in test_values:
with self.subTest(source=source):
self.assertEqual(parse_duration(source), expected)
def test_iso_8601(self):
test_values = (
('P4Y', None),
('P4M', None),
('P4W', None),
('P4D', timedelta(days=4)),
('-P1D', timedelta(days=-1)),
('P0.5D', timedelta(hours=12)),
('P0,5D', timedelta(hours=12)),
('-P0.5D', timedelta(hours=-12)),
('-P0,5D', timedelta(hours=-12)),
('PT5H', timedelta(hours=5)),
('-PT5H', timedelta(hours=-5)),
('PT5M', timedelta(minutes=5)),
('-PT5M', timedelta(minutes=-5)),
('PT5S', timedelta(seconds=5)),
('-PT5S', timedelta(seconds=-5)),
('PT0.000005S', timedelta(microseconds=5)),
('PT0,000005S', timedelta(microseconds=5)),
('-PT0.000005S', timedelta(microseconds=-5)),
('-PT0,000005S', timedelta(microseconds=-5)),
('-P4DT1H', timedelta(days=-4, hours=-1)),
)
for source, expected in test_values:
with self.subTest(source=source):
self.assertEqual(parse_duration(source), expected)
|
1bb9144b003ee80c3b44cb825f86f3aee3a109a6dc1f02d68cff21f8094d8abb | import os
from datetime import datetime
from django.test import SimpleTestCase
from django.utils.functional import lazystr
from django.utils.html import (
conditional_escape, escape, escapejs, format_html, html_safe, json_script,
linebreaks, smart_urlquote, strip_spaces_between_tags, strip_tags, urlize,
)
from django.utils.safestring import mark_safe
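# Quick reference for the escaping helpers exercised below: escape() always
# HTML-escapes its argument, while conditional_escape() leaves values that
# declare themselves safe (via mark_safe()/__html__()) untouched, e.g.:
#   escape('<b>')                        -> '&lt;b&gt;'
#   conditional_escape(mark_safe('<b>')) -> '<b>'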
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
function(value) equals output. If output is None, function(value)
equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
items = (
('&', '&amp;'),
('<', '&lt;'),
('>', '&gt;'),
('"', '&quot;'),
("'", '&#x27;'),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
with self.subTest(value=value, output=output):
for pattern in patterns:
with self.subTest(value=value, output=output, pattern=pattern):
self.check_output(escape, pattern % value, pattern % output)
self.check_output(escape, lazystr(pattern % value), pattern % output)
# Check repeated values.
self.check_output(escape, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(escape, '<&', '&lt;&amp;')
def test_format_html(self):
self.assertEqual(
format_html(
"{} {} {third} {fourth}",
"< Dangerous >",
mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=mark_safe("<i>safe again</i>"),
),
"&lt; Dangerous &gt; <b>safe</b> &lt; dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2", "<p>para1<br>sub1<br>sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br>sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(linebreaks, value, output)
self.check_output(linebreaks, lazystr(value), output)
def test_strip_tags(self):
items = (
('<p>See: &#39;&eacute; is an apostrophe followed by e acute</p>',
'See: &#39;&eacute; is an apostrophe followed by e acute'),
('<p>See: &#x27;&eacute; is an apostrophe followed by e acute</p>',
'See: &#x27;&eacute; is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
# caused infinite loop on Pythons not patched with
# https://bugs.python.org/issue20288
('&gotcha&#;<>', '&gotcha&#;<>'),
('<sc<!-- -->ript>test<<!-- -->/script>', 'ript>test'),
('<script>alert()</script>&h', 'alert()h'),
('><!' + ('&' * 16000) + 'D', '><!' + ('&' * 16000) + 'D'),
('X<<<<br>br>br>br>X', 'XX'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_tags, value, output)
self.check_output(strip_tags, lazystr(value), output)
def test_strip_tags_files(self):
# Test with more lengthy content (also catching performance regressions)
for filename in ('strip_tags1.html', 'strip_tags2.txt'):
with self.subTest(filename=filename):
path = os.path.join(os.path.dirname(__file__), 'files', filename)
with open(path) as fp:
content = fp.read()
start = datetime.now()
stripped = strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Please try again.", stripped)
self.assertNotIn('<', stripped)
def test_strip_spaces_between_tags(self):
# Strings that should come out untouched.
items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
for value in items:
with self.subTest(value=value):
self.check_output(strip_spaces_between_tags, value)
self.check_output(strip_spaces_between_tags, lazystr(value))
# Strings that have spaces to strip.
items = (
('<d> </d>', '<d></d>'),
('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_spaces_between_tags, value, output)
self.check_output(strip_spaces_between_tags, lazystr(value), output)
def test_escapejs(self):
items = (
('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
(
'and lots of whitespace: \r\n\t\v\f\b',
'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'
),
(r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
(
'paragraph separator:\u2029and line separator:\u2028',
'paragraph separator:\\u2029and line separator:\\u2028'
),
('`', '\\u0060'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(escapejs, value, output)
self.check_output(escapejs, lazystr(value), output)
def test_json_script(self):
tests = (
# "<", ">" and "&" are quoted inside JSON strings
(('&<>', '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>')),
# "<", ">" and "&" are quoted inside JSON objects
(
{'a': '<script>test&ing</script>'},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>'
),
# Lazy strings are quoted
(lazystr('&<>'), '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>'),
(
{'a': lazystr('<script>test&ing</script>')},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>'
),
)
for arg, expected in tests:
with self.subTest(arg=arg):
self.assertEqual(json_script(arg, 'test_id'), expected)
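# For reference, json_script(value, element_id) JSON-encodes the value,
# escapes '<', '>' and '&' as \uXXXX sequences, and wraps the result in a
# <script type="application/json"> element with the given id; mirroring the
# cases above (illustrative only):
#   json_script({'key': 'value'}, 'data-id')
#   -> '<script id="data-id" type="application/json">{"key": "value"}</script>'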
def test_smart_urlquote(self):
items = (
('http://öäü.com/', 'http://xn--4ca9at.com/'),
('http://öäü.com/öäü/', 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/'),
# Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered
# safe as per RFC.
('http://example.com/path/öäü/', 'http://example.com/path/%C3%B6%C3%A4%C3%BC/'),
('http://example.com/%C3%B6/ä/', 'http://example.com/%C3%B6/%C3%A4/'),
('http://example.com/?x=1&y=2+3&z=', 'http://example.com/?x=1&y=2+3&z='),
('http://example.com/?x=<>"\'', 'http://example.com/?x=%3C%3E%22%27'),
('http://example.com/?q=http://example.com/?x=1%26q=django',
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango',
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
('http://.www.f oo.bar/', 'http://.www.f%20oo.bar/'),
)
# IDNs are properly quoted
for value, output in items:
with self.subTest(value=value, output=output):
self.assertEqual(smart_urlquote(value), output)
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(conditional_escape(s), '&lt;h1&gt;interop&lt;/h1&gt;')
self.assertEqual(conditional_escape(mark_safe(s)), s)
self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s)
def test_html_safe(self):
@html_safe
class HtmlClass:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, '__html__'))
self.assertTrue(hasattr(html_obj, '__html__'))
self.assertEqual(str(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
class BaseClass:
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __str__(self):
return 'some non html content'
@html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return 'some html safe content'
subclass_obj = Subclass()
self.assertEqual(str(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
pass
def test_urlize(self):
tests = (
(
'Search for google.com/?q=! and see.',
'Search for <a href="http://google.com/?q=">google.com/?q=</a>! and see.'
),
(
'Search for google.com/?q=1&lt! and see.',
'Search for <a href="http://google.com/?q=1%3C">google.com/?q=1&lt</a>! and see.'
),
(
lazystr('Search for google.com/?q=!'),
'Search for <a href="http://google.com/?q=">google.com/?q=</a>!'
),
('[email protected]', '<a href="mailto:[email protected]">[email protected]</a>'),
)
for value, output in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), output)
def test_urlize_unchanged_inputs(self):
tests = (
('a' + '@a' * 50000) + 'a', # simple_email_re catastrophic test
('a' + '.' * 1000000) + 'a', # trailing_punctuation catastrophic test
'foo@',
'@foo.com',
'[email protected]',
'foo@localhost',
'foo@localhost.',
)
for value in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), value)
|
c7db223a2668792109398e28e3cd290796faf7aa8c2a03e99089ca8a005252af | import copy
import unittest
from django.utils.tree import Node
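# Node is the small tree data structure behind the ORM's Q objects: it keeps
# a list of children, a connector string (Node.default, 'DEFAULT', unless
# overridden) and a negated flag. A minimal construction mirroring the
# fixtures in setUp() below:
#   node = Node(children=[('a', 1), ('b', 2)], connector='AND', negated=False)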
class NodeTests(unittest.TestCase):
def setUp(self):
self.node1_children = [('a', 1), ('b', 2)]
self.node1 = Node(self.node1_children)
self.node2 = Node()
def test_str(self):
self.assertEqual(str(self.node1), "(DEFAULT: ('a', 1), ('b', 2))")
self.assertEqual(str(self.node2), "(DEFAULT: )")
def test_repr(self):
self.assertEqual(repr(self.node1),
"<Node: (DEFAULT: ('a', 1), ('b', 2))>")
self.assertEqual(repr(self.node2), "<Node: (DEFAULT: )>")
def test_hash(self):
node3 = Node(self.node1_children, negated=True)
node4 = Node(self.node1_children, connector='OTHER')
node5 = Node(self.node1_children)
node6 = Node([['a', 1], ['b', 2]])
node7 = Node([('a', [1, 2])])
node8 = Node([('a', (1, 2))])
self.assertNotEqual(hash(self.node1), hash(self.node2))
self.assertNotEqual(hash(self.node1), hash(node3))
self.assertNotEqual(hash(self.node1), hash(node4))
self.assertEqual(hash(self.node1), hash(node5))
self.assertEqual(hash(self.node1), hash(node6))
self.assertEqual(hash(self.node2), hash(Node()))
self.assertEqual(hash(node7), hash(node8))
def test_len(self):
self.assertEqual(len(self.node1), 2)
self.assertEqual(len(self.node2), 0)
def test_bool(self):
self.assertTrue(self.node1)
self.assertFalse(self.node2)
def test_contains(self):
self.assertIn(('a', 1), self.node1)
self.assertNotIn(('a', 1), self.node2)
def test_add(self):
# start with the same children of node1 then add an item
node3 = Node(self.node1_children)
node3_added_child = ('c', 3)
# add() returns the added data
self.assertEqual(node3.add(node3_added_child, Node.default),
node3_added_child)
# we added exactly one item, len() should reflect that
self.assertEqual(len(self.node1) + 1, len(node3))
self.assertEqual(str(node3), "(DEFAULT: ('a', 1), ('b', 2), ('c', 3))")
def test_add_eq_child_mixed_connector(self):
node = Node(['a', 'b'], 'OR')
self.assertEqual(node.add('a', 'AND'), 'a')
self.assertEqual(node, Node([Node(['a', 'b'], 'OR'), 'a'], 'AND'))
def test_negate(self):
# negated is False by default
self.assertFalse(self.node1.negated)
self.node1.negate()
self.assertTrue(self.node1.negated)
self.node1.negate()
self.assertFalse(self.node1.negated)
def test_deepcopy(self):
node4 = copy.copy(self.node1)
node5 = copy.deepcopy(self.node1)
self.assertIs(self.node1.children, node4.children)
self.assertIsNot(self.node1.children, node5.children)
def test_eq_children(self):
node = Node(self.node1_children)
self.assertEqual(node, self.node1)
self.assertNotEqual(node, self.node2)
def test_eq_connector(self):
new_node = Node(connector='NEW')
default_node = Node(connector='DEFAULT')
self.assertEqual(default_node, self.node2)
self.assertNotEqual(default_node, new_node)
def test_eq_negated(self):
node = Node(negated=False)
negated = Node(negated=True)
self.assertNotEqual(negated, node)
|
77cd574e4e4cfe43dec9d91ea244b52ffea4d2eb751c71eff42241cf24669fc2 | import contextlib
import os
import py_compile
import shutil
import sys
import tempfile
import threading
import time
import types
import weakref
import zipfile
from importlib import import_module
from pathlib import Path
from subprocess import CompletedProcess
from unittest import mock, skip, skipIf
import pytz
import django.__main__
from django.apps.registry import Apps
from django.test import SimpleTestCase
from django.test.utils import extend_sys_path
from django.utils import autoreload
from django.utils.autoreload import WatchmanUnavailable
from .test_module import __main__ as test_main, main_module as test_main_module
from .utils import on_macos_with_hfs
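# Context for the reloader tests below: autoreload ships two reloader
# implementations, StatReloader (mtime polling) and WatchmanReloader (backed
# by pywatchman), and get_reloader() falls back to StatReloader when the
# Watchman service is unavailable. A hedged sketch of that selection logic
# (not the exact implementation):
#   try:
#       autoreload.WatchmanReloader.check_availability()
#       reloader = autoreload.WatchmanReloader()
#   except WatchmanUnavailable:
#       reloader = autoreload.StatReloader()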
class TestIterModulesAndFiles(SimpleTestCase):
def import_and_cleanup(self, name):
import_module(name)
self.addCleanup(lambda: sys.path_importer_cache.clear())
self.addCleanup(lambda: sys.modules.pop(name, None))
def clear_autoreload_caches(self):
autoreload.iter_modules_and_files.cache_clear()
def assertFileFound(self, filename):
# Some temp directories are symlinks. Python resolves these fully while
# importing.
resolved_filename = filename.resolve(strict=True)
self.clear_autoreload_caches()
# Test uncached access
self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
# Test cached access
self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def assertFileNotFound(self, filename):
resolved_filename = filename.resolve(strict=True)
self.clear_autoreload_caches()
# Test uncached access
self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
# Test cached access
self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def temporary_file(self, filename):
dirname = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, dirname)
return Path(dirname) / filename
def test_paths_are_pathlib_instances(self):
for filename in autoreload.iter_all_python_module_files():
self.assertIsInstance(filename, Path)
def test_file_added(self):
"""
When a file is added, it's returned by iter_all_python_module_files().
"""
filename = self.temporary_file('test_deleted_removed_module.py')
filename.touch()
with extend_sys_path(str(filename.parent)):
self.import_and_cleanup('test_deleted_removed_module')
self.assertFileFound(filename.absolute())
def test_check_errors(self):
"""
When a file containing an error is imported in a function wrapped by
check_errors(), it's still returned by iter_all_python_module_files().
"""
filename = self.temporary_file('test_syntax_error.py')
filename.write_text("Ceci n'est pas du Python.")
with extend_sys_path(str(filename.parent)):
try:
with self.assertRaises(SyntaxError):
autoreload.check_errors(import_module)('test_syntax_error')
finally:
autoreload._exception = None
self.assertFileFound(filename)
def test_check_errors_catches_all_exceptions(self):
"""
Since Python may raise arbitrary exceptions when importing code,
check_errors() must catch Exception, not just some subclasses.
"""
filename = self.temporary_file('test_exception.py')
filename.write_text('raise Exception')
with extend_sys_path(str(filename.parent)):
try:
with self.assertRaises(Exception):
autoreload.check_errors(import_module)('test_exception')
finally:
autoreload._exception = None
self.assertFileFound(filename)
def test_zip_reload(self):
"""
Modules imported from zipped files have their archive location included
in the result.
"""
zip_file = self.temporary_file('zip_import.zip')
with zipfile.ZipFile(str(zip_file), 'w', zipfile.ZIP_DEFLATED) as zipf:
zipf.writestr('test_zipped_file.py', '')
with extend_sys_path(str(zip_file)):
self.import_and_cleanup('test_zipped_file')
self.assertFileFound(zip_file)
def test_bytecode_conversion_to_source(self):
""".pyc and .pyo files are included in the files list."""
filename = self.temporary_file('test_compiled.py')
filename.touch()
compiled_file = Path(py_compile.compile(str(filename), str(filename.with_suffix('.pyc'))))
filename.unlink()
with extend_sys_path(str(compiled_file.parent)):
self.import_and_cleanup('test_compiled')
self.assertFileFound(compiled_file)
def test_weakref_in_sys_module(self):
"""iter_all_python_module_files() ignores weakref modules."""
time_proxy = weakref.proxy(time)
sys.modules['time_proxy'] = time_proxy
self.addCleanup(lambda: sys.modules.pop('time_proxy', None))
list(autoreload.iter_all_python_module_files()) # No crash.
def test_module_without_spec(self):
module = types.ModuleType('test_module')
del module.__spec__
self.assertEqual(autoreload.iter_modules_and_files((module,), frozenset()), frozenset())
def test_main_module_is_resolved(self):
main_module = sys.modules['__main__']
self.assertFileFound(Path(main_module.__file__))
def test_main_module_without_file_is_not_resolved(self):
fake_main = types.ModuleType('__main__')
self.assertEqual(autoreload.iter_modules_and_files((fake_main,), frozenset()), frozenset())
def test_path_with_embedded_null_bytes(self):
for path in (
'embedded_null_byte\x00.py',
'di\x00rectory/embedded_null_byte.py',
):
with self.subTest(path=path):
self.assertEqual(
autoreload.iter_modules_and_files((), frozenset([path])),
frozenset(),
)
class TestChildArguments(SimpleTestCase):
@mock.patch.dict(sys.modules, {'__main__': django.__main__})
@mock.patch('sys.argv', [django.__main__.__file__, 'runserver'])
@mock.patch('sys.warnoptions', [])
def test_run_as_module(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, '-m', 'django', 'runserver']
)
@mock.patch.dict(sys.modules, {'__main__': test_main})
@mock.patch('sys.argv', [test_main.__file__, 'runserver'])
@mock.patch('sys.warnoptions', [])
def test_run_as_non_django_module(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, '-m', 'utils_tests.test_module', 'runserver'],
)
@mock.patch.dict(sys.modules, {'__main__': test_main_module})
@mock.patch('sys.argv', [test_main.__file__, 'runserver'])
@mock.patch('sys.warnoptions', [])
def test_run_as_non_django_module_non_package(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, '-m', 'utils_tests.test_module.main_module', 'runserver'],
)
@mock.patch('sys.argv', [__file__, 'runserver'])
@mock.patch('sys.warnoptions', ['error'])
def test_warnoptions(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, '-Werror', __file__, 'runserver']
)
@mock.patch('sys.warnoptions', [])
def test_exe_fallback(self):
with tempfile.TemporaryDirectory() as tmpdir:
exe_path = Path(tmpdir) / 'django-admin.exe'
exe_path.touch()
with mock.patch('sys.argv', [exe_path.with_suffix(''), 'runserver']):
self.assertEqual(
autoreload.get_child_arguments(),
[exe_path, 'runserver']
)
@mock.patch('sys.warnoptions', [])
def test_entrypoint_fallback(self):
with tempfile.TemporaryDirectory() as tmpdir:
script_path = Path(tmpdir) / 'django-admin-script.py'
script_path.touch()
with mock.patch('sys.argv', [script_path.with_name('django-admin'), 'runserver']):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, script_path, 'runserver']
)
@mock.patch('sys.argv', ['does-not-exist', 'runserver'])
@mock.patch('sys.warnoptions', [])
def test_raises_runtimeerror(self):
msg = 'Script does-not-exist does not exist.'
with self.assertRaisesMessage(RuntimeError, msg):
autoreload.get_child_arguments()
@mock.patch('sys.argv', [__file__, 'runserver'])
@mock.patch('sys.warnoptions', [])
def test_module_no_spec(self):
module = types.ModuleType('test_module')
del module.__spec__
with mock.patch.dict(sys.modules, {'__main__': module}):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, __file__, 'runserver']
)
class TestUtilities(SimpleTestCase):
def test_is_django_module(self):
for module, expected in (
(pytz, False),
(sys, False),
(autoreload, True)
):
with self.subTest(module=module):
self.assertIs(autoreload.is_django_module(module), expected)
def test_is_django_path(self):
for module, expected in (
(pytz.__file__, False),
(contextlib.__file__, False),
(autoreload.__file__, True)
):
with self.subTest(module=module):
self.assertIs(autoreload.is_django_path(module), expected)
class TestCommonRoots(SimpleTestCase):
def test_common_roots(self):
paths = (
Path('/first/second'),
Path('/first/second/third'),
Path('/first/'),
Path('/root/first/'),
)
results = autoreload.common_roots(paths)
self.assertCountEqual(results, [Path('/first/'), Path('/root/first/')])
class TestSysPathDirectories(SimpleTestCase):
def setUp(self):
self._directory = tempfile.TemporaryDirectory()
self.directory = Path(self._directory.name).resolve(strict=True).absolute()
self.file = self.directory / 'test'
self.file.touch()
def tearDown(self):
self._directory.cleanup()
def test_sys_paths_with_directories(self):
with extend_sys_path(str(self.file)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.file.parent, paths)
def test_sys_paths_non_existing(self):
nonexistent_file = Path(self.directory.name) / 'does_not_exist'
with extend_sys_path(str(nonexistent_file)):
paths = list(autoreload.sys_path_directories())
self.assertNotIn(nonexistent_file, paths)
self.assertNotIn(nonexistent_file.parent, paths)
def test_sys_paths_absolute(self):
paths = list(autoreload.sys_path_directories())
self.assertTrue(all(p.is_absolute() for p in paths))
def test_sys_paths_directories(self):
with extend_sys_path(str(self.directory)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.directory, paths)
class GetReloaderTests(SimpleTestCase):
@mock.patch('django.utils.autoreload.WatchmanReloader')
def test_watchman_unavailable(self, mocked_watchman):
mocked_watchman.check_availability.side_effect = WatchmanUnavailable
self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader)
@mock.patch.object(autoreload.WatchmanReloader, 'check_availability')
def test_watchman_available(self, mocked_available):
# If WatchmanUnavailable isn't raised, Watchman will be chosen.
mocked_available.return_value = None
result = autoreload.get_reloader()
self.assertIsInstance(result, autoreload.WatchmanReloader)
class RunWithReloaderTests(SimpleTestCase):
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'})
@mock.patch('django.utils.autoreload.get_reloader')
def test_swallows_keyboard_interrupt(self, mocked_get_reloader):
mocked_get_reloader.side_effect = KeyboardInterrupt()
autoreload.run_with_reloader(lambda: None) # No exception
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'false'})
@mock.patch('django.utils.autoreload.restart_with_reloader')
def test_calls_sys_exit(self, mocked_restart_reloader):
mocked_restart_reloader.return_value = 1
with self.assertRaises(SystemExit) as exc:
autoreload.run_with_reloader(lambda: None)
self.assertEqual(exc.exception.code, 1)
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'})
@mock.patch('django.utils.autoreload.start_django')
@mock.patch('django.utils.autoreload.get_reloader')
def test_calls_start_django(self, mocked_reloader, mocked_start_django):
mocked_reloader.return_value = mock.sentinel.RELOADER
autoreload.run_with_reloader(mock.sentinel.METHOD)
self.assertEqual(mocked_start_django.call_count, 1)
self.assertSequenceEqual(
mocked_start_django.call_args[0],
[mock.sentinel.RELOADER, mock.sentinel.METHOD]
)
class StartDjangoTests(SimpleTestCase):
@mock.patch('django.utils.autoreload.StatReloader')
def test_watchman_becomes_unavailable(self, mocked_stat):
mocked_stat.should_stop.return_value = True
fake_reloader = mock.MagicMock()
fake_reloader.should_stop = False
fake_reloader.run.side_effect = autoreload.WatchmanUnavailable()
autoreload.start_django(fake_reloader, lambda: None)
self.assertEqual(mocked_stat.call_count, 1)
@mock.patch('django.utils.autoreload.ensure_echo_on')
def test_echo_on_called(self, mocked_echo):
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, lambda: None)
self.assertEqual(mocked_echo.call_count, 1)
@mock.patch('django.utils.autoreload.check_errors')
def test_check_errors_called(self, mocked_check_errors):
fake_method = mock.MagicMock(return_value=None)
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, fake_method)
self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method])
@mock.patch('threading.Thread')
@mock.patch('django.utils.autoreload.check_errors')
def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread):
fake_reloader = mock.MagicMock()
fake_main_func = mock.MagicMock()
fake_thread = mock.MagicMock()
mocked_check_errors.return_value = fake_main_func
mocked_thread.return_value = fake_thread
autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123)
self.assertEqual(mocked_thread.call_count, 1)
self.assertEqual(
mocked_thread.call_args[1],
{'target': fake_main_func, 'args': (123,), 'kwargs': {'abc': 123}, 'name': 'django-main-thread'}
)
self.assertIs(fake_thread.daemon, True)
self.assertTrue(fake_thread.start.called)
class TestCheckErrors(SimpleTestCase):
def test_mutates_error_files(self):
fake_method = mock.MagicMock(side_effect=RuntimeError())
wrapped = autoreload.check_errors(fake_method)
with mock.patch.object(autoreload, '_error_files') as mocked_error_files:
try:
with self.assertRaises(RuntimeError):
wrapped()
finally:
autoreload._exception = None
self.assertEqual(mocked_error_files.append.call_count, 1)
class TestRaiseLastException(SimpleTestCase):
@mock.patch('django.utils.autoreload._exception', None)
def test_no_exception(self):
# Should raise no exception if _exception is None
autoreload.raise_last_exception()
def test_raises_exception(self):
class MyException(Exception):
pass
# Create an exception
try:
raise MyException('Test Message')
except MyException:
exc_info = sys.exc_info()
with mock.patch('django.utils.autoreload._exception', exc_info):
with self.assertRaisesMessage(MyException, 'Test Message'):
autoreload.raise_last_exception()
def test_raises_custom_exception(self):
class MyException(Exception):
def __init__(self, msg, extra_context):
super().__init__(msg)
self.extra_context = extra_context
# Create an exception.
try:
raise MyException('Test Message', 'extra context')
except MyException:
exc_info = sys.exc_info()
with mock.patch('django.utils.autoreload._exception', exc_info):
with self.assertRaisesMessage(MyException, 'Test Message'):
autoreload.raise_last_exception()
def test_raises_exception_with_context(self):
try:
raise Exception(2)
except Exception as e:
try:
raise Exception(1) from e
except Exception:
exc_info = sys.exc_info()
with mock.patch('django.utils.autoreload._exception', exc_info):
with self.assertRaises(Exception) as cm:
autoreload.raise_last_exception()
self.assertEqual(cm.exception.args[0], 1)
self.assertEqual(cm.exception.__cause__.args[0], 2)
class RestartWithReloaderTests(SimpleTestCase):
executable = '/usr/bin/python'
def patch_autoreload(self, argv):
patch_call = mock.patch('django.utils.autoreload.subprocess.run', return_value=CompletedProcess(argv, 0))
patches = [
mock.patch('django.utils.autoreload.sys.argv', argv),
mock.patch('django.utils.autoreload.sys.executable', self.executable),
mock.patch('django.utils.autoreload.sys.warnoptions', ['all']),
]
for p in patches:
p.start()
self.addCleanup(p.stop)
mock_call = patch_call.start()
self.addCleanup(patch_call.stop)
return mock_call
def test_manage_py(self):
with tempfile.TemporaryDirectory() as temp_dir:
script = Path(temp_dir) / 'manage.py'
script.touch()
argv = [str(script), 'runserver']
mock_call = self.patch_autoreload(argv)
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(
mock_call.call_args[0][0],
[self.executable, '-Wall'] + argv,
)
def test_python_m_django(self):
main = '/usr/lib/pythonX.Y/site-packages/django/__main__.py'
argv = [main, 'runserver']
mock_call = self.patch_autoreload(argv)
with mock.patch('django.__main__.__file__', main):
with mock.patch.dict(sys.modules, {'__main__': django.__main__}):
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall', '-m', 'django'] + argv[1:])
class ReloaderTests(SimpleTestCase):
RELOADER_CLS = None
def setUp(self):
self._tempdir = tempfile.TemporaryDirectory()
self.tempdir = Path(self._tempdir.name).resolve(strict=True).absolute()
self.existing_file = self.ensure_file(self.tempdir / 'test.py')
self.nonexistent_file = (self.tempdir / 'does_not_exist.py').absolute()
self.reloader = self.RELOADER_CLS()
def tearDown(self):
self._tempdir.cleanup()
self.reloader.stop()
def ensure_file(self, path):
path.parent.mkdir(exist_ok=True, parents=True)
path.touch()
# On Linux and Windows updating the mtime of a file using touch() will set a timestamp
# value that is in the past, as the time value for the last kernel tick is used rather
# than getting the correct absolute time.
# To make testing simpler set the mtime to be the observed time when this function is
# called.
self.set_mtime(path, time.time())
return path.absolute()
def set_mtime(self, fp, value):
os.utime(str(fp), (value, value))
def increment_mtime(self, fp, by=1):
current_time = time.time()
self.set_mtime(fp, current_time + by)
@contextlib.contextmanager
def tick_twice(self):
ticker = self.reloader.tick()
next(ticker)
yield
next(ticker)
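# The reloaders' tick() method is a generator: each next() call takes a
# snapshot of the watched files and notifies about any whose mtime changed
# since the previous snapshot, so tick_twice() above brackets a single change
# with one "before" and one "after" snapshot.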
class IntegrationTests:
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_glob(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'non_py_file')
self.reloader.watch_dir(self.tempdir, '*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_multiple_globs(self, mocked_modules, notify_mock):
self.ensure_file(self.tempdir / 'x.test')
self.reloader.watch_dir(self.tempdir, '*.py')
self.reloader.watch_dir(self.tempdir, '*.test')
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_overlapping_globs(self, mocked_modules, notify_mock):
self.reloader.watch_dir(self.tempdir, '*.py')
self.reloader.watch_dir(self.tempdir, '*.p*')
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_glob_recursive(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'dir' / 'non_py_file')
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_multiple_recursive_globs(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'dir' / 'test.txt')
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.txt')
self.reloader.watch_dir(self.tempdir, '**/*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 2)
self.assertCountEqual(notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_nested_glob_recursive(self, mocked_modules, notify_mock):
inner_py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
self.reloader.watch_dir(inner_py_file.parent, '**/*.py')
with self.tick_twice():
self.increment_mtime(inner_py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [inner_py_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_overlapping_glob_recursive(self, mocked_modules, notify_mock):
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.p*')
self.reloader.watch_dir(self.tempdir, '**/*.py*')
with self.tick_twice():
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
class BaseReloaderTests(ReloaderTests):
RELOADER_CLS = autoreload.BaseReloader
def test_watch_dir_with_unresolvable_path(self):
path = Path('unresolvable_directory')
with mock.patch.object(Path, 'absolute', side_effect=FileNotFoundError):
self.reloader.watch_dir(path, '**/*.mo')
self.assertEqual(list(self.reloader.directory_globs), [])
def test_watch_with_glob(self):
self.reloader.watch_dir(self.tempdir, '*.py')
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
def test_watch_files_with_recursive_glob(self):
inner_file = self.ensure_file(self.tempdir / 'test' / 'test.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
self.assertIn(inner_file, watched_files)
def test_run_loop_catches_stopiteration(self):
def mocked_tick():
yield
with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_run_loop_stop_and_return(self):
def mocked_tick(*args):
yield
self.reloader.stop()
return # Raises StopIteration
with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_wait_for_apps_ready_checks_for_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
# thread.is_alive() is False if it's not started.
dead_thread = threading.Thread()
self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread))
def test_wait_for_apps_ready_without_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
thread = mock.MagicMock()
thread.is_alive.return_value = True
self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread))
def skip_unless_watchman_available():
try:
autoreload.WatchmanReloader.check_availability()
except WatchmanUnavailable as e:
return skip('Watchman unavailable: %s' % e)
return lambda func: func
@skip_unless_watchman_available()
class WatchmanReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.WatchmanReloader
def setUp(self):
super().setUp()
# Shorten the timeout to speed up tests.
self.reloader.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 2))
def test_watch_glob_ignores_non_existing_directories_two_levels(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / 'does_not_exist' / 'more', ['*'])
self.assertFalse(mocked_subscribe.called)
def test_watch_glob_uses_existing_parent_directories(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / 'does_not_exist', ['*'])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir, 'glob-parent-does_not_exist:%s' % self.tempdir,
['anyof', ['match', 'does_not_exist/*', 'wholename']]
]
)
def test_watch_glob_multiple_patterns(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir, ['*', '*.py'])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir, 'glob:%s' % self.tempdir,
['anyof', ['match', '*', 'wholename'], ['match', '*.py', 'wholename']]
]
)
def test_watched_roots_contains_files(self):
paths = self.reloader.watched_roots([self.existing_file])
self.assertIn(self.existing_file.parent, paths)
def test_watched_roots_contains_directory_globs(self):
self.reloader.watch_dir(self.tempdir, '*.py')
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_watched_roots_contains_sys_path(self):
with extend_sys_path(str(self.tempdir)):
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_check_server_status(self):
self.assertTrue(self.reloader.check_server_status())
def test_check_server_status_raises_error(self):
with mock.patch.object(self.reloader.client, 'query') as mocked_query:
mocked_query.side_effect = Exception()
with self.assertRaises(autoreload.WatchmanUnavailable):
self.reloader.check_server_status()
@mock.patch('pywatchman.client')
def test_check_availability(self, mocked_client):
mocked_client().capabilityCheck.side_effect = Exception()
with self.assertRaisesMessage(WatchmanUnavailable, 'Cannot connect to the watchman service'):
self.RELOADER_CLS.check_availability()
@mock.patch('pywatchman.client')
def test_check_availability_lower_version(self, mocked_client):
mocked_client().capabilityCheck.return_value = {'version': '4.8.10'}
with self.assertRaisesMessage(WatchmanUnavailable, 'Watchman 4.9 or later is required.'):
self.RELOADER_CLS.check_availability()
def test_pywatchman_not_available(self):
with mock.patch.object(autoreload, 'pywatchman') as mocked:
mocked.__bool__.return_value = False
with self.assertRaisesMessage(WatchmanUnavailable, 'pywatchman not installed.'):
self.RELOADER_CLS.check_availability()
def test_update_watches_raises_exceptions(self):
class TestException(Exception):
pass
with mock.patch.object(self.reloader, '_update_watches') as mocked_watches:
with mock.patch.object(self.reloader, 'check_server_status') as mocked_server_status:
mocked_watches.side_effect = TestException()
mocked_server_status.return_value = True
with self.assertRaises(TestException):
self.reloader.update_watches()
self.assertIsInstance(mocked_server_status.call_args[0][0], TestException)
@mock.patch.dict(os.environ, {'DJANGO_WATCHMAN_TIMEOUT': '10'})
def test_setting_timeout_from_environment_variable(self):
self.assertEqual(self.RELOADER_CLS().client_timeout, 10)
@skipIf(on_macos_with_hfs(), "These tests do not work with HFS+ as a filesystem")
class StatReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.StatReloader
def setUp(self):
super().setUp()
# Shorten the sleep time to speed up tests.
self.reloader.SLEEP_TIME = 0.01
@mock.patch('django.utils.autoreload.StatReloader.notify_file_changed')
def test_tick_does_not_trigger_twice(self, mock_notify_file_changed):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]):
ticker = self.reloader.tick()
next(ticker)
self.increment_mtime(self.existing_file)
next(ticker)
next(ticker)
self.assertEqual(mock_notify_file_changed.call_count, 1)
def test_snapshot_files_ignores_missing_files(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistent_file]):
self.assertEqual(dict(self.reloader.snapshot_files()), {})
def test_snapshot_files_updates(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]):
snapshot1 = dict(self.reloader.snapshot_files())
self.assertIn(self.existing_file, snapshot1)
self.increment_mtime(self.existing_file)
snapshot2 = dict(self.reloader.snapshot_files())
self.assertNotEqual(snapshot1[self.existing_file], snapshot2[self.existing_file])
def test_snapshot_files_with_duplicates(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file, self.existing_file]):
snapshot = list(self.reloader.snapshot_files())
self.assertEqual(len(snapshot), 1)
self.assertEqual(snapshot[0][0], self.existing_file)
|
cd48c4b4a1caaf94e2407ef0a4f3e8e5d6bcdf23d1b132e1567d18e4ff93dc33 | from datetime import date as original_date, datetime as original_datetime
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
with ignore_warnings(category=RemovedInDjango50Warning):
from django.utils.datetime_safe import date, datetime
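# datetime_safe provides date/datetime subclasses whose strftime() also
# handles years before 1900 (and before 1000, except for %y, which still
# requires year >= 1000); the module is deprecated, hence the
# RemovedInDjango50Warning suppression above. As the tests below demonstrate:
#   date(1850, 8, 2).strftime('%Y/%m/%d was a %A')  -> '1850/08/02 was a Friday'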
class DatetimeTests(SimpleTestCase):
def setUp(self):
self.percent_y_safe = (1900, 1, 1) # >= 1900 required on Windows.
self.just_safe = (1000, 1, 1)
self.just_unsafe = (999, 12, 31, 23, 59, 59)
self.really_old = (20, 1, 1)
self.more_recent = (2006, 1, 1)
def test_compare_datetimes(self):
self.assertEqual(original_datetime(*self.more_recent), datetime(*self.more_recent))
self.assertEqual(original_datetime(*self.really_old), datetime(*self.really_old))
self.assertEqual(original_date(*self.more_recent), date(*self.more_recent))
self.assertEqual(original_date(*self.really_old), date(*self.really_old))
self.assertEqual(
original_date(*self.just_safe).strftime('%Y-%m-%d'), date(*self.just_safe).strftime('%Y-%m-%d')
)
self.assertEqual(
original_datetime(*self.just_safe).strftime('%Y-%m-%d'), datetime(*self.just_safe).strftime('%Y-%m-%d')
)
def test_safe_strftime(self):
self.assertEqual(date(*self.just_unsafe[:3]).strftime('%Y-%m-%d (weekday %w)'), '0999-12-31 (weekday 2)')
self.assertEqual(date(*self.just_safe).strftime('%Y-%m-%d (weekday %w)'), '1000-01-01 (weekday 3)')
self.assertEqual(
datetime(*self.just_unsafe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'), '0999-12-31 23:59:59 (weekday 2)'
)
self.assertEqual(
datetime(*self.just_safe).strftime('%Y-%m-%d %H:%M:%S (weekday %w)'), '1000-01-01 00:00:00 (weekday 3)'
)
# %y will error before this date
self.assertEqual(date(*self.percent_y_safe).strftime('%y'), '00')
self.assertEqual(datetime(*self.percent_y_safe).strftime('%y'), '00')
with self.assertRaisesMessage(TypeError, 'strftime of dates before 1000 does not handle %y'):
datetime(*self.just_unsafe).strftime('%y')
self.assertEqual(date(1850, 8, 2).strftime("%Y/%m/%d was a %A"), '1850/08/02 was a Friday')
def test_zero_padding(self):
"""
Regression for #12524
Pre-1000AD dates are padded with zeros if necessary
"""
self.assertEqual(date(1, 1, 1).strftime("%Y/%m/%d was a %A"), '0001/01/01 was a Monday')
|
96123eb4605d9a6d62967814c4ec4fa43b45ca6cbfd0d8fa0ac9a4b6560efd88 | import platform
import unittest
from datetime import datetime, timezone
from unittest import mock
from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import (
base36_to_int, escape_leading_slashes, http_date, int_to_base36,
is_same_domain, parse_etags, parse_http_date, quote_etag,
url_has_allowed_host_and_scheme, urlencode, urlsafe_base64_decode,
urlsafe_base64_encode,
)
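# A few reference values for the helpers exercised below, all taken from the
# assertions in this module: base36 maps non-negative integers onto [0-9a-z]
# strings, and urlencode() with doseq=True expands sequences into repeated
# keys, e.g.:
#   int_to_base36(42)                     -> '16'
#   base36_to_int('django')               -> 818469960
#   urlencode({'a': [1, 2]}, doseq=True)  -> 'a=1&a=2'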
class URLEncodeTests(SimpleTestCase):
cannot_encode_none_msg = (
"Cannot encode None for key 'a' in a query string. Did you mean to "
"pass an empty string or omit the value?"
)
def test_tuples(self):
self.assertEqual(urlencode((('a', 1), ('b', 2), ('c', 3))), 'a=1&b=2&c=3')
def test_dict(self):
result = urlencode({'a': 1, 'b': 2, 'c': 3})
# Dictionaries are treated as unordered.
self.assertIn(result, [
'a=1&b=2&c=3',
'a=1&c=3&b=2',
'b=2&a=1&c=3',
'b=2&c=3&a=1',
'c=3&a=1&b=2',
'c=3&b=2&a=1',
])
def test_dict_containing_sequence_not_doseq(self):
self.assertEqual(urlencode({'a': [1, 2]}, doseq=False), 'a=%5B1%2C+2%5D')
def test_dict_containing_tuple_not_doseq(self):
self.assertEqual(urlencode({'a': (1, 2)}, doseq=False), 'a=%281%2C+2%29')
def test_custom_iterable_not_doseq(self):
class IterableWithStr:
def __str__(self):
return 'custom'
def __iter__(self):
yield from range(0, 3)
self.assertEqual(urlencode({'a': IterableWithStr()}, doseq=False), 'a=custom')
def test_dict_containing_sequence_doseq(self):
self.assertEqual(urlencode({'a': [1, 2]}, doseq=True), 'a=1&a=2')
def test_dict_containing_empty_sequence_doseq(self):
self.assertEqual(urlencode({'a': []}, doseq=True), '')
def test_multivaluedict(self):
result = urlencode(MultiValueDict({
'name': ['Adrian', 'Simon'],
'position': ['Developer'],
}), doseq=True)
# MultiValueDicts are similarly unordered.
self.assertIn(result, [
'name=Adrian&name=Simon&position=Developer',
'position=Developer&name=Adrian&name=Simon',
])
def test_dict_with_bytes_values(self):
self.assertEqual(urlencode({'a': b'abc'}, doseq=True), 'a=abc')
def test_dict_with_sequence_of_bytes(self):
self.assertEqual(urlencode({'a': [b'spam', b'eggs', b'bacon']}, doseq=True), 'a=spam&a=eggs&a=bacon')
def test_dict_with_bytearray(self):
self.assertEqual(urlencode({'a': bytearray(range(2))}, doseq=True), 'a=0&a=1')
def test_generator(self):
self.assertEqual(urlencode({'a': range(2)}, doseq=True), 'a=0&a=1')
self.assertEqual(urlencode({'a': range(2)}, doseq=False), 'a=range%280%2C+2%29')
def test_none(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': None})
def test_none_in_sequence(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': [None]}, doseq=True)
def test_none_in_generator(self):
def gen():
yield None
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': gen()}, doseq=True)
class Base36IntTests(SimpleTestCase):
def test_roundtrip(self):
for n in [0, 1, 1000, 1000000]:
self.assertEqual(n, base36_to_int(int_to_base36(n)))
def test_negative_input(self):
with self.assertRaisesMessage(ValueError, 'Negative base36 conversion input.'):
int_to_base36(-1)
def test_to_base36_errors(self):
for n in ['1', 'foo', {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
int_to_base36(n)
def test_invalid_literal(self):
for n in ['#', ' ']:
with self.assertRaisesMessage(ValueError, "invalid literal for int() with base 36: '%s'" % n):
base36_to_int(n)
def test_input_too_large(self):
with self.assertRaisesMessage(ValueError, 'Base36 input too large'):
base36_to_int('1' * 14)
def test_to_int_errors(self):
for n in [123, {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
base36_to_int(n)
def test_values(self):
for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]:
self.assertEqual(int_to_base36(n), b36)
self.assertEqual(base36_to_int(b36), n)
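# url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False)
# returns True only for relative paths or for absolute URLs whose host is in
# allowed_hosts and whose scheme is http or https (only https when
# require_https=True); illustrative calls matching the cases below:
#   url_has_allowed_host_and_scheme('https://testserver/', allowed_hosts={'otherserver', 'testserver'})  -> True
#   url_has_allowed_host_and_scheme('ftp://example.com', allowed_hosts={'testserver', 'testserver2'})    -> False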
class URLHasAllowedHostAndSchemeTests(unittest.TestCase):
def test_bad_urls(self):
bad_urls = (
'http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
r'\\example.com',
r'\\\example.com',
r'/\\/example.com',
r'\\\example.com',
r'\\example.com',
r'\\//example.com',
r'/\/example.com',
r'\/example.com',
r'/\example.com',
'http:///example.com',
r'http:/\//example.com',
r'http:\/example.com',
r'http:/\example.com',
'javascript:alert("XSS")',
'\njavascript:alert(x)',
'\x08//example.com',
r'http://otherserver\@example.com',
r'http:\\testserver\@example.com',
r'http://testserver\me:[email protected]',
r'http://testserver\@example.com',
r'http:\\testserver\confirm\[email protected]',
'http:999999999',
'ftp:9999999999',
'\n',
'http://[2001:cdba:0000:0000:0000:0000:3257:9652/',
'http://2001:cdba:0000:0000:0000:0000:3257:9652]/',
)
for bad_url in bad_urls:
with self.subTest(url=bad_url):
self.assertIs(
url_has_allowed_host_and_scheme(bad_url, allowed_hosts={'testserver', 'testserver2'}),
False,
)
def test_good_urls(self):
good_urls = (
'/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'http://testserver/[email protected]',
'/url%20with%20spaces/',
'path/http:2222222222',
)
for good_url in good_urls:
with self.subTest(url=good_url):
self.assertIs(
url_has_allowed_host_and_scheme(good_url, allowed_hosts={'otherserver', 'testserver'}),
True,
)
def test_basic_auth(self):
# Valid basic auth credentials are allowed.
self.assertIs(
url_has_allowed_host_and_scheme(r'http://user:pass@testserver/', allowed_hosts={'user:pass@testserver'}),
True,
)
def test_no_allowed_hosts(self):
# A path without host is allowed.
self.assertIs(url_has_allowed_host_and_scheme('/confirm/[email protected]', allowed_hosts=None), True)
# Basic auth without host is not allowed.
self.assertIs(url_has_allowed_host_and_scheme(r'http://testserver\@example.com', allowed_hosts=None), False)
def test_allowed_hosts_str(self):
self.assertIs(url_has_allowed_host_and_scheme('http://good.com/good', allowed_hosts='good.com'), True)
self.assertIs(url_has_allowed_host_and_scheme('http://good.co/evil', allowed_hosts='good.com'), False)
def test_secure_param_https_urls(self):
secure_urls = (
'https://example.com/p',
'HTTPS://example.com/p',
'/view/?param=http://example.com',
)
for url in secure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(url, allowed_hosts={'example.com'}, require_https=True),
True,
)
def test_secure_param_non_https_urls(self):
insecure_urls = (
'http://example.com/p',
'ftp://example.com/p',
'//example.com/p',
)
for url in insecure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(url, allowed_hosts={'example.com'}, require_https=True),
False,
)
class URLSafeBase64Tests(unittest.TestCase):
def test_roundtrip(self):
bytestring = b'foo'
encoded = urlsafe_base64_encode(bytestring)
decoded = urlsafe_base64_decode(encoded)
self.assertEqual(bytestring, decoded)
class IsSameDomainTests(unittest.TestCase):
def test_good(self):
for pair in (
('example.com', 'example.com'),
('example.com', '.example.com'),
('foo.example.com', '.example.com'),
('example.com:8888', 'example.com:8888'),
('example.com:8888', '.example.com:8888'),
('foo.example.com:8888', '.example.com:8888'),
):
self.assertIs(is_same_domain(*pair), True)
def test_bad(self):
for pair in (
('example2.com', 'example.com'),
('foo.example.com', 'example.com'),
('example.com:9999', 'example.com:8888'),
('foo.example.com:8888', ''),
):
self.assertIs(is_same_domain(*pair), False)
class ETagProcessingTests(unittest.TestCase):
def test_parsing(self):
self.assertEqual(
parse_etags(r'"" , "etag", "e\\tag", W/"weak"'),
['""', '"etag"', r'"e\\tag"', 'W/"weak"']
)
self.assertEqual(parse_etags('*'), ['*'])
# Ignore RFC 2616 ETags that are invalid according to RFC 7232.
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"'])
def test_quoting(self):
self.assertEqual(quote_etag('etag'), '"etag"') # unquoted
self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted
self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak
class HttpDateProcessingTests(unittest.TestCase):
def test_http_date(self):
t = 1167616461.0
self.assertEqual(http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT')
def test_parsing_rfc1123(self):
parsed = parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT')
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
@unittest.skipIf(platform.architecture()[0] == '32bit', 'The Year 2038 problem.')
@mock.patch('django.utils.http.datetime.datetime')
def test_parsing_rfc850(self, mocked_datetime):
mocked_datetime.side_effect = datetime
mocked_datetime.now = mock.Mock()
now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=timezone.utc)
tests = (
(now_1, 'Tuesday, 31-Dec-69 08:49:37 GMT', datetime(2069, 12, 31, 8, 49, 37, tzinfo=timezone.utc)),
(now_1, 'Tuesday, 10-Nov-70 08:49:37 GMT', datetime(1970, 11, 10, 8, 49, 37, tzinfo=timezone.utc)),
(now_1, 'Sunday, 06-Nov-94 08:49:37 GMT', datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc)),
(now_2, 'Wednesday, 31-Dec-70 08:49:37 GMT', datetime(2070, 12, 31, 8, 49, 37, tzinfo=timezone.utc)),
(now_2, 'Friday, 31-Dec-71 08:49:37 GMT', datetime(1971, 12, 31, 8, 49, 37, tzinfo=timezone.utc)),
(now_3, 'Sunday, 31-Dec-00 08:49:37 GMT', datetime(2000, 12, 31, 8, 49, 37, tzinfo=timezone.utc)),
(now_3, 'Friday, 31-Dec-99 08:49:37 GMT', datetime(1999, 12, 31, 8, 49, 37, tzinfo=timezone.utc)),
)
for now, rfc850str, expected_date in tests:
with self.subTest(rfc850str=rfc850str):
mocked_datetime.now.return_value = now
parsed = parse_http_date(rfc850str)
mocked_datetime.now.assert_called_once_with(tz=timezone.utc)
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
expected_date,
)
mocked_datetime.reset_mock()
def test_parsing_asctime(self):
parsed = parse_http_date('Sun Nov 6 08:49:37 1994')
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
def test_parsing_year_less_than_70(self):
parsed = parse_http_date('Sun Nov 6 08:49:37 0037')
self.assertEqual(
datetime.fromtimestamp(parsed, timezone.utc),
datetime(2037, 11, 6, 8, 49, 37, tzinfo=timezone.utc),
)
class EscapeLeadingSlashesTests(unittest.TestCase):
def test(self):
tests = (
('//example.com', '/%2Fexample.com'),
('//', '/%2F'),
)
for url, expected in tests:
with self.subTest(url=url):
self.assertEqual(escape_leading_slashes(url), expected)
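# Illustrative sketch, not part of Django's test suite: is_same_domain() is
# exercised above with both exact hosts and dot-prefixed patterns (a leading
# dot matches the domain and any subdomain). The helper name below is
# hypothetical; it only assumes is_same_domain is importable at module scope,
# as the tests above already do.
def _host_matches_any(host, allowed_patterns):
    """Return True if `host` matches at least one allowed pattern."""
    return any(is_same_domain(host, pattern) for pattern in allowed_patterns)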
|
cf5d1513fcf218c3e4ef4b6560edc89543f8d79331e06a57b2c1c23f24c16bf5 | import os
import stat
import sys
import tempfile
import unittest
from django.core.exceptions import SuspiciousOperation
from django.test import SimpleTestCase
from django.utils import archive
try:
import bz2 # NOQA
HAS_BZ2 = True
except ImportError:
HAS_BZ2 = False
try:
import lzma # NOQA
HAS_LZMA = True
except ImportError:
HAS_LZMA = False
class TestArchive(unittest.TestCase):
def setUp(self):
self.testdir = os.path.join(os.path.dirname(__file__), 'archives')
self.old_cwd = os.getcwd()
os.chdir(self.testdir)
def tearDown(self):
os.chdir(self.old_cwd)
def test_extract_function(self):
with os.scandir(self.testdir) as entries:
for entry in entries:
with self.subTest(entry.name), tempfile.TemporaryDirectory() as tmpdir:
if (
(entry.name.endswith('.bz2') and not HAS_BZ2) or
(entry.name.endswith(('.lzma', '.xz')) and not HAS_LZMA)
):
continue
archive.extract(entry.path, tmpdir)
self.assertTrue(os.path.isfile(os.path.join(tmpdir, '1')))
self.assertTrue(os.path.isfile(os.path.join(tmpdir, '2')))
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'foo', '1')))
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'foo', '2')))
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'foo', 'bar', '1')))
self.assertTrue(os.path.isfile(os.path.join(tmpdir, 'foo', 'bar', '2')))
@unittest.skipIf(sys.platform == 'win32', 'Python on Windows has a limited os.chmod().')
def test_extract_file_permissions(self):
"""archive.extract() preserves file permissions."""
mask = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
umask = os.umask(0)
os.umask(umask) # Restore the original umask.
with os.scandir(self.testdir) as entries:
for entry in entries:
if (
entry.name.startswith('leadpath_') or
(entry.name.endswith('.bz2') and not HAS_BZ2) or
(entry.name.endswith(('.lzma', '.xz')) and not HAS_LZMA)
):
continue
with self.subTest(entry.name), tempfile.TemporaryDirectory() as tmpdir:
archive.extract(entry.path, tmpdir)
# An executable file in the archive has executable
# permissions.
filepath = os.path.join(tmpdir, 'executable')
self.assertEqual(os.stat(filepath).st_mode & mask, 0o775)
# A file is readable even if permission data is missing.
filepath = os.path.join(tmpdir, 'no_permissions')
self.assertEqual(os.stat(filepath).st_mode & mask, 0o666 & ~umask)
class TestArchiveInvalid(SimpleTestCase):
def test_extract_function_traversal(self):
archives_dir = os.path.join(os.path.dirname(__file__), 'traversal_archives')
tests = [
('traversal.tar', '..'),
('traversal_absolute.tar', '/tmp/evil.py'),
]
if sys.platform == 'win32':
tests += [
('traversal_disk_win.tar', 'd:evil.py'),
('traversal_disk_win.zip', 'd:evil.py'),
]
msg = "Archive contains invalid path: '%s'"
for entry, invalid_path in tests:
with self.subTest(entry), tempfile.TemporaryDirectory() as tmpdir:
with self.assertRaisesMessage(SuspiciousOperation, msg % invalid_path):
archive.extract(os.path.join(archives_dir, entry), tmpdir)
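# Illustrative sketch, not exercised by the tests above: the call pattern that
# TestArchiveInvalid verifies, namely that archive.extract() refuses entries
# escaping the destination directory by raising SuspiciousOperation. The
# helper name is hypothetical.
def _extract_untrusted(archive_path, dest_dir):
    """Extract an archive, surfacing path-traversal attempts to the caller."""
    try:
        archive.extract(archive_path, dest_dir)
    except SuspiciousOperation:
        # The archive contained an invalid path such as '..' or an absolute
        # path; nothing has been written outside dest_dir.
        raise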
|
a9f28fc8f21c6bca35accbb808a8563917284aa790b7c899dac8b8e1e61e6a42 | from unittest import TestCase
from django.test import ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
with ignore_warnings(category=RemovedInDjango50Warning):
from django.utils.baseconv import (
BaseConverter, base2, base16, base36, base56, base62, base64,
)
# RemovedInDjango50Warning
class TestBaseConv(TestCase):
def test_baseconv(self):
nums = [-10 ** 10, 10 ** 10, *range(-100, 100)]
for converter in [base2, base16, base36, base56, base62, base64]:
for i in nums:
self.assertEqual(i, converter.decode(converter.encode(i)))
def test_base11(self):
base11 = BaseConverter('0123456789-', sign='$')
self.assertEqual(base11.encode(1234), '-22')
self.assertEqual(base11.decode('-22'), 1234)
self.assertEqual(base11.encode(-1234), '$-22')
self.assertEqual(base11.decode('$-22'), -1234)
def test_base20(self):
base20 = BaseConverter('0123456789abcdefghij')
self.assertEqual(base20.encode(1234), '31e')
self.assertEqual(base20.decode('31e'), 1234)
self.assertEqual(base20.encode(-1234), '-31e')
self.assertEqual(base20.decode('-31e'), -1234)
def test_base64(self):
self.assertEqual(base64.encode(1234), 'JI')
self.assertEqual(base64.decode('JI'), 1234)
self.assertEqual(base64.encode(-1234), '$JI')
self.assertEqual(base64.decode('$JI'), -1234)
def test_base7(self):
base7 = BaseConverter('cjdhel3', sign='g')
self.assertEqual(base7.encode(1234), 'hejd')
self.assertEqual(base7.decode('hejd'), 1234)
self.assertEqual(base7.encode(-1234), 'ghejd')
self.assertEqual(base7.decode('ghejd'), -1234)
def test_exception(self):
with self.assertRaises(ValueError):
BaseConverter('abc', sign='a')
self.assertIsInstance(BaseConverter('abc', sign='d'), BaseConverter)
def test_repr(self):
base7 = BaseConverter('cjdhel3', sign='g')
self.assertEqual(repr(base7), '<BaseConverter: base7 (cjdhel3)>')
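# Illustrative sketch, not part of the tests above: BaseConverter accepts any
# digit alphabet plus a sign character that is not in the alphabet, exactly as
# the base7/base11/base20 cases demonstrate. The Crockford-style base-32
# alphabet below is an arbitrary example.
_example_base32 = BaseConverter('0123456789ABCDEFGHJKMNPQRSTVWXYZ', sign='-')
# It round-trips the same way as the converters tested above, e.g.
# _example_base32.decode(_example_base32.encode(1234)) == 1234.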
|
3ecc057762354595b9c8e3afe598d5e12bb25d3d993b6c4cbc2276b594d386aa | import json
import sys
from django.core.exceptions import SuspiciousFileOperation
from django.test import SimpleTestCase
from django.utils import text
from django.utils.functional import lazystr
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy, override
IS_WIDE_BUILD = (len('\U0001F4A9') == 1)
class TestUtilsText(SimpleTestCase):
def test_get_text_list(self):
self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d')
self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c')
self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b')
self.assertEqual(text.get_text_list(['a']), 'a')
self.assertEqual(text.get_text_list([]), '')
with override('ar'):
self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c")
def test_smart_split(self):
testdata = [
('This is "a person" test.',
['This', 'is', '"a person"', 'test.']),
('This is "a person\'s" test.',
['This', 'is', '"a person\'s"', 'test.']),
('This is "a person\\"s" test.',
['This', 'is', '"a person\\"s"', 'test.']),
('"a \'one',
['"a', "'one"]),
('all friends\' tests',
['all', 'friends\'', 'tests']),
('url search_page words="something else"',
['url', 'search_page', 'words="something else"']),
("url search_page words='something else'",
['url', 'search_page', "words='something else'"]),
('url search_page words "something else"',
['url', 'search_page', 'words', '"something else"']),
('url search_page words-"something else"',
['url', 'search_page', 'words-"something else"']),
('url search_page words=hello',
['url', 'search_page', 'words=hello']),
('url search_page words="something else',
['url', 'search_page', 'words="something', 'else']),
("cut:','|cut:' '",
["cut:','|cut:' '"]),
(lazystr("a b c d"), # Test for #20231
['a', 'b', 'c', 'd']),
]
for test, expected in testdata:
with self.subTest(value=test):
self.assertEqual(list(text.smart_split(test)), expected)
def test_truncate_chars(self):
truncator = text.Truncator('The quick brown fox jumped over the lazy dog.')
        self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.chars(100))
        self.assertEqual('The quick brown fox …', truncator.chars(21))
        self.assertEqual('The quick brown fo.....', truncator.chars(23, '.....'))
        self.assertEqual('.....', truncator.chars(4, '.....'))
nfc = text.Truncator('o\xfco\xfco\xfco\xfc')
nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308')
self.assertEqual('oüoüoüoü', nfc.chars(8))
self.assertEqual('oüoüoüoü', nfd.chars(8))
self.assertEqual('oü…', nfc.chars(3))
self.assertEqual('oü…', nfd.chars(3))
# Ensure the final length is calculated correctly when there are
# combining characters with no precomposed form, and that combining
# characters are not split up.
truncator = text.Truncator('-B\u030AB\u030A----8')
self.assertEqual('-B\u030A…', truncator.chars(3))
self.assertEqual('-B\u030AB\u030A-…', truncator.chars(5))
self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8))
# Ensure the length of the end text is correctly calculated when it
# contains combining characters with no precomposed form.
truncator = text.Truncator('-----')
self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A'))
self.assertEqual('-----', truncator.chars(5, 'B\u030A'))
# Make a best effort to shorten to the desired length, but requesting
# a length shorter than the ellipsis shouldn't break
self.assertEqual('…', text.Truncator('asdf').chars(0))
# lazy strings are handled correctly
self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…')
def test_truncate_chars_html(self):
perf_test_values = [
(('</a' + '\t' * 50000) + '//>', None),
('&' * 50000, '&' * 9 + '…'),
('_X<<<<<<<<<<<>', None),
]
for value, expected in perf_test_values:
with self.subTest(value=value):
truncator = text.Truncator(value)
self.assertEqual(expected if expected else value, truncator.chars(10, html=True))
def test_truncate_words(self):
truncator = text.Truncator('The quick brown fox jumped over the lazy dog.')
self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.words(10))
self.assertEqual('The quick brown fox…', truncator.words(4))
self.assertEqual('The quick brown fox[snip]', truncator.words(4, '[snip]'))
# lazy strings are handled correctly
truncator = text.Truncator(lazystr('The quick brown fox jumped over the lazy dog.'))
self.assertEqual('The quick brown fox…', truncator.words(4))
def test_truncate_html_words(self):
truncator = text.Truncator(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>'
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>',
truncator.words(10, html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox…</em></strong></p>',
truncator.words(4, html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox....</em></strong></p>',
truncator.words(4, '....', html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox</em></strong></p>',
truncator.words(4, '', html=True)
)
# Test with new line inside tag
truncator = text.Truncator(
'<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over the lazy dog.</p>'
)
self.assertEqual(
'<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>',
truncator.words(3, html=True)
)
# Test self-closing tags
truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over the lazy dog.')
self.assertEqual('<br/>The <hr />quick brown…', truncator.words(3, html=True))
truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog.')
self.assertEqual('<br>The <hr/>quick <em>brown…</em>', truncator.words(3, html=True))
# Test html entities
truncator = text.Truncator('<i>Buenos días! ¿Cómo está?</i>')
self.assertEqual('<i>Buenos días! ¿Cómo…</i>', truncator.words(3, html=True))
truncator = text.Truncator('<p>I <3 python, what about you?</p>')
self.assertEqual('<p>I <3 python,…</p>', truncator.words(3, html=True))
perf_test_values = [
('</a' + '\t' * 50000) + '//>',
'&' * 50000,
'_X<<<<<<<<<<<>',
]
for value in perf_test_values:
with self.subTest(value=value):
truncator = text.Truncator(value)
self.assertEqual(value, truncator.words(50, html=True))
def test_wrap(self):
digits = '1234 67 9'
self.assertEqual(text.wrap(digits, 100), '1234 67 9')
self.assertEqual(text.wrap(digits, 9), '1234 67 9')
self.assertEqual(text.wrap(digits, 8), '1234 67\n9')
self.assertEqual(text.wrap('short\na long line', 7), 'short\na long\nline')
self.assertEqual(text.wrap('do-not-break-long-words please? ok', 8), 'do-not-break-long-words\nplease?\nok')
long_word = 'l%sng' % ('o' * 20)
self.assertEqual(text.wrap(long_word, 20), long_word)
self.assertEqual(text.wrap('a %s word' % long_word, 10), 'a\n%s\nword' % long_word)
self.assertEqual(text.wrap(lazystr(digits), 100), '1234 67 9')
def test_normalize_newlines(self):
self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n")
self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n")
self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi")
self.assertEqual(text.normalize_newlines(""), "")
self.assertEqual(text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n")
def test_phone2numeric(self):
numeric = text.phone2numeric('0800 flowers')
self.assertEqual(numeric, '0800 3569377')
lazy_numeric = lazystr(text.phone2numeric('0800 flowers'))
self.assertEqual(lazy_numeric, '0800 3569377')
def test_slugify(self):
items = (
# given - expected - Unicode?
('Hello, World!', 'hello-world', False),
('spam & eggs', 'spam-eggs', False),
(' multiple---dash and space ', 'multiple-dash-and-space', False),
('\t whitespace-in-value \n', 'whitespace-in-value', False),
('underscore_in-value', 'underscore_in-value', False),
('__strip__underscore-value___', 'strip__underscore-value', False),
('--strip-dash-value---', 'strip-dash-value', False),
('__strip-mixed-value---', 'strip-mixed-value', False),
('_ -strip-mixed-value _-', 'strip-mixed-value', False),
('spam & ıçüş', 'spam-ıçüş', True),
('foo ıç bar', 'foo-ıç-bar', True),
(' foo ıç bar', 'foo-ıç-bar', True),
('你好', '你好', True),
('İstanbul', 'istanbul', True),
)
for value, output, is_unicode in items:
with self.subTest(value=value):
self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output)
# Interning the result may be useful, e.g. when fed to Path.
with self.subTest('intern'):
self.assertEqual(sys.intern(text.slugify('a')), 'a')
def test_unescape_string_literal(self):
items = [
('"abc"', 'abc'),
("'abc'", 'abc'),
('"a \"bc\""', 'a "bc"'),
("'\'ab\' c'", "'ab' c"),
]
for value, output in items:
with self.subTest(value=value):
self.assertEqual(text.unescape_string_literal(value), output)
self.assertEqual(text.unescape_string_literal(lazystr(value)), output)
def test_get_valid_filename(self):
filename = "^&'@{}[],$=!-#()%+~_123.txt"
self.assertEqual(text.get_valid_filename(filename), "-_123.txt")
self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt")
msg = "Could not derive file name from '???'"
with self.assertRaisesMessage(SuspiciousFileOperation, msg):
text.get_valid_filename('???')
# After sanitizing this would yield '..'.
msg = "Could not derive file name from '$.$.$'"
with self.assertRaisesMessage(SuspiciousFileOperation, msg):
text.get_valid_filename('$.$.$')
def test_compress_sequence(self):
data = [{'key': i} for i in range(10)]
seq = list(json.JSONEncoder().iterencode(data))
seq = [s.encode() for s in seq]
actual_length = len(b''.join(seq))
out = text.compress_sequence(seq)
compressed_length = len(b''.join(out))
self.assertLess(compressed_length, actual_length)
def test_format_lazy(self):
self.assertEqual('django/test', format_lazy('{}/{}', 'django', lazystr('test')))
self.assertEqual('django/test', format_lazy('{0}/{1}', *('django', 'test')))
self.assertEqual('django/test', format_lazy('{a}/{b}', **{'a': 'django', 'b': 'test'}))
self.assertEqual('django/test', format_lazy('{a[0]}/{a[1]}', a=('django', 'test')))
t = {}
s = format_lazy('{0[a]}-{p[a]}', t, p=t)
t['a'] = lazystr('django')
self.assertEqual('django-django', s)
t['a'] = 'update'
self.assertEqual('update-update', s)
# The format string can be lazy. (string comes from contrib.admin)
s = format_lazy(
gettext_lazy('Added {name} “{object}”.'),
name='article', object='My first try',
)
with override('fr'):
self.assertEqual('Ajout de article «\xa0My first try\xa0».', s)
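# Illustrative sketch, not exercised above: the Truncator API covered by
# TestUtilsText combines word/char truncation with optional HTML awareness.
# The helper below is hypothetical and simply names that common call pattern.
def _teaser(value, num_words=20, ellipsis='…'):
    """Return an HTML-aware teaser of at most `num_words` words."""
    return text.Truncator(value).words(num_words, ellipsis, html=True)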
|
02fa97c589f06005933b2821c41cac4ea0bdd2c60e70248c2036289617a9f9bf | from unittest import mock
from django.conf import settings
from django.db import connection, models
from django.db.models.functions import Lower, Upper
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import isolate_apps
from .models import Book, ChildModel1, ChildModel2
class SimpleIndexesTests(SimpleTestCase):
def test_suffix(self):
self.assertEqual(models.Index.suffix, 'idx')
def test_repr(self):
index = models.Index(fields=['title'])
named_index = models.Index(fields=['title'], name='title_idx')
multi_col_index = models.Index(fields=['title', 'author'])
partial_index = models.Index(fields=['title'], name='long_books_idx', condition=models.Q(pages__gt=400))
covering_index = models.Index(
fields=['title'],
name='include_idx',
include=['author', 'pages'],
)
opclasses_index = models.Index(
fields=['headline', 'body'],
name='opclasses_idx',
opclasses=['varchar_pattern_ops', 'text_pattern_ops'],
)
func_index = models.Index(Lower('title'), 'subtitle', name='book_func_idx')
tablespace_index = models.Index(
fields=['title'],
db_tablespace='idx_tbls',
name='book_tablespace_idx',
)
self.assertEqual(repr(index), "<Index: fields=['title']>")
self.assertEqual(
repr(named_index),
"<Index: fields=['title'] name='title_idx'>",
)
self.assertEqual(repr(multi_col_index), "<Index: fields=['title', 'author']>")
self.assertEqual(
repr(partial_index),
"<Index: fields=['title'] name='long_books_idx' "
"condition=(AND: ('pages__gt', 400))>",
)
self.assertEqual(
repr(covering_index),
"<Index: fields=['title'] name='include_idx' "
"include=('author', 'pages')>",
)
self.assertEqual(
repr(opclasses_index),
"<Index: fields=['headline', 'body'] name='opclasses_idx' "
"opclasses=['varchar_pattern_ops', 'text_pattern_ops']>",
)
self.assertEqual(
repr(func_index),
"<Index: expressions=(Lower(F(title)), F(subtitle)) "
"name='book_func_idx'>",
)
self.assertEqual(
repr(tablespace_index),
"<Index: fields=['title'] name='book_tablespace_idx' "
"db_tablespace='idx_tbls'>",
)
def test_eq(self):
index = models.Index(fields=['title'])
same_index = models.Index(fields=['title'])
another_index = models.Index(fields=['title', 'author'])
index.model = Book
same_index.model = Book
another_index.model = Book
self.assertEqual(index, same_index)
self.assertEqual(index, mock.ANY)
self.assertNotEqual(index, another_index)
def test_eq_func(self):
index = models.Index(Lower('title'), models.F('author'), name='book_func_idx')
same_index = models.Index(Lower('title'), 'author', name='book_func_idx')
another_index = models.Index(Lower('title'), name='book_func_idx')
self.assertEqual(index, same_index)
self.assertEqual(index, mock.ANY)
self.assertNotEqual(index, another_index)
def test_index_fields_type(self):
with self.assertRaisesMessage(ValueError, 'Index.fields must be a list or tuple.'):
models.Index(fields='title')
def test_index_fields_strings(self):
msg = 'Index.fields must contain only strings with field names.'
with self.assertRaisesMessage(ValueError, msg):
models.Index(fields=[models.F('title')])
def test_fields_tuple(self):
self.assertEqual(models.Index(fields=('title',)).fields, ['title'])
def test_requires_field_or_expression(self):
msg = 'At least one field or expression is required to define an index.'
with self.assertRaisesMessage(ValueError, msg):
models.Index()
def test_expressions_and_fields_mutually_exclusive(self):
msg = "Index.fields and expressions are mutually exclusive."
with self.assertRaisesMessage(ValueError, msg):
models.Index(Upper('foo'), fields=['field'])
def test_opclasses_requires_index_name(self):
with self.assertRaisesMessage(ValueError, 'An index must be named to use opclasses.'):
models.Index(opclasses=['jsonb_path_ops'])
def test_opclasses_requires_list_or_tuple(self):
with self.assertRaisesMessage(ValueError, 'Index.opclasses must be a list or tuple.'):
models.Index(name='test_opclass', fields=['field'], opclasses='jsonb_path_ops')
def test_opclasses_and_fields_same_length(self):
msg = 'Index.fields and Index.opclasses must have the same number of elements.'
with self.assertRaisesMessage(ValueError, msg):
models.Index(name='test_opclass', fields=['field', 'other'], opclasses=['jsonb_path_ops'])
def test_condition_requires_index_name(self):
with self.assertRaisesMessage(ValueError, 'An index must be named to use condition.'):
models.Index(condition=models.Q(pages__gt=400))
def test_expressions_requires_index_name(self):
msg = 'An index must be named to use expressions.'
with self.assertRaisesMessage(ValueError, msg):
models.Index(Lower('field'))
def test_expressions_with_opclasses(self):
msg = (
'Index.opclasses cannot be used with expressions. Use '
'django.contrib.postgres.indexes.OpClass() instead.'
)
with self.assertRaisesMessage(ValueError, msg):
models.Index(
Lower('field'),
name='test_func_opclass',
opclasses=['jsonb_path_ops'],
)
def test_condition_must_be_q(self):
with self.assertRaisesMessage(ValueError, 'Index.condition must be a Q instance.'):
models.Index(condition='invalid', name='long_book_idx')
def test_include_requires_list_or_tuple(self):
msg = 'Index.include must be a list or tuple.'
with self.assertRaisesMessage(ValueError, msg):
models.Index(name='test_include', fields=['field'], include='other')
def test_include_requires_index_name(self):
msg = 'A covering index must be named.'
with self.assertRaisesMessage(ValueError, msg):
models.Index(fields=['field'], include=['other'])
def test_name_auto_generation(self):
index = models.Index(fields=['author'])
index.set_name_with_model(Book)
self.assertEqual(index.name, 'model_index_author_0f5565_idx')
# '-' for DESC columns should be accounted for in the index name.
index = models.Index(fields=['-author'])
index.set_name_with_model(Book)
self.assertEqual(index.name, 'model_index_author_708765_idx')
# fields may be truncated in the name. db_column is used for naming.
long_field_index = models.Index(fields=['pages'])
long_field_index.set_name_with_model(Book)
self.assertEqual(long_field_index.name, 'model_index_page_co_69235a_idx')
# suffix can't be longer than 3 characters.
long_field_index.suffix = 'suff'
msg = (
'Index too long for multiple database support. Is self.suffix '
'longer than 3 characters?'
)
with self.assertRaisesMessage(ValueError, msg):
long_field_index.set_name_with_model(Book)
@isolate_apps('model_indexes')
def test_name_auto_generation_with_quoted_db_table(self):
class QuotedDbTable(models.Model):
name = models.CharField(max_length=50)
class Meta:
db_table = '"t_quoted"'
index = models.Index(fields=['name'])
index.set_name_with_model(QuotedDbTable)
self.assertEqual(index.name, 't_quoted_name_e4ed1b_idx')
def test_deconstruction(self):
index = models.Index(fields=['title'], db_tablespace='idx_tbls')
index.set_name_with_model(Book)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.db.models.Index')
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{'fields': ['title'], 'name': 'model_index_title_196f42_idx', 'db_tablespace': 'idx_tbls'}
)
def test_deconstruct_with_condition(self):
index = models.Index(
name='big_book_index',
fields=['title'],
condition=models.Q(pages__gt=400),
)
index.set_name_with_model(Book)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.db.models.Index')
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
'fields': ['title'],
'name': 'model_index_title_196f42_idx',
'condition': models.Q(pages__gt=400),
}
)
def test_deconstruct_with_include(self):
index = models.Index(
name='book_include_idx',
fields=['title'],
include=['author'],
)
index.set_name_with_model(Book)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.db.models.Index')
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
'fields': ['title'],
'name': 'model_index_title_196f42_idx',
'include': ('author',),
},
)
def test_deconstruct_with_expressions(self):
index = models.Index(Upper('title'), name='book_func_idx')
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.db.models.Index')
self.assertEqual(args, (Upper('title'),))
self.assertEqual(kwargs, {'name': 'book_func_idx'})
def test_clone(self):
index = models.Index(fields=['title'])
new_index = index.clone()
self.assertIsNot(index, new_index)
self.assertEqual(index.fields, new_index.fields)
def test_clone_with_expressions(self):
index = models.Index(Upper('title'), name='book_func_idx')
new_index = index.clone()
self.assertIsNot(index, new_index)
self.assertEqual(index.expressions, new_index.expressions)
def test_name_set(self):
index_names = [index.name for index in Book._meta.indexes]
self.assertCountEqual(
index_names,
[
'model_index_title_196f42_idx',
'model_index_isbn_34f975_idx',
'model_indexes_book_barcode_idx',
],
)
def test_abstract_children(self):
index_names = [index.name for index in ChildModel1._meta.indexes]
self.assertEqual(
index_names,
['model_index_name_440998_idx', 'model_indexes_childmodel1_idx'],
)
index_names = [index.name for index in ChildModel2._meta.indexes]
self.assertEqual(
index_names,
['model_index_name_b6c374_idx', 'model_indexes_childmodel2_idx'],
)
class IndexesTests(TestCase):
@skipUnlessDBFeature('supports_tablespaces')
def test_db_tablespace(self):
editor = connection.schema_editor()
# Index with db_tablespace attribute.
for fields in [
# Field with db_tablespace specified on model.
['shortcut'],
# Field without db_tablespace specified on model.
['author'],
# Multi-column with db_tablespaces specified on model.
['shortcut', 'isbn'],
# Multi-column without db_tablespace specified on model.
['title', 'author'],
]:
with self.subTest(fields=fields):
index = models.Index(fields=fields, db_tablespace='idx_tbls2')
self.assertIn('"idx_tbls2"', str(index.create_sql(Book, editor)).lower())
# Indexes without db_tablespace attribute.
for fields in [['author'], ['shortcut', 'isbn'], ['title', 'author']]:
with self.subTest(fields=fields):
index = models.Index(fields=fields)
# The DEFAULT_INDEX_TABLESPACE setting can't be tested because
# it's evaluated when the model class is defined. As a
# consequence, @override_settings doesn't work.
if settings.DEFAULT_INDEX_TABLESPACE:
self.assertIn(
'"%s"' % settings.DEFAULT_INDEX_TABLESPACE,
str(index.create_sql(Book, editor)).lower()
)
else:
self.assertNotIn('TABLESPACE', str(index.create_sql(Book, editor)))
# Field with db_tablespace specified on the model and an index without
# db_tablespace.
index = models.Index(fields=['shortcut'])
self.assertIn('"idx_tbls"', str(index.create_sql(Book, editor)).lower())
@skipUnlessDBFeature('supports_tablespaces')
def test_func_with_tablespace(self):
# Functional index with db_tablespace attribute.
index = models.Index(
Lower('shortcut').desc(),
name='functional_tbls',
db_tablespace='idx_tbls2',
)
with connection.schema_editor() as editor:
sql = str(index.create_sql(Book, editor))
self.assertIn(editor.quote_name('idx_tbls2'), sql)
# Functional index without db_tablespace attribute.
index = models.Index(Lower('shortcut').desc(), name='functional_no_tbls')
with connection.schema_editor() as editor:
sql = str(index.create_sql(Book, editor))
# The DEFAULT_INDEX_TABLESPACE setting can't be tested because it's
# evaluated when the model class is defined. As a consequence,
# @override_settings doesn't work.
if settings.DEFAULT_INDEX_TABLESPACE:
self.assertIn(
editor.quote_name(settings.DEFAULT_INDEX_TABLESPACE),
sql,
)
else:
self.assertNotIn('TABLESPACE', sql)
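# Illustrative sketch, not used by the tests above: the kinds of Index
# declarations exercised in this module, as they would appear in a model's
# Meta.indexes. The index and field names are hypothetical.
def _example_indexes():
    return [
        models.Index(fields=['title'], name='example_title_idx'),
        models.Index(
            fields=['title'],
            name='example_long_idx',
            condition=models.Q(pages__gt=400),
        ),
        models.Index(Lower('title'), name='example_title_lower_idx'),
        models.Index(fields=['title'], name='example_cover_idx', include=['author']),
    ]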
|
d07e105f2f3ab2761e2a4add33a0a0a04701b9d9f901b36a118b6118d7e2b4fe | from datetime import date
from django import forms
from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry
from django.contrib.admin.options import (
HORIZONTAL, VERTICAL, ModelAdmin, TabularInline,
get_content_type_for_model,
)
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.widgets import (
AdminDateWidget, AdminRadioSelect, AutocompleteSelect,
AutocompleteSelectMultiple,
)
from django.contrib.auth.models import User
from django.db import models
from django.forms.widgets import Select
from django.test import SimpleTestCase, TestCase
from django.test.utils import isolate_apps
from .models import Band, Concert, Song
class MockRequest:
pass
class MockSuperUser:
def has_perm(self, perm, obj=None):
return True
request = MockRequest()
request.user = MockSuperUser()
class ModelAdminTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.band = Band.objects.create(
name='The Doors',
bio='',
sign_date=date(1965, 1, 1),
)
def setUp(self):
self.site = AdminSite()
def test_modeladmin_str(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(str(ma), 'modeladmin.ModelAdmin')
# form/fields/fieldsets interaction ##############################
def test_default_fields(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'bio', 'sign_date'])
self.assertEqual(list(ma.get_fields(request)), ['name', 'bio', 'sign_date'])
self.assertEqual(list(ma.get_fields(request, self.band)), ['name', 'bio', 'sign_date'])
self.assertIsNone(ma.get_exclude(request, self.band))
def test_default_fieldsets(self):
# fieldsets_add and fieldsets_change should return a special data structure that
# is used in the templates. They should generate the "right thing" whether we
# have specified a custom form, the fields argument, or nothing at all.
#
# Here's the default case. There are no custom form_add/form_change methods,
# no fields argument, and no fieldsets argument.
ma = ModelAdmin(Band, self.site)
self.assertEqual(ma.get_fieldsets(request), [(None, {'fields': ['name', 'bio', 'sign_date']})])
self.assertEqual(ma.get_fieldsets(request, self.band), [(None, {'fields': ['name', 'bio', 'sign_date']})])
def test_get_fieldsets(self):
# get_fieldsets() is called when figuring out form fields (#18681).
class BandAdmin(ModelAdmin):
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['name', 'bio']})]
ma = BandAdmin(Band, self.site)
form = ma.get_form(None)
self.assertEqual(form._meta.fields, ['name', 'bio'])
class InlineBandAdmin(TabularInline):
model = Concert
fk_name = 'main_band'
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['day', 'transport']})]
ma = InlineBandAdmin(Band, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ['day', 'transport'])
def test_lookup_allowed_allows_nonexistent_lookup(self):
"""
        lookup_allowed() allows a parameter whose field lookup doesn't exist
        (#21129).
"""
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertTrue(ma.lookup_allowed('name__nonexistent', 'test_value'))
@isolate_apps('modeladmin')
def test_lookup_allowed_onetoone(self):
class Department(models.Model):
code = models.CharField(max_length=4, unique=True)
class Employee(models.Model):
department = models.ForeignKey(Department, models.CASCADE, to_field="code")
class EmployeeProfile(models.Model):
employee = models.OneToOneField(Employee, models.CASCADE)
class EmployeeInfo(models.Model):
employee = models.OneToOneField(Employee, models.CASCADE)
description = models.CharField(max_length=100)
class EmployeeProfileAdmin(ModelAdmin):
list_filter = [
'employee__employeeinfo__description',
'employee__department__code',
]
ma = EmployeeProfileAdmin(EmployeeProfile, self.site)
# Reverse OneToOneField
self.assertIs(ma.lookup_allowed('employee__employeeinfo__description', 'test_value'), True)
# OneToOneField and ForeignKey
self.assertIs(ma.lookup_allowed('employee__department__code', 'test_value'), True)
def test_field_arguments(self):
# If fields is specified, fieldsets_add and fieldsets_change should
# just stick the fields into a formsets structure and return it.
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_fields(request)), ['name'])
self.assertEqual(list(ma.get_fields(request, self.band)), ['name'])
self.assertEqual(ma.get_fieldsets(request), [(None, {'fields': ['name']})])
self.assertEqual(ma.get_fieldsets(request, self.band), [(None, {'fields': ['name']})])
def test_field_arguments_restricted_on_form(self):
        # If fields or fieldsets is specified, it should limit the fields on
        # the Form class to the fields specified. This may cause errors to be
# raised in the db layer if required model fields aren't in fields/
# fieldsets, but that's preferable to ghost errors where a field in the
# Form class isn't being displayed because it's not in fields/fieldsets.
# Using `fields`.
class BandAdmin(ModelAdmin):
fields = ['name']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name'])
self.assertEqual(list(ma.get_form(request, self.band).base_fields), ['name'])
# Using `fieldsets`.
class BandAdmin(ModelAdmin):
fieldsets = [(None, {'fields': ['name']})]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name'])
self.assertEqual(list(ma.get_form(request, self.band).base_fields), ['name'])
# Using `exclude`.
class BandAdmin(ModelAdmin):
exclude = ['bio']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date'])
# You can also pass a tuple to `exclude`.
class BandAdmin(ModelAdmin):
exclude = ('bio',)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date'])
# Using `fields` and `exclude`.
class BandAdmin(ModelAdmin):
fields = ['name', 'bio']
exclude = ['bio']
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name'])
def test_custom_form_meta_exclude_with_readonly(self):
"""
The custom ModelForm's `Meta.exclude` is respected when used in
conjunction with `ModelAdmin.readonly_fields` and when no
`ModelAdmin.exclude` is defined (#14496).
"""
# With ModelAdmin
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['bio']
class BandAdmin(ModelAdmin):
readonly_fields = ['name']
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['sign_date'])
# With InlineModelAdmin
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
readonly_fields = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'opening_band', 'id', 'DELETE'])
def test_custom_formfield_override_readonly(self):
class AdminBandForm(forms.ModelForm):
name = forms.CharField()
class Meta:
exclude = ()
model = Band
class BandAdmin(ModelAdmin):
form = AdminBandForm
readonly_fields = ['name']
ma = BandAdmin(Band, self.site)
# `name` shouldn't appear in base_fields because it's part of
# readonly_fields.
self.assertEqual(
list(ma.get_form(request).base_fields),
['bio', 'sign_date']
)
# But it should appear in get_fields()/fieldsets() so it can be
# displayed as read-only.
self.assertEqual(
list(ma.get_fields(request)),
['bio', 'sign_date', 'name']
)
self.assertEqual(
list(ma.get_fieldsets(request)),
[(None, {'fields': ['bio', 'sign_date', 'name']})]
)
def test_custom_form_meta_exclude(self):
"""
The custom ModelForm's `Meta.exclude` is overridden if
`ModelAdmin.exclude` or `InlineModelAdmin.exclude` are defined (#14496).
"""
# With ModelAdmin
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['bio']
class BandAdmin(ModelAdmin):
exclude = ['name']
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['bio', 'sign_date'])
# With InlineModelAdmin
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
exclude = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'opening_band', 'day', 'id', 'DELETE']
)
def test_overriding_get_exclude(self):
class BandAdmin(ModelAdmin):
def get_exclude(self, request, obj=None):
return ['name']
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request).base_fields),
['bio', 'sign_date']
)
def test_get_exclude_overrides_exclude(self):
class BandAdmin(ModelAdmin):
exclude = ['bio']
def get_exclude(self, request, obj=None):
return ['name']
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request).base_fields),
['bio', 'sign_date']
)
def test_get_exclude_takes_obj(self):
class BandAdmin(ModelAdmin):
def get_exclude(self, request, obj=None):
if obj:
return ['sign_date']
return ['name']
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request, self.band).base_fields),
['name', 'bio']
)
def test_custom_form_validation(self):
        # If a form is specified, it should be used, allowing custom validation
# to work properly. This won't break any of the admin widgets or media.
class AdminBandForm(forms.ModelForm):
delete = forms.BooleanField()
class BandAdmin(ModelAdmin):
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'bio', 'sign_date', 'delete'])
self.assertEqual(type(ma.get_form(request).base_fields['sign_date'].widget), AdminDateWidget)
def test_form_exclude_kwarg_override(self):
"""
The `exclude` kwarg passed to `ModelAdmin.get_form()` overrides all
other declarations (#8999).
"""
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ['name']
class BandAdmin(ModelAdmin):
exclude = ['sign_date']
form = AdminBandForm
def get_form(self, request, obj=None, **kwargs):
kwargs['exclude'] = ['bio']
return super().get_form(request, obj, **kwargs)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['name', 'sign_date'])
def test_formset_exclude_kwarg_override(self):
"""
The `exclude` kwarg passed to `InlineModelAdmin.get_formset()`
overrides all other declarations (#8999).
"""
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
exclude = ['transport']
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
def get_formset(self, request, obj=None, **kwargs):
kwargs['exclude'] = ['opening_band']
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'day', 'transport', 'id', 'DELETE']
)
def test_formset_overriding_get_exclude_with_form_fields(self):
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
fields = ['main_band', 'opening_band', 'day', 'transport']
class ConcertInline(TabularInline):
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
def get_exclude(self, request, obj=None):
return ['opening_band']
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'day', 'transport', 'id', 'DELETE']
)
def test_formset_overriding_get_exclude_with_form_exclude(self):
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ['day']
class ConcertInline(TabularInline):
form = AdminConcertForm
fk_name = 'main_band'
model = Concert
def get_exclude(self, request, obj=None):
return ['opening_band']
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['main_band', 'day', 'transport', 'id', 'DELETE']
)
def test_raw_id_fields_widget_override(self):
"""
        The autocomplete_fields, raw_id_fields, and radio_fields widgets may
        be overridden by specifying a widget in get_formset().
"""
class ConcertInline(TabularInline):
model = Concert
fk_name = 'main_band'
raw_id_fields = ('opening_band',)
def get_formset(self, request, obj=None, **kwargs):
kwargs['widgets'] = {'opening_band': Select}
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
band_widget = list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields['opening_band'].widget
# Without the override this would be ForeignKeyRawIdWidget.
self.assertIsInstance(band_widget, Select)
def test_queryset_override(self):
# If the queryset of a ModelChoiceField in a custom form is overridden,
# RelatedFieldWidgetWrapper doesn't mess that up.
band2 = Band.objects.create(name='The Beatles', bio='', sign_date=date(1962, 1, 1))
ma = ModelAdmin(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(
str(form["main_band"]),
'<div class="related-widget-wrapper">'
'<select name="main_band" id="id_main_band" required>'
'<option value="" selected>---------</option>'
'<option value="%d">The Beatles</option>'
'<option value="%d">The Doors</option>'
'</select></div>' % (band2.id, self.band.id)
)
class AdminConcertForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["main_band"].queryset = Band.objects.filter(name='The Doors')
class ConcertAdminWithForm(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdminWithForm(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(
str(form["main_band"]),
'<div class="related-widget-wrapper">'
'<select name="main_band" id="id_main_band" required>'
'<option value="" selected>---------</option>'
'<option value="%d">The Doors</option>'
'</select></div>' % self.band.id
)
def test_regression_for_ticket_15820(self):
"""
`obj` is passed from `InlineModelAdmin.get_fieldsets()` to
`InlineModelAdmin.get_formset()`.
"""
class CustomConcertForm(forms.ModelForm):
class Meta:
model = Concert
fields = ['day']
class ConcertInline(TabularInline):
model = Concert
fk_name = 'main_band'
def get_formset(self, request, obj=None, **kwargs):
if obj:
kwargs['form'] = CustomConcertForm
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
Concert.objects.create(main_band=self.band, opening_band=self.band, day=1)
ma = BandAdmin(Band, self.site)
inline_instances = ma.get_inline_instances(request)
fieldsets = list(inline_instances[0].get_fieldsets(request))
self.assertEqual(fieldsets[0][1]['fields'], ['main_band', 'opening_band', 'day', 'transport'])
fieldsets = list(inline_instances[0].get_fieldsets(request, inline_instances[0].model))
self.assertEqual(fieldsets[0][1]['fields'], ['day'])
# radio_fields behavior ###########################################
def test_default_foreign_key_widget(self):
# First, without any radio_fields specified, the widgets for ForeignKey
# and fields with choices specified ought to be a basic Select widget.
# ForeignKey widgets in the admin are wrapped with RelatedFieldWidgetWrapper so
# they need to be handled properly when type checking. For Select fields, all of
# the choices lists have a first entry of dashes.
cma = ModelAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget), Select)
self.assertEqual(
list(cmafa.base_fields['main_band'].widget.choices),
[('', '---------'), (self.band.id, 'The Doors')])
self.assertEqual(type(cmafa.base_fields['opening_band'].widget.widget), Select)
self.assertEqual(
list(cmafa.base_fields['opening_band'].widget.choices),
[('', '---------'), (self.band.id, 'The Doors')]
)
self.assertEqual(type(cmafa.base_fields['day'].widget), Select)
self.assertEqual(
list(cmafa.base_fields['day'].widget.choices),
[('', '---------'), (1, 'Fri'), (2, 'Sat')]
)
self.assertEqual(type(cmafa.base_fields['transport'].widget), Select)
self.assertEqual(
list(cmafa.base_fields['transport'].widget.choices),
[('', '---------'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')])
def test_foreign_key_as_radio_field(self):
# Now specify all the fields as radio_fields. Widgets should now be
# RadioSelect, and the choices list should have a first entry of 'None' if
# blank=True for the model field. Finally, the widget should have the
        # 'radiolist' attr, and 'inline' as well if the field is specified as
        # HORIZONTAL.
class ConcertAdmin(ModelAdmin):
radio_fields = {
'main_band': HORIZONTAL,
'opening_band': VERTICAL,
'day': VERTICAL,
'transport': HORIZONTAL,
}
cma = ConcertAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(type(cmafa.base_fields['main_band'].widget.widget), AdminRadioSelect)
self.assertEqual(cmafa.base_fields['main_band'].widget.attrs, {'class': 'radiolist inline'})
self.assertEqual(
list(cmafa.base_fields['main_band'].widget.choices),
[(self.band.id, 'The Doors')]
)
self.assertEqual(type(cmafa.base_fields['opening_band'].widget.widget), AdminRadioSelect)
self.assertEqual(cmafa.base_fields['opening_band'].widget.attrs, {'class': 'radiolist'})
self.assertEqual(
list(cmafa.base_fields['opening_band'].widget.choices),
[('', 'None'), (self.band.id, 'The Doors')]
)
self.assertEqual(type(cmafa.base_fields['day'].widget), AdminRadioSelect)
self.assertEqual(cmafa.base_fields['day'].widget.attrs, {'class': 'radiolist'})
self.assertEqual(list(cmafa.base_fields['day'].widget.choices), [(1, 'Fri'), (2, 'Sat')])
self.assertEqual(type(cmafa.base_fields['transport'].widget), AdminRadioSelect)
self.assertEqual(cmafa.base_fields['transport'].widget.attrs, {'class': 'radiolist inline'})
self.assertEqual(
list(cmafa.base_fields['transport'].widget.choices),
[('', 'None'), (1, 'Plane'), (2, 'Train'), (3, 'Bus')]
)
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ('transport',)
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['main_band', 'opening_band', 'day'])
class AdminConcertForm(forms.ModelForm):
extra = forms.CharField()
class Meta:
model = Concert
fields = ['extra', 'transport']
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ['extra', 'transport'])
class ConcertInline(TabularInline):
form = AdminConcertForm
model = Concert
fk_name = 'main_band'
can_delete = True
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['extra', 'transport', 'id', 'DELETE', 'main_band']
)
def test_log_actions(self):
ma = ModelAdmin(Band, self.site)
mock_request = MockRequest()
mock_request.user = User.objects.create(username='bill')
content_type = get_content_type_for_model(self.band)
tests = (
(ma.log_addition, ADDITION, {'added': {}}),
(ma.log_change, CHANGE, {'changed': {'fields': ['name', 'bio']}}),
(ma.log_deletion, DELETION, str(self.band)),
)
for method, flag, message in tests:
with self.subTest(name=method.__name__):
created = method(mock_request, self.band, message)
fetched = LogEntry.objects.filter(action_flag=flag).latest('id')
self.assertEqual(created, fetched)
self.assertEqual(fetched.action_flag, flag)
self.assertEqual(fetched.content_type, content_type)
self.assertEqual(fetched.object_id, str(self.band.pk))
self.assertEqual(fetched.user, mock_request.user)
if flag == DELETION:
self.assertEqual(fetched.change_message, '')
self.assertEqual(fetched.object_repr, message)
else:
self.assertEqual(fetched.change_message, str(message))
self.assertEqual(fetched.object_repr, str(self.band))
def test_get_autocomplete_fields(self):
class NameAdmin(ModelAdmin):
search_fields = ['name']
class SongAdmin(ModelAdmin):
autocomplete_fields = ['featuring']
fields = ['featuring', 'band']
class OtherSongAdmin(SongAdmin):
def get_autocomplete_fields(self, request):
return ['band']
self.site.register(Band, NameAdmin)
try:
# Uses autocomplete_fields if not overridden.
model_admin = SongAdmin(Song, self.site)
form = model_admin.get_form(request)()
self.assertIsInstance(form.fields['featuring'].widget.widget, AutocompleteSelectMultiple)
# Uses overridden get_autocomplete_fields
model_admin = OtherSongAdmin(Song, self.site)
form = model_admin.get_form(request)()
self.assertIsInstance(form.fields['band'].widget.widget, AutocompleteSelect)
finally:
self.site.unregister(Band)
def test_get_deleted_objects(self):
mock_request = MockRequest()
mock_request.user = User.objects.create_superuser(username='bob', email='[email protected]', password='test')
self.site.register(Band, ModelAdmin)
ma = self.site._registry[Band]
deletable_objects, model_count, perms_needed, protected = ma.get_deleted_objects([self.band], request)
self.assertEqual(deletable_objects, ['Band: The Doors'])
self.assertEqual(model_count, {'bands': 1})
self.assertEqual(perms_needed, set())
self.assertEqual(protected, [])
def test_get_deleted_objects_with_custom_has_delete_permission(self):
"""
ModelAdmin.get_deleted_objects() uses ModelAdmin.has_delete_permission()
for permissions checking.
"""
mock_request = MockRequest()
mock_request.user = User.objects.create_superuser(username='bob', email='[email protected]', password='test')
class TestModelAdmin(ModelAdmin):
def has_delete_permission(self, request, obj=None):
return False
self.site.register(Band, TestModelAdmin)
ma = self.site._registry[Band]
deletable_objects, model_count, perms_needed, protected = ma.get_deleted_objects([self.band], request)
self.assertEqual(deletable_objects, ['Band: The Doors'])
self.assertEqual(model_count, {'bands': 1})
self.assertEqual(perms_needed, {'band'})
self.assertEqual(protected, [])
def test_modeladmin_repr(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(
repr(ma),
"<ModelAdmin: model=Band site=AdminSite(name='admin')>",
)
class ModelAdminPermissionTests(SimpleTestCase):
class MockUser:
def has_module_perms(self, app_label):
return app_label == 'modeladmin'
class MockViewUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == 'modeladmin.view_band'
class MockAddUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == 'modeladmin.add_band'
class MockChangeUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == 'modeladmin.change_band'
class MockDeleteUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == 'modeladmin.delete_band'
def test_has_view_permission(self):
"""
has_view_permission() returns True for users who can view objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_view_permission(request), True)
request.user = self.MockAddUser()
self.assertIs(ma.has_view_permission(request), False)
request.user = self.MockChangeUser()
self.assertIs(ma.has_view_permission(request), True)
request.user = self.MockDeleteUser()
self.assertIs(ma.has_view_permission(request), False)
def test_has_add_permission(self):
"""
has_add_permission returns True for users who can add objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertFalse(ma.has_add_permission(request))
request.user = self.MockAddUser()
self.assertTrue(ma.has_add_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_add_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_add_permission(request))
def test_inline_has_add_permission_uses_obj(self):
class ConcertInline(TabularInline):
model = Concert
def has_add_permission(self, request, obj):
return bool(obj)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockAddUser()
self.assertEqual(ma.get_inline_instances(request), [])
band = Band(name='The Doors', bio='', sign_date=date(1965, 1, 1))
inline_instances = ma.get_inline_instances(request, band)
self.assertEqual(len(inline_instances), 1)
self.assertIsInstance(inline_instances[0], ConcertInline)
def test_has_change_permission(self):
"""
has_change_permission returns True for users who can edit objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_change_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_change_permission(request))
request.user = self.MockChangeUser()
self.assertTrue(ma.has_change_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_change_permission(request))
def test_has_delete_permission(self):
"""
has_delete_permission returns True for users who can delete objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_delete_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_delete_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_delete_permission(request))
request.user = self.MockDeleteUser()
self.assertTrue(ma.has_delete_permission(request))
def test_has_module_permission(self):
"""
        has_module_permission returns True for users who have any permission
for the module and False for users who don't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_module_permission(request), True)
request.user = self.MockAddUser()
self.assertTrue(ma.has_module_permission(request))
request.user = self.MockChangeUser()
self.assertTrue(ma.has_module_permission(request))
request.user = self.MockDeleteUser()
self.assertTrue(ma.has_module_permission(request))
original_app_label = ma.opts.app_label
ma.opts.app_label = 'anotherapp'
try:
request.user = self.MockViewUser()
self.assertIs(ma.has_module_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_module_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_module_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_module_permission(request))
finally:
ma.opts.app_label = original_app_label
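# Illustrative sketch, not used by the tests above: several tests repeat the
# expression list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields)
# to inspect the first inline formset's form fields. The hypothetical helper
# below spells out those steps.
def _first_inline_form_fields(model_admin, req):
    """Return the field names of the first form of the first inline formset."""
    formset_class, inline_instance = next(iter(model_admin.get_formsets_with_inlines(req)))
    formset = formset_class()
    return list(formset.forms[0].fields)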
|
1e0bb68d86c84cd4c373b5b00a2389d05885ed8f271ccf714646eafb98cee1ee | """
Tests for the Django test runner.
"""
import collections.abc
import unittest
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django import db
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django.core.management.base import SystemCheckError
from django.test import (
SimpleTestCase, TransactionTestCase, skipUnlessDBFeature,
)
from django.test.runner import (
DiscoverRunner, Shuffler, reorder_test_bin, reorder_tests, shuffle_tests,
)
from django.test.testcases import connections_support_transactions
from django.test.utils import (
captured_stderr, dependency_ordered, get_unique_databases_and_mirrors,
iter_test_cases,
)
from django.utils.deprecation import RemovedInDjango50Warning
from .models import B, Person, Through
class MySuite:
def __init__(self):
self.tests = []
def addTest(self, test):
self.tests.append(test)
def __iter__(self):
yield from self.tests
class TestSuiteTests(SimpleTestCase):
def build_test_suite(self, test_classes, suite=None, suite_class=None):
if suite_class is None:
suite_class = unittest.TestSuite
if suite is None:
suite = suite_class()
loader = unittest.defaultTestLoader
for test_class in test_classes:
tests = loader.loadTestsFromTestCase(test_class)
subsuite = suite_class()
# Only use addTest() to simplify testing a custom TestSuite.
for test in tests:
subsuite.addTest(test)
suite.addTest(subsuite)
return suite
def make_test_suite(self, suite=None, suite_class=None):
class Tests1(unittest.TestCase):
def test1(self):
pass
def test2(self):
pass
class Tests2(unittest.TestCase):
def test1(self):
pass
def test2(self):
pass
return self.build_test_suite(
(Tests1, Tests2),
suite=suite,
suite_class=suite_class,
)
def assertTestNames(self, tests, expected):
# Each test.id() has a form like the following:
# "test_runner.tests.IterTestCasesTests.test_iter_test_cases.<locals>.Tests1.test1".
# It suffices to check only the last two parts.
names = ['.'.join(test.id().split('.')[-2:]) for test in tests]
self.assertEqual(names, expected)
def test_iter_test_cases_basic(self):
suite = self.make_test_suite()
tests = iter_test_cases(suite)
self.assertTestNames(tests, expected=[
'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2',
])
def test_iter_test_cases_string_input(self):
msg = (
"Test 'a' must be a test case or test suite not string (was found "
"in 'abc')."
)
with self.assertRaisesMessage(TypeError, msg):
list(iter_test_cases('abc'))
def test_iter_test_cases_iterable_of_tests(self):
class Tests(unittest.TestCase):
def test1(self):
pass
def test2(self):
pass
tests = list(unittest.defaultTestLoader.loadTestsFromTestCase(Tests))
actual_tests = iter_test_cases(tests)
self.assertTestNames(actual_tests, expected=[
'Tests.test1', 'Tests.test2',
])
def test_iter_test_cases_custom_test_suite_class(self):
suite = self.make_test_suite(suite_class=MySuite)
tests = iter_test_cases(suite)
self.assertTestNames(tests, expected=[
'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2',
])
def test_iter_test_cases_mixed_test_suite_classes(self):
suite = self.make_test_suite(suite=MySuite())
child_suite = list(suite)[0]
self.assertNotIsInstance(child_suite, MySuite)
tests = list(iter_test_cases(suite))
self.assertEqual(len(tests), 4)
self.assertNotIsInstance(tests[0], unittest.TestSuite)
def make_tests(self):
"""Return an iterable of tests."""
suite = self.make_test_suite()
tests = list(iter_test_cases(suite))
return tests
def test_shuffle_tests(self):
tests = self.make_tests()
# Choose a seed that shuffles both the classes and methods.
shuffler = Shuffler(seed=9)
shuffled_tests = shuffle_tests(tests, shuffler)
self.assertIsInstance(shuffled_tests, collections.abc.Iterator)
self.assertTestNames(shuffled_tests, expected=[
'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1',
])
def test_reorder_test_bin_no_arguments(self):
tests = self.make_tests()
reordered_tests = reorder_test_bin(tests)
self.assertIsInstance(reordered_tests, collections.abc.Iterator)
self.assertTestNames(reordered_tests, expected=[
'Tests1.test1', 'Tests1.test2', 'Tests2.test1', 'Tests2.test2',
])
def test_reorder_test_bin_reverse(self):
tests = self.make_tests()
reordered_tests = reorder_test_bin(tests, reverse=True)
self.assertIsInstance(reordered_tests, collections.abc.Iterator)
self.assertTestNames(reordered_tests, expected=[
'Tests2.test2', 'Tests2.test1', 'Tests1.test2', 'Tests1.test1',
])
def test_reorder_test_bin_random(self):
tests = self.make_tests()
# Choose a seed that shuffles both the classes and methods.
shuffler = Shuffler(seed=9)
reordered_tests = reorder_test_bin(tests, shuffler=shuffler)
self.assertIsInstance(reordered_tests, collections.abc.Iterator)
self.assertTestNames(reordered_tests, expected=[
'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1',
])
def test_reorder_test_bin_random_and_reverse(self):
tests = self.make_tests()
# Choose a seed that shuffles both the classes and methods.
shuffler = Shuffler(seed=9)
reordered_tests = reorder_test_bin(tests, shuffler=shuffler, reverse=True)
self.assertIsInstance(reordered_tests, collections.abc.Iterator)
self.assertTestNames(reordered_tests, expected=[
'Tests1.test1', 'Tests1.test2', 'Tests2.test2', 'Tests2.test1',
])
def test_reorder_tests_same_type_consecutive(self):
"""Tests of the same type are made consecutive."""
tests = self.make_tests()
# Move the last item to the front.
tests.insert(0, tests.pop())
self.assertTestNames(tests, expected=[
'Tests2.test2', 'Tests1.test1', 'Tests1.test2', 'Tests2.test1',
])
reordered_tests = reorder_tests(tests, classes=[])
self.assertTestNames(reordered_tests, expected=[
'Tests2.test2', 'Tests2.test1', 'Tests1.test1', 'Tests1.test2',
])
def test_reorder_tests_random(self):
tests = self.make_tests()
# Choose a seed that shuffles both the classes and methods.
shuffler = Shuffler(seed=9)
reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler)
self.assertIsInstance(reordered_tests, collections.abc.Iterator)
self.assertTestNames(reordered_tests, expected=[
'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1',
])
def test_reorder_tests_random_mixed_classes(self):
tests = self.make_tests()
# Move the last item to the front.
tests.insert(0, tests.pop())
shuffler = Shuffler(seed=9)
self.assertTestNames(tests, expected=[
'Tests2.test2', 'Tests1.test1', 'Tests1.test2', 'Tests2.test1',
])
reordered_tests = reorder_tests(tests, classes=[], shuffler=shuffler)
self.assertTestNames(reordered_tests, expected=[
'Tests2.test1', 'Tests2.test2', 'Tests1.test2', 'Tests1.test1',
])
def test_reorder_tests_reverse_with_duplicates(self):
class Tests1(unittest.TestCase):
def test1(self):
pass
class Tests2(unittest.TestCase):
def test2(self):
pass
def test3(self):
pass
suite = self.build_test_suite((Tests1, Tests2))
subsuite = list(suite)[0]
suite.addTest(subsuite)
tests = list(iter_test_cases(suite))
self.assertTestNames(tests, expected=[
'Tests1.test1', 'Tests2.test2', 'Tests2.test3', 'Tests1.test1',
])
reordered_tests = reorder_tests(tests, classes=[])
self.assertTestNames(reordered_tests, expected=[
'Tests1.test1', 'Tests2.test2', 'Tests2.test3',
])
reordered_tests = reorder_tests(tests, classes=[], reverse=True)
self.assertTestNames(reordered_tests, expected=[
'Tests2.test3', 'Tests2.test2', 'Tests1.test1',
])
class DependencyOrderingTests(unittest.TestCase):
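# dependency_ordered() receives "raw" items of the form
# (signature, (db_name, [aliases])) together with an {alias: [dependencies]}
# mapping and reorders the items so that every dependency is set up before
# the databases that depend on it, as the assertions below verify.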
def test_simple_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
]
dependencies = {
'alpha': ['charlie'],
'bravo': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, value in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
def test_chained_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
]
dependencies = {
'alpha': ['bravo'],
'bravo': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, value in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
# Explicit dependencies
self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
# Implied dependencies
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
def test_multiple_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
('s4', ('s4_db', ['delta'])),
]
dependencies = {
'alpha': ['bravo', 'delta'],
'bravo': ['charlie'],
'delta': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, aliases in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
self.assertIn('s4', ordered_sigs)
# Explicit dependencies
self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s4'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s4'))
# Implicit dependencies
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
def test_circular_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
]
dependencies = {
'bravo': ['alpha'],
'alpha': ['bravo'],
}
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
def test_own_alias_dependency(self):
raw = [
('s1', ('s1_db', ['alpha', 'bravo']))
]
dependencies = {
'alpha': ['bravo']
}
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
# reordering aliases shouldn't matter
raw = [
('s1', ('s1_db', ['bravo', 'alpha']))
]
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
class MockTestRunner:
def __init__(self, *args, **kwargs):
pass
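# run_tests is attached as a class-level Mock so ManageCommandTests below can
# assert which test labels the `test` management command passes to the runner.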
MockTestRunner.run_tests = mock.Mock(return_value=[])
class ManageCommandTests(unittest.TestCase):
def test_custom_test_runner(self):
call_command('test', 'sites',
testrunner='test_runner.tests.MockTestRunner')
MockTestRunner.run_tests.assert_called_with(('sites',))
def test_bad_test_runner(self):
with self.assertRaises(AttributeError):
call_command('test', 'sites', testrunner='test_runner.NonexistentRunner')
def test_time_recorded(self):
with captured_stderr() as stderr:
call_command('test', '--timing', 'sites', testrunner='test_runner.tests.MockTestRunner')
self.assertIn('Total run took', stderr.getvalue())
class CustomTestRunnerOptionsSettingsTests(AdminScriptTestCase):
"""
Custom runners can add command line arguments. The runner is specified
through a settings file.
"""
def setUp(self):
super().setUp()
settings = {
'TEST_RUNNER': '\'test_runner.runner.CustomOptionsTestRunner\'',
}
self.write_settings('settings.py', sdict=settings)
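# With the quoting above, the generated settings module ends up containing:
# TEST_RUNNER = 'test_runner.runner.CustomOptionsTestRunner'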
def test_default_options(self):
args = ['test', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:2:3')
def test_default_and_given_options(self):
args = ['test', '--settings=test_project.settings', '--option_b=foo']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:foo:3')
def test_option_name_and_value_separated(self):
args = ['test', '--settings=test_project.settings', '--option_b', 'foo']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:foo:3')
def test_all_options_given(self):
args = ['test', '--settings=test_project.settings', '--option_a=bar',
'--option_b=foo', '--option_c=31337']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
class CustomTestRunnerOptionsCmdlineTests(AdminScriptTestCase):
"""
Custom runners can add command line arguments when the runner is specified
using --testrunner.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_testrunner_option(self):
args = [
'test', '--testrunner', 'test_runner.runner.CustomOptionsTestRunner',
'--option_a=bar', '--option_b=foo', '--option_c=31337'
]
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
def test_testrunner_equals(self):
args = [
'test', '--testrunner=test_runner.runner.CustomOptionsTestRunner',
'--option_a=bar', '--option_b=foo', '--option_c=31337'
]
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
def test_no_testrunner(self):
args = ['test', '--testrunner']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertIn('usage', err)
self.assertNotIn('Traceback', err)
self.assertNoOutput(out)
class Ticket17477RegressionTests(AdminScriptTestCase):
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_ticket_17477(self):
"""'manage.py help test' works after r16352."""
args = ['help', 'test']
out, err = self.run_manage(args)
self.assertNoOutput(err)
class SQLiteInMemoryTestDbs(TransactionTestCase):
available_apps = ['test_runner']
databases = {'default', 'other'}
@unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
"This is an sqlite-specific issue")
def test_transaction_support(self):
# Ensure the connection mocking is applied correctly by preventing any
# attempt to call create_test_db on the global connection objects.
for connection in db.connections.all():
create_test_db = mock.patch.object(
connection.creation,
'create_test_db',
side_effect=AssertionError("Global connection object shouldn't be manipulated.")
)
create_test_db.start()
self.addCleanup(create_test_db.stop)
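# Exercise both ways of requesting an in-memory SQLite database: a
# top-level NAME of ':memory:' and a TEST['NAME'] override.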
for option_key, option_value in (
('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.sqlite3',
option_key: option_value,
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
option_key: option_value,
},
})
with mock.patch('django.test.utils.connections', new=tested_connections):
other = tested_connections['other']
DiscoverRunner(verbosity=0).setup_databases()
msg = (
"DATABASES setting '%s' option set to sqlite3's ':memory:' value "
"shouldn't interfere with transaction support detection." % option_key
)
# Transaction support is properly initialized for the 'other' DB.
self.assertTrue(other.features.supports_transactions, msg)
# And all the DBs report that they support transactions.
self.assertTrue(connections_support_transactions(), msg)
class DummyBackendTest(unittest.TestCase):
def test_setup_databases(self):
"""
setup_databases() doesn't fail with the dummy database backend.
"""
tested_connections = db.ConnectionHandler({})
with mock.patch('django.test.utils.connections', new=tested_connections):
runner_instance = DiscoverRunner(verbosity=0)
old_config = runner_instance.setup_databases()
runner_instance.teardown_databases(old_config)
class AliasedDefaultTestSetupTest(unittest.TestCase):
def test_setup_aliased_default_database(self):
"""
setup_databases() doesn't fail when 'default' is aliased.
"""
tested_connections = db.ConnectionHandler({
'default': {
'NAME': 'dummy'
},
'aliased': {
'NAME': 'dummy'
}
})
with mock.patch('django.test.utils.connections', new=tested_connections):
runner_instance = DiscoverRunner(verbosity=0)
old_config = runner_instance.setup_databases()
runner_instance.teardown_databases(old_config)
class SetupDatabasesTests(SimpleTestCase):
def setUp(self):
self.runner_instance = DiscoverRunner(verbosity=0)
def test_setup_aliased_databases(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
},
'other': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
}
})
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
old_config = self.runner_instance.setup_databases()
self.runner_instance.teardown_databases(old_config)
mocked_db_creation.return_value.destroy_test_db.assert_called_once_with('dbname', 0, False)
def test_setup_test_database_aliases(self):
"""
The default database must be the first because data migrations
use the default alias by default.
"""
tested_connections = db.ConnectionHandler({
'other': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
},
'default': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
}
})
with mock.patch('django.test.utils.connections', new=tested_connections):
test_databases, _ = get_unique_databases_and_mirrors()
self.assertEqual(
test_databases,
{
('', '', 'django.db.backends.dummy', 'test_dbname'): (
'dbname',
['default', 'other'],
),
},
)
def test_destroy_test_db_restores_db_name(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': settings.DATABASES[db.DEFAULT_DB_ALIAS]["ENGINE"],
'NAME': 'xxx_test_database',
},
})
# Using the real current name as old_name to not mess with the test suite.
old_name = settings.DATABASES[db.DEFAULT_DB_ALIAS]["NAME"]
with mock.patch('django.db.connections', new=tested_connections):
tested_connections['default'].creation.destroy_test_db(old_name, verbosity=0, keepdb=True)
self.assertEqual(tested_connections['default'].settings_dict["NAME"], old_name)
def test_serialization(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
},
})
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
self.runner_instance.setup_databases()
mocked_db_creation.return_value.create_test_db.assert_called_once_with(
verbosity=0, autoclobber=False, serialize=True, keepdb=False
)
def test_serialized_off(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
'TEST': {'SERIALIZE': False},
},
})
msg = (
'The SERIALIZE test database setting is deprecated as it can be '
'inferred from the TestCase/TransactionTestCase.databases that '
'enable the serialized_rollback feature.'
)
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
self.runner_instance.setup_databases()
mocked_db_creation.return_value.create_test_db.assert_called_once_with(
verbosity=0, autoclobber=False, serialize=False, keepdb=False
)
@skipUnlessDBFeature('supports_sequence_reset')
class AutoIncrementResetTest(TransactionTestCase):
"""
Models created in different test methods receive the same PK values
since the sequences are reset before each test method.
"""
available_apps = ['test_runner']
reset_sequences = True
def _test(self):
# Regular model
p = Person.objects.create(first_name='Jack', last_name='Smith')
self.assertEqual(p.pk, 1)
# Auto-created many-to-many through model
p.friends.add(Person.objects.create(first_name='Jacky', last_name='Smith'))
self.assertEqual(p.friends.through.objects.first().pk, 1)
# Many-to-many through model
b = B.objects.create()
t = Through.objects.create(person=p, b=b)
self.assertEqual(t.pk, 1)
def test_autoincrement_reset1(self):
self._test()
def test_autoincrement_reset2(self):
self._test()
class EmptyDefaultDatabaseTest(unittest.TestCase):
def test_empty_default_database(self):
"""
An empty default database in settings does not raise an ImproperlyConfigured
error when running a unit test that does not use a database.
"""
tested_connections = db.ConnectionHandler({'default': {}})
with mock.patch('django.db.connections', new=tested_connections):
connection = tested_connections[db.utils.DEFAULT_DB_ALIAS]
self.assertEqual(connection.settings_dict['ENGINE'], 'django.db.backends.dummy')
connections_support_transactions()
class RunTestsExceptionHandlingTests(unittest.TestCase):
def test_run_checks_raises(self):
"""
Teardown functions are run when run_checks() raises SystemCheckError.
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases') as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(SystemCheckError):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertTrue(teardown_test_environment.called)
def test_run_checks_raises_and_teardown_raises(self):
"""
SystemCheckError is surfaced when run_checks() raises SystemCheckError
and teardown_databases() raises ValueError.
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \
as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(SystemCheckError):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertFalse(teardown_test_environment.called)
def test_run_checks_passes_and_teardown_raises(self):
"""
Exceptions on teardown are surfaced if no exceptions happen during
run_checks().
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks'), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \
as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(ValueError):
# Suppress the output when running TestDjangoTestCase.
with mock.patch('sys.stderr'):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertFalse(teardown_test_environment.called)
# RemovedInDjango50Warning
class NoOpTestRunner(DiscoverRunner):
def setup_test_environment(self, **kwargs):
return
def setup_databases(self, **kwargs):
return
def run_checks(self, databases):
return
def teardown_databases(self, old_config, **kwargs):
return
def teardown_test_environment(self, **kwargs):
return
class DiscoverRunnerExtraTestsDeprecationTests(SimpleTestCase):
msg = 'The extra_tests argument is deprecated.'
def get_runner(self):
return NoOpTestRunner(verbosity=0, interactive=False)
def test_extra_tests_build_suite(self):
runner = self.get_runner()
with self.assertWarnsMessage(RemovedInDjango50Warning, self.msg):
runner.build_suite(extra_tests=[])
def test_extra_tests_run_tests(self):
runner = self.get_runner()
with captured_stderr():
with self.assertWarnsMessage(RemovedInDjango50Warning, self.msg):
runner.run_tests(
test_labels=['test_runner_apps.sample.tests_sample.EmptyTestCase'],
extra_tests=[],
)
|
bcb5e84c857290673b2c2cb89a84ef1aff30ab5b87c4402c59ca80a5ed81c10c | from django.test.runner import DiscoverRunner
class CustomOptionsTestRunner(DiscoverRunner):
def __init__(self, verbosity=1, interactive=True, failfast=True,
option_a=None, option_b=None, option_c=None, **kwargs):
super().__init__(verbosity=verbosity, interactive=interactive, failfast=failfast)
self.option_a = option_a
self.option_b = option_b
self.option_c = option_c
@classmethod
def add_arguments(cls, parser):
parser.add_argument('--option_a', '-a', default='1')
parser.add_argument('--option_b', '-b', default='2')
parser.add_argument('--option_c', '-c', default='3')
def run_tests(self, test_labels, **kwargs):
print("%s:%s:%s" % (self.option_a, self.option_b, self.option_c))
|
3ab8caf09cb0019f62d405546c9f9012650e806da13b7b51599bd4ef931c68ef | from unittest import mock
from django.test import SimpleTestCase
from django.test.runner import Shuffler
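# Shuffler produces a deterministic, seed-dependent ordering by hashing each
# item together with the (stringified) seed; md5 is the default
# hash_algorithm, as the digests asserted below show.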
class ShufflerTests(SimpleTestCase):
def test_hash_text(self):
actual = Shuffler._hash_text('abcd')
self.assertEqual(actual, 'e2fc714c4727ee9395f324cd2e7f331f')
def test_hash_text_hash_algorithm(self):
class MyShuffler(Shuffler):
hash_algorithm = 'sha1'
actual = MyShuffler._hash_text('abcd')
self.assertEqual(actual, '81fe8bfe87576c3ecb22426f8e57847382917acf')
def test_init(self):
shuffler = Shuffler(100)
self.assertEqual(shuffler.seed, 100)
self.assertEqual(shuffler.seed_source, 'given')
def test_init_none_seed(self):
with mock.patch('random.randint', return_value=200):
shuffler = Shuffler(None)
self.assertEqual(shuffler.seed, 200)
self.assertEqual(shuffler.seed_source, 'generated')
def test_init_no_seed_argument(self):
with mock.patch('random.randint', return_value=300):
shuffler = Shuffler()
self.assertEqual(shuffler.seed, 300)
self.assertEqual(shuffler.seed_source, 'generated')
def test_seed_display(self):
shuffler = Shuffler(100)
shuffler.seed_source = 'test'
self.assertEqual(shuffler.seed_display, '100 (test)')
def test_hash_item_seed(self):
cases = [
(1234, '64ad3fb166ddb41a2ca24f1803b8b722'),
# Passing a string gives the same value.
('1234', '64ad3fb166ddb41a2ca24f1803b8b722'),
(5678, '4dde450ad339b6ce45a0a2666e35b975'),
]
for seed, expected in cases:
with self.subTest(seed=seed):
shuffler = Shuffler(seed=seed)
actual = shuffler._hash_item('abc', lambda x: x)
self.assertEqual(actual, expected)
def test_hash_item_key(self):
cases = [
(lambda x: x, '64ad3fb166ddb41a2ca24f1803b8b722'),
(lambda x: x.upper(), 'ee22e8597bff91742affe4befbf4649a'),
]
for key, expected in cases:
with self.subTest(key=key):
shuffler = Shuffler(seed=1234)
actual = shuffler._hash_item('abc', key)
self.assertEqual(actual, expected)
def test_shuffle_key(self):
cases = [
(lambda x: x, ['a', 'd', 'b', 'c']),
(lambda x: x.upper(), ['d', 'c', 'a', 'b']),
]
for num, (key, expected) in enumerate(cases, start=1):
with self.subTest(num=num):
shuffler = Shuffler(seed=1234)
actual = shuffler.shuffle(['a', 'b', 'c', 'd'], key)
self.assertEqual(actual, expected)
def test_shuffle_consistency(self):
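# Removing any single item must not change the relative order of the
# remaining items, since each item's position depends only on its own hash.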
seq = [str(n) for n in range(5)]
cases = [
(None, ['3', '0', '2', '4', '1']),
(0, ['3', '2', '4', '1']),
(1, ['3', '0', '2', '4']),
(2, ['3', '0', '4', '1']),
(3, ['0', '2', '4', '1']),
(4, ['3', '0', '2', '1']),
]
shuffler = Shuffler(seed=1234)
for index, expected in cases:
with self.subTest(index=index):
if index is None:
new_seq = seq
else:
new_seq = seq.copy()
del new_seq[index]
actual = shuffler.shuffle(new_seq, lambda x: x)
self.assertEqual(actual, expected)
def test_shuffle_same_hash(self):
shuffler = Shuffler(seed=1234)
msg = "item 'A' has same hash 'a56ce89262959e151ee2266552f1819c' as item 'a'"
with self.assertRaisesMessage(RuntimeError, msg):
shuffler.shuffle(['a', 'b', 'A'], lambda x: x.upper())
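# Illustrative sketch (not Django's implementation): the ordering property the
# tests above rely on -- deterministic, seed-dependent, and unaffected by
# removing other items -- can be reproduced by sorting items on a digest of
# the stringified seed combined with a key of each item. The exact text that
# Shuffler hashes is an assumption here; only the property is demonstrated.
import hashlib
def _sketch_shuffle(items, seed, key=lambda x: x):
    def item_digest(item):
        # Hash "<seed>/<key(item)>" so the digest depends only on the seed
        # and the item itself, never on the other items in the sequence.
        return hashlib.md5(f'{seed}/{key(item)}'.encode()).hexdigest()
    return sorted(items, key=item_digest)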
|
90ede07171d6158f1692be521ee28a68aaf6ec007155425c7182928e472c8cf4 | import logging
import multiprocessing
import os
import unittest.loader
from argparse import ArgumentParser
from contextlib import contextmanager
from importlib import import_module
from unittest import TestSuite, TextTestRunner, defaultTestLoader, mock
from django.db import connections
from django.test import SimpleTestCase
from django.test.runner import DiscoverRunner
from django.test.utils import (
NullTimeKeeper, TimeKeeper, captured_stderr, captured_stdout,
)
@contextmanager
def change_cwd(directory):
current_dir = os.path.abspath(os.path.dirname(__file__))
new_dir = os.path.join(current_dir, directory)
old_cwd = os.getcwd()
os.chdir(new_dir)
try:
yield
finally:
os.chdir(old_cwd)
@contextmanager
def change_loader_patterns(patterns):
original_patterns = DiscoverRunner.test_loader.testNamePatterns
DiscoverRunner.test_loader.testNamePatterns = patterns
try:
yield
finally:
DiscoverRunner.test_loader.testNamePatterns = original_patterns
# Isolate from the real environment.
@mock.patch.dict(os.environ, {}, clear=True)
@mock.patch.object(multiprocessing, 'cpu_count', return_value=12)
# Python 3.8 on macOS defaults to 'spawn' mode.
@mock.patch.object(multiprocessing, 'get_start_method', return_value='fork')
class DiscoverRunnerParallelArgumentTests(SimpleTestCase):
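# Resolution of --parallel exercised below: no flag -> 0 (disabled), a bare
# flag or "auto" -> cpu_count(), an explicit integer is used as-is,
# DJANGO_TEST_PROCESSES supplies the default, and the 'spawn' start method
# falls back to a single process.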
def get_parser(self):
parser = ArgumentParser()
DiscoverRunner.add_arguments(parser)
return parser
def test_parallel_default(self, *mocked_objects):
result = self.get_parser().parse_args([])
self.assertEqual(result.parallel, 0)
def test_parallel_flag(self, *mocked_objects):
result = self.get_parser().parse_args(['--parallel'])
self.assertEqual(result.parallel, 12)
def test_parallel_auto(self, *mocked_objects):
result = self.get_parser().parse_args(['--parallel', 'auto'])
self.assertEqual(result.parallel, 12)
def test_parallel_count(self, *mocked_objects):
result = self.get_parser().parse_args(['--parallel', '17'])
self.assertEqual(result.parallel, 17)
def test_parallel_invalid(self, *mocked_objects):
with self.assertRaises(SystemExit), captured_stderr() as stderr:
self.get_parser().parse_args(['--parallel', 'unaccepted'])
msg = "argument --parallel: 'unaccepted' is not an integer or the string 'auto'"
self.assertIn(msg, stderr.getvalue())
@mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': '7'})
def test_parallel_env_var(self, *mocked_objects):
result = self.get_parser().parse_args([])
self.assertEqual(result.parallel, 7)
@mock.patch.dict(os.environ, {'DJANGO_TEST_PROCESSES': 'typo'})
def test_parallel_env_var_non_int(self, *mocked_objects):
with self.assertRaises(ValueError):
self.get_parser().parse_args([])
def test_parallel_spawn(self, mocked_get_start_method, mocked_cpu_count):
mocked_get_start_method.return_value = 'spawn'
result = self.get_parser().parse_args(['--parallel'])
self.assertEqual(result.parallel, 1)
class DiscoverRunnerTests(SimpleTestCase):
@staticmethod
def get_test_methods_names(suite):
return [
t.__class__.__name__ + '.' + t._testMethodName
for t in suite._tests
]
def test_init_debug_mode(self):
runner = DiscoverRunner()
self.assertFalse(runner.debug_mode)
def test_add_arguments_shuffle(self):
parser = ArgumentParser()
DiscoverRunner.add_arguments(parser)
ns = parser.parse_args([])
self.assertIs(ns.shuffle, False)
ns = parser.parse_args(['--shuffle'])
self.assertIsNone(ns.shuffle)
ns = parser.parse_args(['--shuffle', '5'])
self.assertEqual(ns.shuffle, 5)
def test_add_arguments_debug_mode(self):
parser = ArgumentParser()
DiscoverRunner.add_arguments(parser)
ns = parser.parse_args([])
self.assertFalse(ns.debug_mode)
ns = parser.parse_args(["--debug-mode"])
self.assertTrue(ns.debug_mode)
def test_setup_shuffler_no_shuffle_argument(self):
runner = DiscoverRunner()
self.assertIs(runner.shuffle, False)
runner.setup_shuffler()
self.assertIsNone(runner.shuffle_seed)
def test_setup_shuffler_shuffle_none(self):
runner = DiscoverRunner(shuffle=None)
self.assertIsNone(runner.shuffle)
with mock.patch('random.randint', return_value=1):
with captured_stdout() as stdout:
runner.setup_shuffler()
self.assertEqual(stdout.getvalue(), 'Using shuffle seed: 1 (generated)\n')
self.assertEqual(runner.shuffle_seed, 1)
def test_setup_shuffler_shuffle_int(self):
runner = DiscoverRunner(shuffle=2)
self.assertEqual(runner.shuffle, 2)
with captured_stdout() as stdout:
runner.setup_shuffler()
expected_out = 'Using shuffle seed: 2 (given)\n'
self.assertEqual(stdout.getvalue(), expected_out)
self.assertEqual(runner.shuffle_seed, 2)
def test_load_tests_for_label_file_path(self):
with change_cwd('.'):
msg = (
"One of the test labels is a path to a file: "
"'test_discover_runner.py', which is not supported. Use a "
"dotted module name or path to a directory instead."
)
with self.assertRaisesMessage(RuntimeError, msg):
DiscoverRunner().load_tests_for_label('test_discover_runner.py', {})
def test_dotted_test_module(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests_sample'],
).countTestCases()
self.assertEqual(count, 4)
def test_dotted_test_class_vanilla_unittest(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests_sample.TestVanillaUnittest'],
).countTestCases()
self.assertEqual(count, 1)
def test_dotted_test_class_django_testcase(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests_sample.TestDjangoTestCase'],
).countTestCases()
self.assertEqual(count, 1)
def test_dotted_test_method_django_testcase(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample'],
).countTestCases()
self.assertEqual(count, 1)
def test_pattern(self):
count = DiscoverRunner(
pattern="*_tests.py",
verbosity=0,
).build_suite(['test_runner_apps.sample']).countTestCases()
self.assertEqual(count, 1)
def test_name_patterns(self):
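# The patterns follow unittest's -k semantics: fnmatch-style matching when a
# pattern contains a wildcard, plain substring matching otherwise.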
all_test_1 = [
'DjangoCase1.test_1', 'DjangoCase2.test_1',
'SimpleCase1.test_1', 'SimpleCase2.test_1',
'UnittestCase1.test_1', 'UnittestCase2.test_1',
]
all_test_2 = [
'DjangoCase1.test_2', 'DjangoCase2.test_2',
'SimpleCase1.test_2', 'SimpleCase2.test_2',
'UnittestCase1.test_2', 'UnittestCase2.test_2',
]
all_tests = sorted([*all_test_1, *all_test_2, 'UnittestCase2.test_3_test'])
for pattern, expected in [
[['test_1'], all_test_1],
[['UnittestCase1'], ['UnittestCase1.test_1', 'UnittestCase1.test_2']],
[['*test'], ['UnittestCase2.test_3_test']],
[['test*'], all_tests],
[['test'], all_tests],
[['test_1', 'test_2'], sorted([*all_test_1, *all_test_2])],
[['test*1'], all_test_1],
]:
with self.subTest(pattern):
suite = DiscoverRunner(
test_name_patterns=pattern,
verbosity=0,
).build_suite(['test_runner_apps.simple'])
self.assertEqual(expected, self.get_test_methods_names(suite))
def test_loader_patterns_not_mutated(self):
runner = DiscoverRunner(test_name_patterns=['test_sample'], verbosity=0)
tests = [
('test_runner_apps.sample.tests', 1),
('test_runner_apps.sample.tests.Test.test_sample', 1),
('test_runner_apps.sample.empty', 0),
('test_runner_apps.sample.tests_sample.EmptyTestCase', 0),
]
for test_labels, tests_count in tests:
with self.subTest(test_labels=test_labels):
with change_loader_patterns(['UnittestCase1']):
count = runner.build_suite([test_labels]).countTestCases()
self.assertEqual(count, tests_count)
self.assertEqual(runner.test_loader.testNamePatterns, ['UnittestCase1'])
def test_loader_patterns_not_mutated_when_test_label_is_file_path(self):
runner = DiscoverRunner(test_name_patterns=['test_sample'], verbosity=0)
with change_cwd('.'), change_loader_patterns(['UnittestCase1']):
with self.assertRaises(RuntimeError):
runner.build_suite(['test_discover_runner.py'])
self.assertEqual(runner.test_loader.testNamePatterns, ['UnittestCase1'])
def test_file_path(self):
with change_cwd(".."):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps/sample/'],
).countTestCases()
self.assertEqual(count, 5)
def test_empty_label(self):
"""
If the test label is empty, discovery should happen on the current
working directory.
"""
with change_cwd("."):
suite = DiscoverRunner(verbosity=0).build_suite([])
self.assertEqual(
suite._tests[0].id().split(".")[0],
os.path.basename(os.getcwd()),
)
def test_empty_test_case(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests_sample.EmptyTestCase'],
).countTestCases()
self.assertEqual(count, 0)
def test_discovery_on_package(self):
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.tests'],
).countTestCases()
self.assertEqual(count, 1)
def test_ignore_adjacent(self):
"""
When given a dotted path to a module, unittest discovery searches
not just the module, but also the directory containing the module.
This results in tests from adjacent modules being run when they
should not. The discover runner avoids this behavior.
"""
count = DiscoverRunner(verbosity=0).build_suite(
['test_runner_apps.sample.empty'],
).countTestCases()
self.assertEqual(count, 0)
def test_testcase_ordering(self):
with change_cwd(".."):
suite = DiscoverRunner(verbosity=0).build_suite(['test_runner_apps/sample/'])
self.assertEqual(
suite._tests[0].__class__.__name__,
'TestDjangoTestCase',
msg="TestDjangoTestCase should be the first test case")
self.assertEqual(
suite._tests[1].__class__.__name__,
'TestZimpleTestCase',
msg="TestZimpleTestCase should be the second test case")
# All others can follow in unspecified order, including doctests
self.assertIn('DocTestCase', [t.__class__.__name__ for t in suite._tests[2:]])
def test_duplicates_ignored(self):
"""
Tests shouldn't be discovered twice when discovering on overlapping paths.
"""
base_app = 'forms_tests'
sub_app = 'forms_tests.field_tests'
runner = DiscoverRunner(verbosity=0)
with self.modify_settings(INSTALLED_APPS={'append': sub_app}):
single = runner.build_suite([base_app]).countTestCases()
dups = runner.build_suite([base_app, sub_app]).countTestCases()
self.assertEqual(single, dups)
def test_reverse(self):
"""
Reverse should reorder tests while maintaining the grouping specified
by ``DiscoverRunner.reorder_by``.
"""
runner = DiscoverRunner(reverse=True, verbosity=0)
suite = runner.build_suite(
test_labels=('test_runner_apps.sample', 'test_runner_apps.simple'))
self.assertIn('test_runner_apps.simple', next(iter(suite)).id(),
msg="Test labels should be reversed.")
suite = runner.build_suite(test_labels=('test_runner_apps.simple',))
suite = tuple(suite)
self.assertIn('DjangoCase', suite[0].id(),
msg="Test groups should not be reversed.")
self.assertIn('SimpleCase', suite[4].id(),
msg="Test groups order should be preserved.")
self.assertIn('DjangoCase2', suite[0].id(),
msg="Django test cases should be reversed.")
self.assertIn('SimpleCase2', suite[4].id(),
msg="Simple test cases should be reversed.")
self.assertIn('UnittestCase2', suite[8].id(),
msg="Unittest test cases should be reversed.")
self.assertIn('test_2', suite[0].id(),
msg="Methods of Django cases should be reversed.")
self.assertIn('test_2', suite[4].id(),
msg="Methods of simple cases should be reversed.")
self.assertIn('test_2', suite[9].id(),
msg="Methods of unittest cases should be reversed.")
def test_build_suite_failed_tests_first(self):
# The "doesnotexist" label results in a _FailedTest instance.
suite = DiscoverRunner(verbosity=0).build_suite(
test_labels=['test_runner_apps.sample', 'doesnotexist'],
)
tests = list(suite)
self.assertIsInstance(tests[0], unittest.loader._FailedTest)
self.assertNotIsInstance(tests[-1], unittest.loader._FailedTest)
def test_build_suite_shuffling(self):
# These will result in unittest.loader._FailedTest instances rather
# than TestCase objects, but they are sufficient for testing.
labels = ['label1', 'label2', 'label3', 'label4']
cases = [
({}, ['label1', 'label2', 'label3', 'label4']),
({'reverse': True}, ['label4', 'label3', 'label2', 'label1']),
({'shuffle': 8}, ['label4', 'label1', 'label3', 'label2']),
({'shuffle': 8, 'reverse': True}, ['label2', 'label3', 'label1', 'label4']),
]
for kwargs, expected in cases:
with self.subTest(kwargs=kwargs):
# Prevent writing the seed to stdout.
runner = DiscoverRunner(**kwargs, verbosity=0)
tests = runner.build_suite(test_labels=labels)
# The ids have the form "unittest.loader._FailedTest.label1".
names = [test.id().split('.')[-1] for test in tests]
self.assertEqual(names, expected)
def test_overridable_get_test_runner_kwargs(self):
self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict)
def test_overridable_test_suite(self):
self.assertEqual(DiscoverRunner().test_suite, TestSuite)
def test_overridable_test_runner(self):
self.assertEqual(DiscoverRunner().test_runner, TextTestRunner)
def test_overridable_test_loader(self):
self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader)
def test_tags(self):
runner = DiscoverRunner(tags=['core'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1)
runner = DiscoverRunner(tags=['fast'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2)
runner = DiscoverRunner(tags=['slow'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2)
def test_exclude_tags(self):
runner = DiscoverRunner(tags=['fast'], exclude_tags=['core'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1)
runner = DiscoverRunner(tags=['fast'], exclude_tags=['slow'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0)
runner = DiscoverRunner(exclude_tags=['slow'], verbosity=0)
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0)
def test_tag_inheritance(self):
def count_tests(**kwargs):
kwargs.setdefault('verbosity', 0)
suite = DiscoverRunner(**kwargs).build_suite(['test_runner_apps.tagged.tests_inheritance'])
return suite.countTestCases()
self.assertEqual(count_tests(tags=['foo']), 4)
self.assertEqual(count_tests(tags=['bar']), 2)
self.assertEqual(count_tests(tags=['baz']), 2)
self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar']), 2)
self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar', 'baz']), 1)
self.assertEqual(count_tests(exclude_tags=['foo']), 0)
def test_tag_fail_to_load(self):
with self.assertRaises(SyntaxError):
import_module('test_runner_apps.tagged.tests_syntax_error')
runner = DiscoverRunner(tags=['syntax_error'], verbosity=0)
# A label that doesn't exist or cannot be loaded due to syntax errors
# is always considered matching.
suite = runner.build_suite(['doesnotexist', 'test_runner_apps.tagged'])
self.assertEqual([test.id() for test in suite], [
'unittest.loader._FailedTest.doesnotexist',
'unittest.loader._FailedTest.test_runner_apps.tagged.tests_syntax_error',
])
def test_included_tags_displayed(self):
runner = DiscoverRunner(tags=['foo', 'bar'], verbosity=2)
with captured_stdout() as stdout:
runner.build_suite(['test_runner_apps.tagged.tests'])
self.assertIn('Including test tag(s): bar, foo.\n', stdout.getvalue())
def test_excluded_tags_displayed(self):
runner = DiscoverRunner(exclude_tags=['foo', 'bar'], verbosity=3)
with captured_stdout() as stdout:
runner.build_suite(['test_runner_apps.tagged.tests'])
self.assertIn('Excluding test tag(s): bar, foo.\n', stdout.getvalue())
def test_number_of_tests_found_displayed(self):
runner = DiscoverRunner()
with captured_stdout() as stdout:
runner.build_suite([
'test_runner_apps.sample.tests_sample.TestDjangoTestCase',
'test_runner_apps.simple',
])
self.assertIn('Found 14 test(s).\n', stdout.getvalue())
def test_pdb_with_parallel(self):
msg = (
'You cannot use --pdb with parallel tests; pass --parallel=1 to '
'use it.'
)
with self.assertRaisesMessage(ValueError, msg):
DiscoverRunner(pdb=True, parallel=2)
def test_number_of_parallel_workers(self):
"""Number of processes doesn't exceed the number of TestCases."""
runner = DiscoverRunner(parallel=5, verbosity=0)
suite = runner.build_suite(['test_runner_apps.tagged'])
self.assertEqual(suite.processes, len(suite.subsuites))
def test_buffer_mode_test_pass(self):
runner = DiscoverRunner(buffer=True, verbosity=0)
with captured_stdout() as stdout, captured_stderr() as stderr:
suite = runner.build_suite([
'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_pass',
])
runner.run_suite(suite)
self.assertNotIn('Write to stderr.', stderr.getvalue())
self.assertNotIn('Write to stdout.', stdout.getvalue())
def test_buffer_mode_test_fail(self):
runner = DiscoverRunner(buffer=True, verbosity=0)
with captured_stdout() as stdout, captured_stderr() as stderr:
suite = runner.build_suite([
'test_runner_apps.buffer.tests_buffer.WriteToStdoutStderrTestCase.test_fail',
])
runner.run_suite(suite)
self.assertIn('Write to stderr.', stderr.getvalue())
self.assertIn('Write to stdout.', stdout.getvalue())
def run_suite_with_runner(self, runner_class, **kwargs):
class MyRunner(DiscoverRunner):
def test_runner(self, *args, **kwargs):
return runner_class()
runner = MyRunner(**kwargs)
# Suppress logging "Using shuffle seed" to the console.
with captured_stdout():
runner.setup_shuffler()
with captured_stdout() as stdout:
try:
result = runner.run_suite(None)
except RuntimeError as exc:
result = str(exc)
output = stdout.getvalue()
return result, output
def test_run_suite_logs_seed(self):
class TestRunner:
def run(self, suite):
return '<fake-result>'
expected_prefix = 'Used shuffle seed'
# Test with and without shuffling enabled.
result, output = self.run_suite_with_runner(TestRunner)
self.assertEqual(result, '<fake-result>')
self.assertNotIn(expected_prefix, output)
result, output = self.run_suite_with_runner(TestRunner, shuffle=2)
self.assertEqual(result, '<fake-result>')
expected_output = f'{expected_prefix}: 2 (given)\n'
self.assertEqual(output, expected_output)
def test_run_suite_logs_seed_exception(self):
"""
run_suite() logs the seed when TestRunner.run() raises an exception.
"""
class TestRunner:
def run(self, suite):
raise RuntimeError('my exception')
result, output = self.run_suite_with_runner(TestRunner, shuffle=2)
self.assertEqual(result, 'my exception')
expected_output = 'Used shuffle seed: 2 (given)\n'
self.assertEqual(output, expected_output)
@mock.patch('faulthandler.enable')
def test_faulthandler_enabled(self, mocked_enable):
with mock.patch('faulthandler.is_enabled', return_value=False):
DiscoverRunner(enable_faulthandler=True)
mocked_enable.assert_called()
@mock.patch('faulthandler.enable')
def test_faulthandler_already_enabled(self, mocked_enable):
with mock.patch('faulthandler.is_enabled', return_value=True):
DiscoverRunner(enable_faulthandler=True)
mocked_enable.assert_not_called()
@mock.patch('faulthandler.enable')
def test_faulthandler_enabled_fileno(self, mocked_enable):
# captured_stderr() replaces sys.stderr with a stream that isn't an actual file.
with mock.patch('faulthandler.is_enabled', return_value=False), captured_stderr():
DiscoverRunner(enable_faulthandler=True)
mocked_enable.assert_called()
@mock.patch('faulthandler.enable')
def test_faulthandler_disabled(self, mocked_enable):
with mock.patch('faulthandler.is_enabled', return_value=False):
DiscoverRunner(enable_faulthandler=False)
mocked_enable.assert_not_called()
def test_timings_not_captured(self):
runner = DiscoverRunner(timing=False)
with captured_stderr() as stderr:
with runner.time_keeper.timed('test'):
pass
runner.time_keeper.print_results()
self.assertIsInstance(runner.time_keeper, NullTimeKeeper)
self.assertNotIn('test', stderr.getvalue())
def test_timings_captured(self):
runner = DiscoverRunner(timing=True)
with captured_stderr() as stderr:
with runner.time_keeper.timed('test'):
pass
runner.time_keeper.print_results()
self.assertIsInstance(runner.time_keeper, TimeKeeper)
self.assertIn('test', stderr.getvalue())
def test_log(self):
custom_low_level = 5
custom_high_level = 45
msg = 'logging message'
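# Each case is (verbosity, level, expect_output). A level of None behaves
# like logging.INFO in these cases.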
cases = [
(0, None, False),
(0, custom_low_level, False),
(0, logging.DEBUG, False),
(0, logging.INFO, False),
(0, logging.WARNING, False),
(0, custom_high_level, False),
(1, None, True),
(1, custom_low_level, False),
(1, logging.DEBUG, False),
(1, logging.INFO, True),
(1, logging.WARNING, True),
(1, custom_high_level, True),
(2, None, True),
(2, custom_low_level, True),
(2, logging.DEBUG, True),
(2, logging.INFO, True),
(2, logging.WARNING, True),
(2, custom_high_level, True),
(3, None, True),
(3, custom_low_level, True),
(3, logging.DEBUG, True),
(3, logging.INFO, True),
(3, logging.WARNING, True),
(3, custom_high_level, True),
]
for verbosity, level, output in cases:
with self.subTest(verbosity=verbosity, level=level):
with captured_stdout() as stdout:
runner = DiscoverRunner(verbosity=verbosity)
runner.log(msg, level)
self.assertEqual(stdout.getvalue(), f'{msg}\n' if output else '')
class DiscoverRunnerGetDatabasesTests(SimpleTestCase):
runner = DiscoverRunner(verbosity=2)
skip_msg = 'Skipping setup of unused database(s): '
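# get_databases() returns a {alias: serialize_flag} mapping where the flag is
# True when at least one collected test needs serialized rollback for that
# database, as test_serialize below shows.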
def get_databases(self, test_labels):
with captured_stdout() as stdout:
suite = self.runner.build_suite(test_labels)
databases = self.runner.get_databases(suite)
return databases, stdout.getvalue()
def assertSkippedDatabases(self, test_labels, expected_databases):
databases, output = self.get_databases(test_labels)
self.assertEqual(databases, expected_databases)
skipped_databases = set(connections) - set(expected_databases)
if skipped_databases:
self.assertIn(self.skip_msg + ', '.join(sorted(skipped_databases)), output)
else:
self.assertNotIn(self.skip_msg, output)
def test_mixed(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests'])
self.assertEqual(databases, {'default': True, 'other': False})
self.assertNotIn(self.skip_msg, output)
def test_all(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests.AllDatabasesTests'])
self.assertEqual(databases, {alias: False for alias in connections})
self.assertNotIn(self.skip_msg, output)
def test_default_and_other(self):
self.assertSkippedDatabases([
'test_runner_apps.databases.tests.DefaultDatabaseTests',
'test_runner_apps.databases.tests.OtherDatabaseTests',
], {'default': False, 'other': False})
def test_default_only(self):
self.assertSkippedDatabases([
'test_runner_apps.databases.tests.DefaultDatabaseTests',
], {'default': False})
def test_other_only(self):
self.assertSkippedDatabases([
'test_runner_apps.databases.tests.OtherDatabaseTests'
], {'other': False})
def test_no_databases_required(self):
self.assertSkippedDatabases([
'test_runner_apps.databases.tests.NoDatabaseTests'
], {})
def test_serialize(self):
databases, _ = self.get_databases([
'test_runner_apps.databases.tests.DefaultDatabaseSerializedTests'
])
self.assertEqual(databases, {'default': True})
|
597ff179a86061b55fb5bce4e61f49eca18b0f0007a8eb869328b0706b11a2fc | from unittest import mock
from django.db import transaction
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
Article, InheritedArticleA, InheritedArticleB, Publication, User,
)
class ManyToManyTests(TestCase):
@classmethod
def setUpTestData(cls):
# Create a couple of Publications.
cls.p1 = Publication.objects.create(title='The Python Journal')
cls.p2 = Publication.objects.create(title='Science News')
cls.p3 = Publication.objects.create(title='Science Weekly')
cls.p4 = Publication.objects.create(title='Highlights for Children')
cls.a1 = Article.objects.create(headline='Django lets you build web apps easily')
cls.a1.publications.add(cls.p1)
cls.a2 = Article.objects.create(headline='NASA uses Python')
cls.a2.publications.add(cls.p1, cls.p2, cls.p3, cls.p4)
cls.a3 = Article.objects.create(headline='NASA finds intelligent life on Earth')
cls.a3.publications.add(cls.p2)
cls.a4 = Article.objects.create(headline='Oxygen-free diet works wonders')
cls.a4.publications.add(cls.p2)
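# Resulting relations: a1 -> p1; a2 -> p1, p2, p3, p4; a3 -> p2; a4 -> p2.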
def test_add(self):
# Create an Article.
a5 = Article(headline='Django lets you create web apps easily')
# You can't associate it with a Publication until it's been saved.
msg = (
'"<Article: Django lets you create web apps easily>" needs to have '
'a value for field "id" before this many-to-many relationship can be used.'
)
with self.assertRaisesMessage(ValueError, msg):
getattr(a5, 'publications')
# Save it!
a5.save()
# Associate the Article with a Publication.
a5.publications.add(self.p1)
self.assertSequenceEqual(a5.publications.all(), [self.p1])
# Create another Article, and set it to appear in both Publications.
a6 = Article(headline='ESA uses Python')
a6.save()
a6.publications.add(self.p1, self.p2)
a6.publications.add(self.p3)
# Adding a second time is OK
a6.publications.add(self.p3)
self.assertSequenceEqual(
a6.publications.all(),
[self.p2, self.p3, self.p1],
)
# Adding an object of the wrong type raises TypeError
msg = "'Publication' instance expected, got <Article: Django lets you create web apps easily>"
with self.assertRaisesMessage(TypeError, msg):
with transaction.atomic():
a6.publications.add(a5)
# Add a Publication directly via publications.add by using keyword arguments.
p5 = a6.publications.create(title='Highlights for Adults')
self.assertSequenceEqual(
a6.publications.all(),
[p5, self.p2, self.p3, self.p1],
)
def test_add_remove_set_by_pk(self):
a5 = Article.objects.create(headline='Django lets you create web apps easily')
a5.publications.add(self.p1.pk)
self.assertSequenceEqual(a5.publications.all(), [self.p1])
a5.publications.set([self.p2.pk])
self.assertSequenceEqual(a5.publications.all(), [self.p2])
a5.publications.remove(self.p2.pk)
self.assertSequenceEqual(a5.publications.all(), [])
def test_add_remove_set_by_to_field(self):
user_1 = User.objects.create(username='Jean')
user_2 = User.objects.create(username='Joe')
a5 = Article.objects.create(headline='Django lets you create web apps easily')
a5.authors.add(user_1.username)
self.assertSequenceEqual(a5.authors.all(), [user_1])
a5.authors.set([user_2.username])
self.assertSequenceEqual(a5.authors.all(), [user_2])
a5.authors.remove(user_2.username)
self.assertSequenceEqual(a5.authors.all(), [])
def test_add_remove_invalid_type(self):
msg = "Field 'id' expected a number but got 'invalid'."
for method in ['add', 'remove']:
with self.subTest(method), self.assertRaisesMessage(ValueError, msg):
getattr(self.a1.publications, method)('invalid')
def test_reverse_add(self):
# Adding via the 'other' end of an m2m
a5 = Article(headline='NASA finds intelligent life on Mars')
a5.save()
self.p2.article_set.add(a5)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, a5, self.a2, self.a4],
)
self.assertSequenceEqual(a5.publications.all(), [self.p2])
# Adding via the other end using keywords
a6 = self.p2.article_set.create(headline='Carbon-free diet works wonders')
self.assertSequenceEqual(
self.p2.article_set.all(),
[a6, self.a3, a5, self.a2, self.a4],
)
a6 = self.p2.article_set.all()[3]
self.assertSequenceEqual(
a6.publications.all(),
[self.p4, self.p2, self.p3, self.p1],
)
@skipUnlessDBFeature('supports_ignore_conflicts')
def test_fast_add_ignore_conflicts(self):
"""
A single query is necessary to add auto-created through instances if
the database backend supports bulk_create(ignore_conflicts) and no
m2m_changed signals receivers are connected.
"""
with self.assertNumQueries(1):
self.a1.publications.add(self.p1, self.p2)
@skipIfDBFeature('supports_ignore_conflicts')
def test_add_existing_different_type(self):
# A single SELECT query is necessary to compare existing values to the
# provided one; no INSERT should be attempted.
with self.assertNumQueries(1):
self.a1.publications.add(str(self.p1.pk))
self.assertEqual(self.a1.publications.get(), self.p1)
@skipUnlessDBFeature('supports_ignore_conflicts')
def test_slow_add_ignore_conflicts(self):
manager_cls = self.a1.publications.__class__
# Simulate a race condition between the missing ids retrieval and
# the bulk insertion attempt.
missing_target_ids = {self.p1.id}
# Disable fast-add to test the case where the slow add path is taken.
add_plan = (True, False, False)
with mock.patch.object(manager_cls, '_get_missing_target_ids', return_value=missing_target_ids) as mocked:
with mock.patch.object(manager_cls, '_get_add_plan', return_value=add_plan):
self.a1.publications.add(self.p1)
mocked.assert_called_once()
def test_related_sets(self):
# Article objects have access to their related Publication objects.
self.assertSequenceEqual(self.a1.publications.all(), [self.p1])
self.assertSequenceEqual(
self.a2.publications.all(),
[self.p4, self.p2, self.p3, self.p1],
)
# Publication objects have access to their related Article objects.
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
self.p1.article_set.all(),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Publication.objects.get(id=self.p4.id).article_set.all(),
[self.a2],
)
def test_selects(self):
# We can perform kwarg queries across m2m relationships
self.assertSequenceEqual(
Article.objects.filter(publications__id__exact=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications__pk=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications=self.p1),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications__title__startswith="Science"),
[self.a3, self.a2, self.a2, self.a4]
)
self.assertSequenceEqual(
Article.objects.filter(publications__title__startswith="Science").distinct(),
[self.a3, self.a2, self.a4],
)
# The count() function respects distinct() as well.
self.assertEqual(Article.objects.filter(publications__title__startswith="Science").count(), 4)
self.assertEqual(Article.objects.filter(publications__title__startswith="Science").distinct().count(), 3)
self.assertSequenceEqual(
Article.objects.filter(publications__in=[self.p1.id, self.p2.id]).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
Article.objects.filter(publications__in=[self.p1.id, self.p2]).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
Article.objects.filter(publications__in=[self.p1, self.p2]).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
# Excluding a related item works as you would expect, too (although the SQL
# involved is a little complex).
self.assertSequenceEqual(
Article.objects.exclude(publications=self.p2),
[self.a1],
)
def test_reverse_selects(self):
# Reverse m2m queries are supported (i.e., starting at the table that
# doesn't have a ManyToManyField).
python_journal = [self.p1]
self.assertSequenceEqual(Publication.objects.filter(id__exact=self.p1.id), python_journal)
self.assertSequenceEqual(Publication.objects.filter(pk=self.p1.id), python_journal)
self.assertSequenceEqual(
Publication.objects.filter(article__headline__startswith="NASA"),
[self.p4, self.p2, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(Publication.objects.filter(article__id__exact=self.a1.id), python_journal)
self.assertSequenceEqual(Publication.objects.filter(article__pk=self.a1.id), python_journal)
self.assertSequenceEqual(Publication.objects.filter(article=self.a1.id), python_journal)
self.assertSequenceEqual(Publication.objects.filter(article=self.a1), python_journal)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2.id]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1, self.a2]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
def test_delete(self):
# If we delete a Publication, its Articles won't be able to access it.
self.p1.delete()
self.assertSequenceEqual(
Publication.objects.all(),
[self.p4, self.p2, self.p3],
)
self.assertSequenceEqual(self.a1.publications.all(), [])
# If we delete an Article, its Publications won't be able to access it.
self.a2.delete()
self.assertSequenceEqual(
Article.objects.all(),
[self.a1, self.a3, self.a4],
)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
def test_bulk_delete(self):
# Bulk delete some Publications - references to deleted publications should go
Publication.objects.filter(title__startswith='Science').delete()
self.assertSequenceEqual(
Publication.objects.all(),
[self.p4, self.p1],
)
self.assertSequenceEqual(
Article.objects.all(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
self.a2.publications.all(),
[self.p4, self.p1],
)
# Bulk delete some articles - references to deleted objects should go
q = Article.objects.filter(headline__startswith='Django')
self.assertSequenceEqual(q, [self.a1])
q.delete()
        # delete() clears the QuerySet's result cache, so re-evaluating it
        # shows that the referenced objects are gone.
self.assertSequenceEqual(q, [])
self.assertSequenceEqual(self.p1.article_set.all(), [self.a2])
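    # Illustrative sketch (an addition, not part of the original suite):
    # delete() clears the QuerySet's result cache, which is why re-evaluating
    # `q` above hits the database again and returns no rows.
    def test_bulk_delete_clears_result_cache_sketch(self):
        q = Article.objects.filter(headline__startswith='Django')
        self.assertEqual(len(q), 1)  # evaluating the queryset fills its cache
        q.delete()
        self.assertSequenceEqual(q, [])  # the cache was cleared by delete()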
def test_remove(self):
# Removing publication from an article:
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2, self.a4],
)
self.a4.publications.remove(self.p2)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2],
)
self.assertSequenceEqual(self.a4.publications.all(), [])
# And from the other end
self.p2.article_set.remove(self.a3)
self.assertSequenceEqual(self.p2.article_set.all(), [self.a2])
self.assertSequenceEqual(self.a3.publications.all(), [])
def test_set(self):
self.p2.article_set.set([self.a4, self.a3])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
self.p2.article_set.set([])
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertSequenceEqual(self.a4.publications.all(), [])
self.p2.article_set.set([self.a4, self.a3], clear=True)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id], clear=True)
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
self.p2.article_set.set([], clear=True)
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([], clear=True)
self.assertSequenceEqual(self.a4.publications.all(), [])
def test_set_existing_different_type(self):
# Existing many-to-many relations remain the same for values provided
# with a different type.
ids = set(Publication.article_set.through.objects.filter(
article__in=[self.a4, self.a3],
publication=self.p2,
).values_list('id', flat=True))
self.p2.article_set.set([str(self.a4.pk), str(self.a3.pk)])
new_ids = set(Publication.article_set.through.objects.filter(
publication=self.p2,
).values_list('id', flat=True))
self.assertEqual(ids, new_ids)
def test_assign_forward(self):
msg = (
"Direct assignment to the reverse side of a many-to-many set is "
"prohibited. Use article_set.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.p2.article_set = [self.a4, self.a3]
def test_assign_reverse(self):
msg = (
"Direct assignment to the forward side of a many-to-many "
"set is prohibited. Use publications.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.a1.publications = [self.p1, self.p2]
def test_assign(self):
# Relation sets can be assigned using set().
self.p2.article_set.set([self.a4, self.a3])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
        # An alternative to calling clear() is to pass an empty list to set().
self.p2.article_set.set([])
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertSequenceEqual(self.a4.publications.all(), [])
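    # Illustrative sketch (an addition, not part of the original suite):
    # set([]) and clear() leave the relation in the same, empty state.
    def test_assign_empty_matches_clear_sketch(self):
        self.p2.article_set.set([])
        after_set = list(self.p2.article_set.all())
        self.p2.article_set.add(self.a3)
        self.p2.article_set.clear()
        self.assertEqual(list(self.p2.article_set.all()), after_set)
        self.assertEqual(after_set, [])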
def test_assign_ids(self):
# Relation sets can also be set using primary key values
self.p2.article_set.set([self.a4.id, self.a3.id])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
def test_forward_assign_with_queryset(self):
# Querysets used in m2m assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.a1.publications.set([self.p1, self.p2])
qs = self.a1.publications.filter(title='The Python Journal')
self.a1.publications.set(qs)
self.assertEqual(1, self.a1.publications.count())
self.assertEqual(1, qs.count())
def test_reverse_assign_with_queryset(self):
# Querysets used in M2M assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.p1.article_set.set([self.a1, self.a2])
qs = self.p1.article_set.filter(headline='Django lets you build web apps easily')
self.p1.article_set.set(qs)
self.assertEqual(1, self.p1.article_set.count())
self.assertEqual(1, qs.count())
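    # Illustrative sketch (an addition, not part of the original suite):
    # because set() evaluates the passed queryset up front, even a queryset
    # built from the very relation being reassigned keeps its pre-clear
    # contents.
    def test_assign_with_self_queryset_sketch(self):
        self.a1.publications.set([self.p1, self.p2])
        qs = self.a1.publications.all()  # not yet evaluated
        self.a1.publications.set(qs)
        self.assertEqual(self.a1.publications.count(), 2)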
def test_clear(self):
# Relation sets can be cleared:
self.p2.article_set.clear()
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.assertSequenceEqual(self.a4.publications.all(), [])
# And you can clear from the other end
self.p2.article_set.add(self.a3, self.a4)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.clear()
self.assertSequenceEqual(self.a4.publications.all(), [])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
def test_clear_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
a4.publications.clear()
self.assertSequenceEqual(a4.publications.all(), [])
def test_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
a4.publications.remove(self.p2)
self.assertSequenceEqual(a4.publications.all(), [])
def test_add_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
def test_set_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.set([self.p2, self.p1])
self.assertEqual(a4.publications.count(), 2)
a4.publications.set([self.p1])
self.assertEqual(a4.publications.count(), 1)
def test_add_then_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
a4.publications.remove(self.p1)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
def test_inherited_models_selects(self):
"""
#24156 - Objects from child models where the parent's m2m field uses
related_name='+' should be retrieved correctly.
"""
a = InheritedArticleA.objects.create()
b = InheritedArticleB.objects.create()
a.publications.add(self.p1, self.p2)
self.assertSequenceEqual(
a.publications.all(),
[self.p2, self.p1],
)
self.assertSequenceEqual(b.publications.all(), [])
b.publications.add(self.p3)
self.assertSequenceEqual(
a.publications.all(),
[self.p2, self.p1],
)
self.assertSequenceEqual(b.publications.all(), [self.p3])
def test_custom_default_manager_exists_count(self):
a5 = Article.objects.create(headline='deleted')
a5.publications.add(self.p2)
self.assertEqual(self.p2.article_set.count(), self.p2.article_set.all().count())
self.assertEqual(self.p3.article_set.exists(), self.p3.article_set.all().exists())
|
ba009e2d7191f07be297986e5245921e0342e4363509f0bef678fc30d7a87ddf | from io import BytesIO
from itertools import chain
from urllib.parse import urlencode
from django.core.exceptions import DisallowedHost
from django.core.handlers.wsgi import LimitedStream, WSGIRequest
from django.http import HttpRequest, RawPostDataException, UnreadablePostError
from django.http.multipartparser import MultiPartParserError
from django.http.request import HttpHeaders, split_domain_port
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.client import FakePayload
class RequestsTests(SimpleTestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(list(request.META), [])
# .GET and .POST should be QueryDicts
self.assertEqual(request.GET.urlencode(), '')
self.assertEqual(request.POST.urlencode(), '')
# and FILES should be MultiValueDict
self.assertEqual(request.FILES.getlist('foo'), [])
self.assertIsNone(request.content_type)
self.assertIsNone(request.content_params)
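    # Illustrative sketch (an addition, not part of the original suite):
    # QueryDict, the type behind GET and POST, keeps every value submitted for
    # a repeated key, unlike a plain dict.
    def test_querydict_keeps_repeated_keys_sketch(self):
        from django.http import QueryDict  # local import keeps the sketch self-contained
        qd = QueryDict('a=1&a=2&b=3')
        self.assertEqual(qd['a'], '2')  # item access returns the last value
        self.assertEqual(qd.getlist('a'), ['1', '2'])
        self.assertEqual(qd.urlencode(), 'a=1&a=2&b=3')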
def test_httprequest_full_path(self):
request = HttpRequest()
request.path = '/;some/?awful/=path/foo:bar/'
request.path_info = '/prefix' + request.path
request.META['QUERY_STRING'] = ';some=query&+query=string'
expected = '/%3Bsome/%3Fawful/%3Dpath/foo:bar/?;some=query&+query=string'
self.assertEqual(request.get_full_path(), expected)
self.assertEqual(request.get_full_path_info(), '/prefix' + expected)
def test_httprequest_full_path_with_query_string_and_fragment(self):
request = HttpRequest()
request.path = '/foo#bar'
request.path_info = '/prefix' + request.path
request.META['QUERY_STRING'] = 'baz#quux'
self.assertEqual(request.get_full_path(), '/foo%23bar?baz#quux')
self.assertEqual(request.get_full_path_info(), '/prefix/foo%23bar?baz#quux')
def test_httprequest_repr(self):
request = HttpRequest()
request.path = '/somepath/'
request.method = 'GET'
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), "<HttpRequest: GET '/somepath/'>")
def test_httprequest_repr_invalid_method_and_path(self):
request = HttpRequest()
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.method = "GET"
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.path = ""
self.assertEqual(repr(request), "<HttpRequest>")
def test_wsgirequest(self):
request = WSGIRequest({
'PATH_INFO': 'bogus',
'REQUEST_METHOD': 'bogus',
'CONTENT_TYPE': 'text/html; charset=utf8',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(
set(request.META),
{'PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'CONTENT_TYPE', 'wsgi.input'}
)
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
self.assertEqual(request.content_type, 'text/html')
self.assertEqual(request.content_params, {'charset': 'utf8'})
def test_wsgirequest_with_script_name(self):
"""
The request's path is correctly assembled, regardless of whether or
not the SCRIPT_NAME has a trailing slash (#20169).
"""
# With trailing slash
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/PREFIX/somepath/')
# Without trailing slash
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/PREFIX/somepath/')
def test_wsgirequest_script_url_double_slashes(self):
"""
        WSGI squashes multiple successive slashes in PATH_INFO, so WSGIRequest
        should take that into account when populating request.path and
        request.META['SCRIPT_NAME'] (#17133).
"""
request = WSGIRequest({
'SCRIPT_URL': '/mst/milestones//accounts/login//help',
'PATH_INFO': '/milestones/accounts/login/help',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/mst/milestones/accounts/login/help')
self.assertEqual(request.META['SCRIPT_NAME'], '/mst')
def test_wsgirequest_with_force_script_name(self):
"""
The FORCE_SCRIPT_NAME setting takes precedence over the request's
SCRIPT_NAME environment parameter (#20169).
"""
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
"""
The request's path is correctly assembled, regardless of whether or not
the FORCE_SCRIPT_NAME setting has a trailing slash (#20169).
"""
# With trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
# Without trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_repr(self):
request = WSGIRequest({'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(repr(request), "<WSGIRequest: GET '/'>")
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), "<WSGIRequest: GET '/somepath/'>")
def test_wsgirequest_path_info(self):
def wsgi_str(path_info, encoding='utf-8'):
path_info = path_info.encode(encoding) # Actual URL sent by the browser (bytestring)
path_info = path_info.decode('iso-8859-1') # Value in the WSGI environ dict (native string)
return path_info
# Regression for #19468
request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, "/سلام/")
# The URL may be incorrectly encoded in a non-UTF-8 encoding (#26971)
request = WSGIRequest({
'PATH_INFO': wsgi_str("/café/", encoding='iso-8859-1'),
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
# Since it's impossible to decide the (wrong) encoding of the URL, it's
# left percent-encoded in the path.
self.assertEqual(request.path, "/caf%E9/")
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(), b'te')
# Reading again returns nothing.
self.assertEqual(stream.read(), b'')
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(5), b'te')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read sequentially from a stream
stream = LimitedStream(BytesIO(b'12345678'), 8)
self.assertEqual(stream.read(5), b'12345')
self.assertEqual(stream.read(5), b'678')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read lines from a stream
stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), b'1234\n')
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), b'56')
# Read the rest of the partial line
self.assertEqual(stream.readline(), b'78\n')
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), b'abcd\n')
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), b'efgh')
# Read the next line... just the line end
self.assertEqual(stream.readline(), b'\n')
# Read everything else.
self.assertEqual(stream.readline(), b'ijkl')
# Regression for #15018
# If a stream contains a newline, but the provided length
# is less than the number of provided characters, the newline
# doesn't reset the available character count
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.readline(10), b'1234\n')
self.assertEqual(stream.readline(3), b'abc')
# Now expire the available characters
self.assertEqual(stream.readline(3), b'd')
# Reading again returns nothing.
self.assertEqual(stream.readline(2), b'')
# Same test, but with read, not readline.
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.read(6), b'1234\na')
self.assertEqual(stream.read(2), b'bc')
self.assertEqual(stream.read(2), b'd')
self.assertEqual(stream.read(2), b'')
self.assertEqual(stream.read(), b'')
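    # Illustrative sketch (an addition, not part of the original suite):
    # however the reads are sliced up, LimitedStream never hands out more than
    # `limit` bytes in total.
    def test_limited_stream_total_limit_sketch(self):
        stream = LimitedStream(BytesIO(b'abcdefghij'), 4)
        chunks = [stream.read(3), stream.read(3), stream.read(3)]
        self.assertEqual(chunks, [b'abc', b'd', b''])
        self.assertEqual(b''.join(chunks), b'abcd')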
def test_stream(self):
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload},
)
self.assertEqual(request.read(), b'name=value')
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or body.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['value']})
self.assertEqual(request.body, b'name=value')
self.assertEqual(request.read(), b'name=value')
def test_value_after_read(self):
"""
Construction of POST or body is not allowed after reading
from request.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.read(2), b'na')
with self.assertRaises(RawPostDataException):
request.body
self.assertEqual(request.POST, {})
def test_non_ascii_POST(self):
payload = FakePayload(urlencode({'key': 'España'}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_alternate_charset_POST(self):
"""
Test a POST with non-utf-8 payload encoding.
"""
payload = FakePayload(urlencode({'key': 'España'.encode('latin-1')}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_body_after_POST_multipart_form_data(self):
"""
Reading body after parsing multipart/form-data is not allowed
"""
        # Because multipart is used for large amounts of data, e.g. file uploads,
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['value']})
with self.assertRaises(RawPostDataException):
request.body
def test_body_after_POST_multipart_related(self):
"""
Reading body after parsing multipart that isn't form-data is allowed
"""
# Ticket #9054
# There are cases in which the multipart data is related instead of
# being a binary upload, in which case it should still be accessible
# via body.
payload_data = b"\r\n".join([
b'--boundary',
b'Content-ID: id; name="name"',
b'',
b'value',
b'--boundary--'
b''])
payload = FakePayload(payload_data)
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/related; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {})
self.assertEqual(request.body, payload_data)
def test_POST_multipart_with_content_length_zero(self):
"""
Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
"""
# According to:
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# Every request.POST with Content-Length >= 0 is a valid request,
# this test ensures that we handle Content-Length == 0.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': 0,
'wsgi.input': payload,
})
self.assertEqual(request.POST, {})
def test_POST_binary_only(self):
payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
environ = {
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/octet-stream',
'CONTENT_LENGTH': len(payload),
'wsgi.input': BytesIO(payload),
}
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
# Same test without specifying content-type
environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
def test_read_by_lines(self):
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(list(request), [b'name=value'])
def test_POST_after_body_read(self):
"""
POST should be populated even if body is read first
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read(self):
"""
POST should be populated even if body is read first, and then
the stream is read second.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
self.assertEqual(request.read(1), b'n')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read_multipart(self):
"""
POST should be populated even if body is read first, and then
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
# Consume enough data to mess up the parsing:
self.assertEqual(request.read(13), b'--boundary\r\nC')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_immutable_for_multipart(self):
"""
MultiPartParser.parse() leaves request.POST immutable.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--',
]))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertFalse(request.POST._mutable)
def test_multipart_without_boundary(self):
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data;',
'CONTENT_LENGTH': 0,
'wsgi.input': FakePayload(),
})
with self.assertRaisesMessage(MultiPartParserError, 'Invalid boundary in multipart: None'):
request.POST
def test_multipart_non_ascii_content_type(self):
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary = \xe0',
'CONTENT_LENGTH': 0,
'wsgi.input': FakePayload(),
})
msg = 'Invalid non-ASCII Content-Type in multipart: multipart/form-data; boundary = à'
with self.assertRaisesMessage(MultiPartParserError, msg):
request.POST
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception is identifiable (not a generic OSError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise OSError('kaboom!')
payload = b'name=value'
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload),
})
with self.assertRaises(UnreadablePostError):
request.body
def test_set_encoding_clears_POST(self):
payload = FakePayload('name=Hello Günter')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['Hello Günter']})
request.encoding = 'iso-8859-16'
self.assertEqual(request.POST, {'name': ['Hello GĂŒnter']})
def test_set_encoding_clears_GET(self):
request = WSGIRequest({
'REQUEST_METHOD': 'GET',
'wsgi.input': '',
'QUERY_STRING': 'name=Hello%20G%C3%BCnter',
})
self.assertEqual(request.GET, {'name': ['Hello Günter']})
request.encoding = 'iso-8859-16'
self.assertEqual(request.GET, {'name': ['Hello G\u0102\u0152nter']})
def test_FILES_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
FILES, the exception is identifiable (not a generic OSError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise OSError('kaboom!')
payload = b'x'
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload),
})
with self.assertRaises(UnreadablePostError):
request.FILES
class HostValidationTests(SimpleTestCase):
poisoned_hosts = [
'[email protected]',
'example.com:[email protected]',
'example.com:[email protected]:80',
'example.com:80/badpath',
'example.com: recovermypassword.com',
]
@override_settings(
USE_X_FORWARDED_HOST=False,
ALLOWED_HOSTS=[
'forward.com', 'example.com', 'internal.com', '12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
'.multitenant.com', 'INSENSITIVE.com', '[::ffff:169.254.169.254]',
])
def test_http_get_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is ignored.
self.assertEqual(request.get_host(), 'example.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
'xn--4ca9at.com', # Punycode for öäü.com
'anything.multitenant.com',
'multitenant.com',
'insensitive.com',
'example.com.',
'example.com.:80',
'[::ffff:169.254.169.254]',
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
# Poisoned host headers are rejected as suspicious
for host in chain(self.poisoned_hosts, ['other.com', 'example.com..']):
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
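    # Illustrative sketch (an addition, not part of the original suite):
    # without HTTP_HOST, get_host() falls back to SERVER_NAME and appends the
    # port only when it isn't the standard port for the scheme.
    @override_settings(ALLOWED_HOSTS=['*'])
    def test_get_host_server_name_fallback_sketch(self):
        request = HttpRequest()
        request.META = {'SERVER_NAME': 'internal.com', 'SERVER_PORT': '80'}
        self.assertEqual(request.get_host(), 'internal.com')
        request.META['SERVER_PORT'] = '8042'
        self.assertEqual(request.get_host(), 'internal.com:8042')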
@override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
def test_http_get_host_with_x_forwarded_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is obeyed.
self.assertEqual(request.get_host(), 'forward.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
# Poisoned host headers are rejected as suspicious
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
'xn--4ca9at.com', # Punycode for öäü.com
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
for host in self.poisoned_hosts:
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_PORT=False)
def test_get_port(self):
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
'HTTP_X_FORWARDED_PORT': '80',
}
# Shouldn't use the X-Forwarded-Port header
self.assertEqual(request.get_port(), '8080')
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
}
self.assertEqual(request.get_port(), '8080')
@override_settings(USE_X_FORWARDED_PORT=True)
def test_get_port_with_x_forwarded_port(self):
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
'HTTP_X_FORWARDED_PORT': '80',
}
# Should use the X-Forwarded-Port header
self.assertEqual(request.get_port(), '80')
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
}
self.assertEqual(request.get_port(), '8080')
@override_settings(DEBUG=True, ALLOWED_HOSTS=[])
def test_host_validation_in_debug_mode(self):
"""
If ALLOWED_HOSTS is empty and DEBUG is True, variants of localhost are
allowed.
"""
valid_hosts = ['localhost', 'subdomain.localhost', '127.0.0.1', '[::1]']
for host in valid_hosts:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
self.assertEqual(request.get_host(), host)
# Other hostnames raise a DisallowedHost.
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {'HTTP_HOST': 'example.com'}
request.get_host()
@override_settings(ALLOWED_HOSTS=[])
def test_get_host_suggestion_of_allowed_host(self):
"""get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
msg_invalid_host = "Invalid HTTP_HOST header: %r."
msg_suggestion = msg_invalid_host + " You may need to add %r to ALLOWED_HOSTS."
msg_suggestion2 = msg_invalid_host + " The domain name provided is not valid according to RFC 1034/1035"
for host in [ # Valid-looking hosts
'example.com',
'12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]',
'xn--4ca9at.com', # Punycode for öäü.com
]:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion % (host, host)):
request.get_host()
for domain, port in [ # Valid-looking hosts with a port number
('example.com', 80),
('12.34.56.78', 443),
('[2001:19f0:feee::dead:beef:cafe]', 8080),
]:
host = '%s:%s' % (domain, port)
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion % (host, domain)):
request.get_host()
for host in self.poisoned_hosts:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_invalid_host % host):
request.get_host()
request = HttpRequest()
request.META = {'HTTP_HOST': "invalid_hostname.com"}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion2 % "invalid_hostname.com"):
request.get_host()
def test_split_domain_port_removes_trailing_dot(self):
domain, port = split_domain_port('example.com.:8080')
self.assertEqual(domain, 'example.com')
self.assertEqual(port, '8080')
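    # Illustrative sketch (an addition, not part of the original suite):
    # split_domain_port() leaves the port empty when none is given and keeps
    # the brackets around IPv6 literals.
    def test_split_domain_port_more_cases_sketch(self):
        self.assertEqual(split_domain_port('example.com'), ('example.com', ''))
        self.assertEqual(split_domain_port('[::1]:8080'), ('[::1]', '8080'))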
class BuildAbsoluteURITests(SimpleTestCase):
factory = RequestFactory()
def test_absolute_url(self):
request = HttpRequest()
url = 'https://www.example.com/asdf'
self.assertEqual(request.build_absolute_uri(location=url), url)
def test_host_retrieval(self):
request = HttpRequest()
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(
request.build_absolute_uri(location='/path/with:colons'),
'http://www.example.com/path/with:colons'
)
def test_request_path_begins_with_two_slashes(self):
# //// creates a request with a path beginning with //
request = self.factory.get('////absolute-uri')
tests = (
# location isn't provided
(None, 'http://testserver//absolute-uri'),
# An absolute URL
('http://example.com/?foo=bar', 'http://example.com/?foo=bar'),
# A schema-relative URL
('//example.com/?foo=bar', 'http://example.com/?foo=bar'),
# Relative URLs
('/foo/bar/', 'http://testserver/foo/bar/'),
('/foo/./bar/', 'http://testserver/foo/bar/'),
('/foo/../bar/', 'http://testserver/bar/'),
('///foo/bar/', 'http://testserver/foo/bar/'),
)
for location, expected_url in tests:
with self.subTest(location=location):
self.assertEqual(request.build_absolute_uri(location=location), expected_url)
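    # Illustrative sketch (an addition, not part of the original suite): with
    # no location argument, build_absolute_uri() reproduces the request's own
    # full URL.
    def test_absolute_uri_defaults_to_current_url_sketch(self):
        request = self.factory.get('/path/?q=1')
        self.assertEqual(request.build_absolute_uri(), 'http://testserver/path/?q=1')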
class RequestHeadersTests(SimpleTestCase):
ENVIRON = {
# Non-headers are ignored.
'PATH_INFO': '/somepath/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
# These non-HTTP prefixed headers are included.
'CONTENT_TYPE': 'text/html',
'CONTENT_LENGTH': '100',
# All HTTP-prefixed headers are included.
'HTTP_ACCEPT': '*',
'HTTP_HOST': 'example.com',
'HTTP_USER_AGENT': 'python-requests/1.2.0',
}
def test_base_request_headers(self):
request = HttpRequest()
request.META = self.ENVIRON
self.assertEqual(dict(request.headers), {
'Content-Type': 'text/html',
'Content-Length': '100',
'Accept': '*',
'Host': 'example.com',
'User-Agent': 'python-requests/1.2.0',
})
def test_wsgi_request_headers(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(dict(request.headers), {
'Content-Type': 'text/html',
'Content-Length': '100',
'Accept': '*',
'Host': 'example.com',
'User-Agent': 'python-requests/1.2.0',
})
def test_wsgi_request_headers_getitem(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers['User-Agent'], 'python-requests/1.2.0')
self.assertEqual(request.headers['user-agent'], 'python-requests/1.2.0')
self.assertEqual(request.headers['user_agent'], 'python-requests/1.2.0')
self.assertEqual(request.headers['Content-Type'], 'text/html')
self.assertEqual(request.headers['Content-Length'], '100')
def test_wsgi_request_headers_get(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers.get('User-Agent'), 'python-requests/1.2.0')
self.assertEqual(request.headers.get('user-agent'), 'python-requests/1.2.0')
self.assertEqual(request.headers.get('Content-Type'), 'text/html')
self.assertEqual(request.headers.get('Content-Length'), '100')
class HttpHeadersTests(SimpleTestCase):
def test_basic(self):
environ = {
'CONTENT_TYPE': 'text/html',
'CONTENT_LENGTH': '100',
'HTTP_HOST': 'example.com',
}
headers = HttpHeaders(environ)
self.assertEqual(sorted(headers), ['Content-Length', 'Content-Type', 'Host'])
self.assertEqual(headers, {
'Content-Type': 'text/html',
'Content-Length': '100',
'Host': 'example.com',
})
def test_parse_header_name(self):
tests = (
('PATH_INFO', None),
('HTTP_ACCEPT', 'Accept'),
('HTTP_USER_AGENT', 'User-Agent'),
('HTTP_X_FORWARDED_PROTO', 'X-Forwarded-Proto'),
('CONTENT_TYPE', 'Content-Type'),
('CONTENT_LENGTH', 'Content-Length'),
)
for header, expected in tests:
with self.subTest(header=header):
self.assertEqual(HttpHeaders.parse_header_name(header), expected)
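    # Illustrative sketch (an addition, not part of the original suite):
    # HttpHeaders lookups are case-insensitive, so the canonical and
    # lower-cased names find the same value.
    def test_case_insensitive_lookup_sketch(self):
        headers = HttpHeaders({'HTTP_ACCEPT_LANGUAGE': 'en'})
        self.assertEqual(headers['Accept-Language'], 'en')
        self.assertEqual(headers['accept-language'], 'en')
        self.assertIsNone(headers.get('X-Missing'))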
|
210639a09cad418d5e8c106e37ec4771f01aa00172b4044b4fb6c51122414d3b | from django.db.models import BooleanField, Exists, F, OuterRef, Q
from django.db.models.expressions import RawSQL
from django.test import SimpleTestCase
from .models import Tag
class QTests(SimpleTestCase):
def test_combine_and_empty(self):
q = Q(x=1)
self.assertEqual(q & Q(), q)
self.assertEqual(Q() & q, q)
q = Q(x__in={}.keys())
self.assertEqual(q & Q(), q)
self.assertEqual(Q() & q, q)
def test_combine_and_both_empty(self):
self.assertEqual(Q() & Q(), Q())
def test_combine_or_empty(self):
q = Q(x=1)
self.assertEqual(q | Q(), q)
self.assertEqual(Q() | q, q)
q = Q(x__in={}.keys())
self.assertEqual(q | Q(), q)
self.assertEqual(Q() | q, q)
def test_combine_empty_copy(self):
base_q = Q(x=1)
tests = [
base_q | Q(),
Q() | base_q,
base_q & Q(),
Q() & base_q,
]
for i, q in enumerate(tests):
with self.subTest(i=i):
self.assertEqual(q, base_q)
self.assertIsNot(q, base_q)
def test_combine_or_both_empty(self):
self.assertEqual(Q() | Q(), Q())
def test_combine_not_q_object(self):
obj = object()
q = Q(x=1)
with self.assertRaisesMessage(TypeError, str(obj)):
q | obj
with self.assertRaisesMessage(TypeError, str(obj)):
q & obj
def test_combine_negated_boolean_expression(self):
tagged = Tag.objects.filter(category=OuterRef('pk'))
tests = [
Q() & ~Exists(tagged),
Q() | ~Exists(tagged),
]
for q in tests:
with self.subTest(q=q):
self.assertIs(q.negated, True)
def test_deconstruct(self):
q = Q(price__gt=F('discounted_price'))
path, args, kwargs = q.deconstruct()
self.assertEqual(path, 'django.db.models.Q')
self.assertEqual(args, (('price__gt', F('discounted_price')),))
self.assertEqual(kwargs, {})
def test_deconstruct_negated(self):
q = ~Q(price__gt=F('discounted_price'))
path, args, kwargs = q.deconstruct()
self.assertEqual(args, (('price__gt', F('discounted_price')),))
self.assertEqual(kwargs, {'_negated': True})
def test_deconstruct_or(self):
q1 = Q(price__gt=F('discounted_price'))
q2 = Q(price=F('discounted_price'))
q = q1 | q2
path, args, kwargs = q.deconstruct()
self.assertEqual(args, (
('price__gt', F('discounted_price')),
('price', F('discounted_price')),
))
self.assertEqual(kwargs, {'_connector': 'OR'})
def test_deconstruct_and(self):
q1 = Q(price__gt=F('discounted_price'))
q2 = Q(price=F('discounted_price'))
q = q1 & q2
path, args, kwargs = q.deconstruct()
self.assertEqual(args, (
('price__gt', F('discounted_price')),
('price', F('discounted_price')),
))
self.assertEqual(kwargs, {})
def test_deconstruct_multiple_kwargs(self):
q = Q(price__gt=F('discounted_price'), price=F('discounted_price'))
path, args, kwargs = q.deconstruct()
self.assertEqual(args, (
('price', F('discounted_price')),
('price__gt', F('discounted_price')),
))
self.assertEqual(kwargs, {})
def test_deconstruct_nested(self):
q = Q(Q(price__gt=F('discounted_price')))
path, args, kwargs = q.deconstruct()
self.assertEqual(args, (Q(price__gt=F('discounted_price')),))
self.assertEqual(kwargs, {})
def test_deconstruct_boolean_expression(self):
expr = RawSQL('1 = 1', BooleanField())
q = Q(expr)
_, args, kwargs = q.deconstruct()
self.assertEqual(args, (expr,))
self.assertEqual(kwargs, {})
def test_reconstruct(self):
q = Q(price__gt=F('discounted_price'))
path, args, kwargs = q.deconstruct()
self.assertEqual(Q(*args, **kwargs), q)
def test_reconstruct_negated(self):
q = ~Q(price__gt=F('discounted_price'))
path, args, kwargs = q.deconstruct()
self.assertEqual(Q(*args, **kwargs), q)
def test_reconstruct_or(self):
q1 = Q(price__gt=F('discounted_price'))
q2 = Q(price=F('discounted_price'))
q = q1 | q2
path, args, kwargs = q.deconstruct()
self.assertEqual(Q(*args, **kwargs), q)
def test_reconstruct_and(self):
q1 = Q(price__gt=F('discounted_price'))
q2 = Q(price=F('discounted_price'))
q = q1 & q2
path, args, kwargs = q.deconstruct()
self.assertEqual(Q(*args, **kwargs), q)
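    # Illustrative sketch (an addition, not part of the original suite): the
    # deconstruct()/reconstruct round-trip also holds for a negated
    # combination of Q objects.
    def test_reconstruct_negated_or_sketch(self):
        q = ~(Q(price__gt=F('discounted_price')) | Q(price=F('discounted_price')))
        path, args, kwargs = q.deconstruct()
        self.assertEqual(path, 'django.db.models.Q')
        self.assertEqual(Q(*args, **kwargs), q)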
|
c75d459da1e0cbcd1f95f9443f28183b56014708c4b5789d36c1a059425d87fc | import datetime
import pickle
import sys
import unittest
from operator import attrgetter
from threading import Lock
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import Count, Exists, F, Max, OuterRef, Q
from django.db.models.expressions import RawSQL
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.where import NothingNode, WhereNode
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import (
FK1, Annotation, Article, Author, BaseA, Book, CategoryItem,
CategoryRelationship, Celebrity, Channel, Chapter, Child, ChildObjectA,
Classroom, CommonMixedCaseForeignKeys, Company, Cover, CustomPk,
CustomPkTag, DateTimePK, Detail, DumbCategory, Eaten, Employment,
ExtraInfo, Fan, Food, Identifier, Individual, Item, Job,
JobResponsibilities, Join, LeafA, LeafB, LoopX, LoopZ, ManagedModel,
Member, MixedCaseDbColumnCategoryItem, MixedCaseFieldCategoryItem, ModelA,
ModelB, ModelC, ModelD, MyObject, NamedCategory, Node, Note, NullableName,
Number, ObjectA, ObjectB, ObjectC, OneToOneCategory, Order, OrderItem,
Page, Paragraph, Person, Plaything, PointerA, Program, ProxyCategory,
ProxyObjectA, ProxyObjectB, Ranking, Related, RelatedIndividual,
RelatedObject, Report, ReportComment, ReservedName, Responsibility, School,
SharedConnection, SimpleCategory, SingleObject, SpecialCategory, Staff,
StaffUser, Student, Tag, Task, Teacher, Ticket21203Child,
Ticket21203Parent, Ticket23605A, Ticket23605B, Ticket23605C, TvChef, Valid,
X,
)
class Queries1Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.nc1 = generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
cls.t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
cls.n2 = Note.objects.create(note='n2', misc='bar', id=2)
cls.n3 = Note.objects.create(note='n3', misc='foo', id=3, negate=False)
cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
cls.ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name='a2', tag=cls.t4)
ann2.notes.add(cls.n2, cls.n3)
# Create these out of order so that sorting by 'id' will be different to sorting
# by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(info='e2', note=cls.n2, value=41, filterable=False)
e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
cls.a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3)
cls.i1.tags.set([cls.t1, cls.t2])
cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=cls.n2)
cls.i2.tags.set([cls.t1, cls.t3])
cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3)
cls.i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3)
cls.i4.tags.set([cls.t4])
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
cls.r2 = Report.objects.create(name='r2', creator=cls.a3)
cls.r3 = Report.objects.create(name='r3')
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
# will be rank3, rank2, rank1.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
cls.c1 = Cover.objects.create(title="first", item=cls.i4)
cls.c2 = Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'})
self.assertIn('v0', str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn('w0', str(qs4.query).lower())
        # As a result, the reused 'U0' alias is referenced in both the SELECT
        # and the WHERE clauses, for four 'u0.' occurrences in total.
self.assertEqual(str(qs4.query).lower().count('u0.'), 4)
def test_ticket1050(self):
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=True),
[self.i3],
)
self.assertSequenceEqual(
Item.objects.filter(tags__id__isnull=True),
[self.i3],
)
def test_ticket1801(self):
self.assertSequenceEqual(
Author.objects.filter(item=self.i2),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i3),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
[self.a2],
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).order_by('name'),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)),
[self.i1],
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)),
[]
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)),
[]
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertSequenceEqual(list(qs), [self.a2])
        self.assertEqual(qs.query.count_active_tables(), 2)
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
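    # Illustrative sketch (an addition, not part of the original suite):
    # conditions inside a single filter() call must be satisfied by one and
    # the same tag, while chained filter() calls may be satisfied by different
    # tags on the same item.
    def test_single_vs_chained_filter_sketch(self):
        self.assertSequenceEqual(
            Item.objects.filter(tags=self.t1, tags__parent=self.t1),
            [],
        )
        self.assertSequenceEqual(
            Item.objects.filter(tags=self.t1).filter(tags__parent=self.t1).order_by('name'),
            [self.i1, self.i2],
        )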
def test_ticket4464(self):
self.assertSequenceEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
[self.i2],
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3],
[self.i1, self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3],
[self.i1, self.i2],
)
def test_tickets_2080_3592(self):
self.assertSequenceEqual(
Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name='one') | Q(name='a3')),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(name='a3') | Q(item__name='one')),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name='three') | Q(report__name='r3')),
[self.a2],
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertSequenceEqual(
Author.objects.filter(Q(id__in=[]) | Q(id__in=[])),
[]
)
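    # Illustrative sketch (an addition, not part of the original suite):
    # OR-ing an always-empty condition with a real one keeps the real
    # constraint instead of matching everything.
    def test_empty_or_nonempty_sketch(self):
        self.assertSequenceEqual(
            Author.objects.filter(Q(id__in=[]) | Q(name='a1')),
            [self.a1],
        )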
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values('creator').distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name='two').values('creator', 'name').distinct().count(),
4
)
self.assertEqual(
(
Item.objects
.exclude(name='two')
.extra(select={'foo': '%s'}, select_params=(1,))
.values('creator', 'name', 'foo')
.distinct()
.count()
),
4
)
self.assertEqual(
(
Item.objects
.exclude(name='two')
.extra(select={'foo': '%s'}, select_params=(1,))
.values('creator', 'name')
.distinct()
.count()
),
4
)
xx.delete()
def test_ticket7323(self):
self.assertEqual(Item.objects.values('creator', 'name').count(), 4)
def test_ticket2253(self):
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2])
self.assertSequenceEqual(q2, [self.i1])
self.assertSequenceEqual(
(q1 | q2).order_by('name'),
[self.i4, self.i1, self.i3, self.i2],
)
self.assertSequenceEqual((q1 & q2).order_by('name'), [self.i1])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertSequenceEqual(
((q1 & q2) | q3).order_by('name'),
[self.i4, self.i1],
)
def test_order_by_tables(self):
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by('name').query
self.assertEqual(len([
t for t in combined_query.alias_map if combined_query.alias_refcount[t]
]), 1)
def test_order_by_join_unref(self):
"""
This test is related to the above one, testing that there aren't
old JOINs in the query.
"""
qs = Celebrity.objects.order_by('greatest_fan__fan_of')
self.assertIn('OUTER JOIN', str(qs.query))
qs = qs.order_by('id')
self.assertNotIn('OUTER JOIN', str(qs.query))
def test_get_clears_ordering(self):
"""
get() should clear ordering for optimization purposes.
"""
with CaptureQueriesContext(connection) as captured_queries:
Author.objects.order_by('name').get(pk=self.a1.pk)
self.assertNotIn('order by', captured_queries[0]['sql'].lower())
def test_tickets_4088_4306(self):
self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1])
self.assertSequenceEqual(
Report.objects.filter(creator__num=1001),
[self.r1]
)
self.assertSequenceEqual(Report.objects.filter(creator__id=1001), [])
self.assertSequenceEqual(
Report.objects.filter(creator__id=self.a1.id),
[self.r1]
)
self.assertSequenceEqual(
Report.objects.filter(creator__name='a1'),
[self.r1]
)
def test_ticket4510(self):
self.assertSequenceEqual(
Author.objects.filter(report__name='r1'),
[self.a1],
)
def test_ticket7378(self):
self.assertSequenceEqual(self.a1.report_set.all(), [self.r1])
def test_tickets_5324_6704(self):
self.assertSequenceEqual(
Item.objects.filter(tags__name='t4'),
[self.i4],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct(),
[self.i1, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(),
[self.i2, self.i3, self.i1],
)
self.assertSequenceEqual(
Author.objects.exclude(item__name='one').distinct().order_by('name'),
[self.a2, self.a3, self.a4],
)
        # Excluding across an m2m relation when more than one related object is
        # associated used to be problematic.
self.assertSequenceEqual(
Item.objects.exclude(tags__name='t1').order_by('name'),
[self.i4, self.i3],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'),
[self.i3],
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3))
self.assertEqual(
len([
x for x in qs.query.alias_map.values()
if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]
]),
1
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertSequenceEqual(
Tag.objects.filter(parent__isnull=True).order_by('name'),
[self.t1]
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__isnull=True).order_by('name'),
[self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'),
[self.t4, self.t5],
)
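    # Illustrative sketch (an addition, not part of the original suite):
    # excluding across the m2m removes every item that has *any* matching tag,
    # so filter() and exclude() with the same condition partition the items.
    def test_exclude_complements_filter_sketch(self):
        tagged = set(Item.objects.filter(tags__name='t1'))
        not_tagged = set(Item.objects.exclude(tags__name='t1'))
        self.assertEqual(tagged & not_tagged, set())
        self.assertEqual(tagged | not_tagged, set(Item.objects.all()))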
def test_ticket2091(self):
t = Tag.objects.get(name='t4')
self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4])
def test_avoid_infinite_loop_on_too_many_subqueries(self):
x = Tag.objects.filter(pk=1)
local_recursion_limit = sys.getrecursionlimit() // 16
msg = 'Maximum recursion depth exceeded: too many subqueries.'
with self.assertRaisesMessage(RecursionError, msg):
for i in range(local_recursion_limit + 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases, {
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD',
'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN',
}
)
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
msg = 'Cannot combine queries on two different base models.'
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() & Tag.objects.all()
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() | Tag.objects.all()
def test_ticket3141(self):
self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4)
self.assertEqual(
Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(),
4
)
def test_ticket2400(self):
self.assertSequenceEqual(
Author.objects.filter(item__isnull=True),
[self.a3],
)
self.assertSequenceEqual(
Tag.objects.filter(item__isnull=True),
[self.t5],
)
def test_ticket2496(self):
self.assertSequenceEqual(
Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1],
[self.i4],
)
def test_error_raised_on_filter_with_dictionary(self):
with self.assertRaisesMessage(FieldError, 'Cannot parse keyword query as dict'):
Note.objects.filter({'note': 'n1', 'misc': 'foo'})
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertSequenceEqual(
Item.objects.order_by('note__note', 'name'),
[self.i2, self.i4, self.i1, self.i3],
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertSequenceEqual(
Author.objects.order_by('extra', '-name'),
[self.a2, self.a1, self.a4, self.a3],
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2])
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertSequenceEqual(
Item.objects.order_by('creator', 'name'),
[self.i1, self.i3, self.i2, self.i4],
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=False).order_by('tags', 'id'),
[self.i1, self.i2, self.i1, self.i2, self.i4],
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by('name')
self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2])
self.assertEqual(len(qs.query.alias_map), 1)
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by('note__note', 'name')
self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3])
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
self.assertEqual(repr(qs[0].note), '<Note: n2>')
self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>')
def test_ticket3037(self):
self.assertSequenceEqual(
Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')),
[self.i4],
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertSequenceEqual(
Note.objects.values('misc').distinct().order_by('note', '-misc'),
[{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}]
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn('note_id', ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertSequenceEqual(ExtraInfo.objects.values('note_id'), [{'note_id': 1}, {'note_id': 2}])
# ...or use the field name.
self.assertSequenceEqual(ExtraInfo.objects.values('note'), [{'note': 1}, {'note': 2}])
def test_ticket6154(self):
# Multiple filter statements are joined using "AND" all the time.
self.assertSequenceEqual(
Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)),
[self.a1],
)
self.assertSequenceEqual(
Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id),
[self.a1],
)
def test_ticket6981(self):
self.assertSequenceEqual(
Tag.objects.select_related('parent').order_by('name'),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_ticket9926(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent", "category").order_by('name'),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.select_related('parent', "parent__category").order_by('name'),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1)
self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2)
self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2)
self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0))
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertSequenceEqual(
Item.objects.datetimes('created', 'day').extra(select={'a': 1}),
[datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)],
)
self.assertSequenceEqual(
Item.objects.extra(select={'a': 1}).datetimes('created', 'day'),
[datetime.datetime(2007, 12, 19, 0, 0), datetime.datetime(2007, 12, 20, 0, 0)],
)
name = "one"
self.assertSequenceEqual(
Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
self.assertSequenceEqual(
Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_ticket7155(self):
# Nullable dates
self.assertSequenceEqual(
Item.objects.datetimes('modified', 'day'),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_order_by_rawsql(self):
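        # order_by() accepts a RawSQL expression alongside ordinary field
        # names.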
self.assertSequenceEqual(
Item.objects.values('note__note').order_by(
RawSQL('queries_note.note', ()),
'id',
),
[
{'note__note': 'n2'},
{'note__note': 'n3'},
{'note__note': 'n3'},
{'note__note': 'n3'},
],
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertSequenceEqual(
Tag.objects.filter(parent=self.t1, name='t3').order_by('name'),
[self.t3],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'),
[self.t1, self.t2, self.t4, self.t5],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'),
[self.i4, self.i3],
)
# More twisted cases, involving nested negations.
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one')),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'),
[self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'),
[self.i4, self.i1, self.i3],
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(
query2.get_compiler(qs.db).as_sql()[0],
query
)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer('name', 'creator')
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
def test_ticket7277(self):
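        # The reverse annotation_set can be filtered with a disjunction that
        # walks several levels of the tag hierarchy (tag, tag__children,
        # tag__children__children).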
self.assertSequenceEqual(
self.n1.annotation_set.filter(
Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5)
),
[self.ann1],
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertSequenceEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
[self.i1, self.i2],
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal='m')
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertQuerysetEqual(q.all(), [])
self.assertQuerysetEqual(q.filter(meal='m'), [])
self.assertQuerysetEqual(q.exclude(meal='m'), [])
self.assertQuerysetEqual(q.complex_filter({'pk': 1}), [])
self.assertQuerysetEqual(q.select_related('food'), [])
self.assertQuerysetEqual(q.annotate(Count('food')), [])
self.assertQuerysetEqual(q.order_by('meal', 'food'), [])
self.assertQuerysetEqual(q.distinct(), [])
self.assertQuerysetEqual(
q.extra(select={'foo': "1"}),
[]
)
self.assertQuerysetEqual(q.reverse(), [])
q.query.low_mark = 1
msg = 'Cannot change a query once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
q.extra(select={'foo': "1"})
self.assertQuerysetEqual(q.defer('meal'), [])
self.assertQuerysetEqual(q.only('meal'), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(
len(Note.objects.order_by('extrainfo__info').distinct()),
3
)
# Pickling of QuerySets using datetimes() should work.
qs = Item.objects.datetimes('created', 'month')
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertSequenceEqual(
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')),
[self.t2, self.t3],
)
# Multi-valued values() and values_list() querysets should raise errors.
with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'):
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id'))
with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'):
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id'))
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertSequenceEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{'id': 1}, {'id': 2}, {'id': 3}]
)
self.assertSequenceEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')),
[self.ann1],
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
yield n_obj.pk
self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
# Queries used in an __in clause don't execute subqueries
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertSequenceEqual(qs, [self.a1, self.a2])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertSequenceEqual(qs, [self.a3, self.a4])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertSequenceEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name='a1')),
[self.a1],
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertSequenceEqual(
Item.objects.exclude(modified=self.time1).order_by('name'),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__name=self.t1.name),
[self.t1, self.t4, self.t5],
)
def test_ticket7181(self):
        # Ordering by related tables should accommodate nullable fields. (This
        # test is a little tricky, since NULL ordering is database-dependent,
        # so we just count the number of results.)
self.assertEqual(len(Tag.objects.order_by('parent__name')), 5)
# Empty querysets can be merged with others.
self.assertSequenceEqual(
Note.objects.none() | Note.objects.all(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.all() | Note.objects.none(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), [])
self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertSequenceEqual(
Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)),
[self.a2],
)
self.assertSequenceEqual(
Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')),
[self.ann1],
)
xx = ExtraInfo.objects.create(info='xx', note=self.n3)
self.assertSequenceEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
[self.n1, self.n3],
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]),
1
)
def test_ticket17429(self):
"""
Meta.ordering=None works the same as Meta.ordering=[]
"""
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertCountEqual(
Tag.objects.all(),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4'),
Item.objects.filter(~Q(tags__name='t4')))
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')),
Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3'))))
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')),
Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3'))))
def test_nested_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t4')),
Item.objects.filter(~~Q(tags__name='t4')))
def test_double_exclude(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
Item.objects.filter(~~Q(tags__name='t4')))
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
Item.objects.filter(~Q(~Q(tags__name='t4'))))
def test_exclude_in(self):
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name__in=['t4', 't3'])),
Item.objects.filter(~Q(tags__name__in=['t4', 't3'])))
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name__in=['t4', 't3'])),
Item.objects.filter(~~Q(tags__name__in=['t4', 't3'])))
def test_ticket_10790_1(self):
        # Querying direct fields with isnull should trim the left outer join.
        # It also should not create an INNER JOIN.
q = Tag.objects.filter(parent__isnull=True)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
# Querying without isnull should not convert anything to left outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_3(self):
# Querying via indirect fields should populate the left outer join
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
# join to dumbcategory ptr_id
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertSequenceEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertSequenceEqual(q, [self.nc1])
def test_ticket_10790_4(self):
# Querying across m2m field should not strip the m2m table from join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertSequenceEqual(q, [self.a2, self.a3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2)
self.assertNotIn('INNER JOIN', str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_5(self):
# Querying with isnull=False across m2m field should not create outer joins
q = Author.objects.filter(item__tags__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a4])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.a4])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 4)
def test_ticket_10790_6(self):
        # Querying with isnull=True across an m2m field should not create
        # inner joins and should strip the last outer join.
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertSequenceEqual(
q,
[self.a1, self.a1, self.a2, self.a2, self.a2, self.a3],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_7(self):
# Reverse querying with isnull should not strip the join
q = Author.objects.filter(item__isnull=True)
self.assertSequenceEqual(q, [self.a3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_8(self):
# Querying with combined q-objects should also strip the left outer join
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_combine(self):
# Combining queries should not re-populate the left outer join
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q1 & q2
self.assertSequenceEqual(q3, [])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
def test_ticket19672(self):
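        # An isnull filter combined with a negated Q() on a nullable related
        # chain returns the expected rows.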
self.assertSequenceEqual(
Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)),
[self.r1],
)
def test_ticket_20250(self):
# A negated Q along with an annotated queryset failed in Django 1.4
qs = Author.objects.annotate(Count('item'))
qs = qs.filter(~Q(extra__value=0)).order_by('name')
self.assertIn('SELECT', str(qs.query))
self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4])
def test_lookup_constraint_fielderror(self):
msg = (
"Cannot resolve keyword 'unknown_field' into field. Choices are: "
"annotation, category, category_id, children, id, item, "
"managedmodel, name, note, parent, parent_id"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(unknown_field__name='generic')
def test_common_mixed_case_foreign_keys(self):
"""
Valid query should be generated when fields fetched from joined tables
include FKs whose names only differ by case.
"""
c1 = SimpleCategory.objects.create(name='c1')
c2 = SimpleCategory.objects.create(name='c2')
c3 = SimpleCategory.objects.create(name='c3')
category = CategoryItem.objects.create(category=c1)
mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(CaTeGoRy=c2)
mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(category=c3)
CommonMixedCaseForeignKeys.objects.create(
category=category,
mixed_case_field_category=mixed_case_field_category,
mixed_case_db_column_category=mixed_case_db_column_category,
)
qs = CommonMixedCaseForeignKeys.objects.values(
'category',
'mixed_case_field_category',
'mixed_case_db_column_category',
'category__category',
'mixed_case_field_category__CaTeGoRy',
'mixed_case_db_column_category__category',
)
self.assertTrue(qs.first())
def test_excluded_intermediary_m2m_table_joined(self):
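        # Excluding a condition that spans the tags m2m relation must keep
        # the intermediary m2m table joined.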
self.assertSequenceEqual(
Note.objects.filter(~Q(tag__annotation__name=F('note'))),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.filter(tag__annotation__name='a1').filter(~Q(tag__annotation__name=F('note'))),
[],
)
def test_field_with_filterable(self):
self.assertSequenceEqual(
Author.objects.filter(extra=self.e2),
[self.a3, self.a4],
)
def test_negate_field(self):
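        # A field that happens to be named 'negate' can be used in filter()
        # and exclude() like any other field.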
self.assertSequenceEqual(
Note.objects.filter(negate=True),
[self.n1, self.n2],
)
self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3])


class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.num4 = Number.objects.create(num=4)
cls.num8 = Number.objects.create(num=8)
cls.num12 = Number.objects.create(num=12)
def test_ticket4289(self):
        # A slight variation on restricting the filtering choices by the
        # lookup constraints.
self.assertSequenceEqual(Number.objects.filter(num__lt=4), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertSequenceEqual(
Number.objects.filter(num__gt=8, num__lt=13),
[self.num12],
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)),
[]
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)),
[]
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)),
[]
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
[self.num8],
)
def test_ticket12239(self):
# Custom lookups are registered to round float values correctly on gte
# and lt IntegerField queries.
self.assertSequenceEqual(
Number.objects.filter(num__gt=11.9),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gt=12), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), [])
self.assertCountEqual(
Number.objects.filter(num__lt=12),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.0),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=11.9),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12.0),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), [])
self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), [])
self.assertCountEqual(
Number.objects.filter(num__lte=11.9),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.0),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.9),
[self.num4, self.num8, self.num12],
)
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())


class Queries3Tests(TestCase):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertQuerysetEqual(Valid.objects.all(), [])
def test_datetimes_invalid_field(self):
# An error should be raised when QuerySet.datetimes() is passed the
# wrong type of field.
msg = "'name' isn't a DateField, TimeField, or DateTimeField."
with self.assertRaisesMessage(TypeError, msg):
Item.objects.datetimes('name', 'month')
def test_ticket22023(self):
with self.assertRaisesMessage(TypeError, "Cannot call only() after .values() or .values_list()"):
Valid.objects.values().only()
with self.assertRaisesMessage(TypeError, "Cannot call defer() after .values() or .values_list()"):
Valid.objects.values().defer()


class Queries4Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
n1 = Note.objects.create(note='n1', misc='foo')
n2 = Note.objects.create(note='n2', misc='bar')
e1 = ExtraInfo.objects.create(info='e1', note=n1)
e2 = ExtraInfo.objects.create(info='e2', note=n2)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
cls.r2 = Report.objects.create(name='r2', creator=cls.a3)
cls.r3 = Report.objects.create(name='r3')
cls.i1 = Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1)
cls.i2 = Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3)
def test_ticket24525(self):
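        # Intersecting (&) a reverse-relation queryset with an
        # exclude(notes__in=...) queryset returns only the annotations that
        # match both.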
tag = Tag.objects.create()
anth100 = tag.note_set.create(note='ANTH', misc='100')
math101 = tag.note_set.create(note='MATH', misc='101')
s1 = tag.annotation_set.create(name='1')
s2 = tag.annotation_set.create(name='2')
s1.notes.set([math101, anth100])
s2.notes.set([math101])
result = math101.annotation_set.all() & tag.annotation_set.exclude(notes__in=[anth100])
self.assertEqual(list(result), [s2])
def test_ticket11811(self):
unsaved_category = NamedCategory(name="Other")
msg = 'Unsaved model instance <NamedCategory: Other> cannot be used in an ORM query.'
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
# Note: when combining the query we need to have information available
# about the join type of the trimmed "creator__isnull" join. If we
# don't have that information, then the join is created as INNER JOIN
# and results will be incorrect.
q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1'))
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1'))
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True))
q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True))
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by()
q2 = (
Item.objects
.filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1'))
.order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by()
q2 = (
Item.objects.filter(Q(creator__report__name='e1')).order_by() |
Item.objects.filter(Q(creator=self.a1)).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
# Joins having identical connections are correctly recreated in the
# rhs query, in case the query is ORed together (#18748).
Report.objects.create(name='r4', creator=self.a1)
q1 = Author.objects.filter(report__name='r5')
q2 = Author.objects.filter(report__name='r4').filter(report__name='r1')
combined = q1 | q2
self.assertEqual(str(combined.query).count('JOIN'), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, 'a1')
def test_combine_or_filter_reuse(self):
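        # get() works on a queryset built by ORing two filtered querysets.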
combined = Author.objects.filter(name='a1') | Author.objects.filter(name='a3')
self.assertEqual(combined.get(name='a1'), self.a1)
def test_join_reuse_order(self):
# Join aliases are reused in order. This shouldn't raise AssertionError
# because change_map contains a circular reference (#26522).
s1 = School.objects.create()
s2 = School.objects.create()
s3 = School.objects.create()
t1 = Teacher.objects.create()
otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1)
qs1 = otherteachers.filter(schools=s1).filter(schools=s2)
qs2 = otherteachers.filter(schools=s1).filter(schools=s3)
self.assertQuerysetEqual(qs1 | qs2, [])
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL.
ManagedModel.objects.create(data='mm1', tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data='mm'), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ''
else:
expected_null_charfield_repr = None
self.assertSequenceEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by("name"),
['e1', 'e2', expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertSequenceEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
[self.r1, self.r2, self.r3]
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, 'd2')
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by('name')
self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
        # It is possible to order by the reverse of a foreign key, although
        # that can lead to duplicate results.
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertSequenceEqual(SimpleCategory.objects.order_by('categoryitem', 'pk'), [c1, c2, c1])
def test_filter_reverse_non_integer_pk(self):
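        # Filtering through a reverse relation works when the related model
        # (DateTimePK) uses a non-integer primary key.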
date_obj = DateTimePK.objects.create()
extra_obj = ExtraInfo.objects.create(info='extra', date=date_obj)
self.assertEqual(
DateTimePK.objects.filter(extrainfo=extra_obj).get(),
date_obj,
)
def test_ticket10181(self):
# Avoid raising an EmptyResultSet if an inner query is probably
# empty (and hence, not executed).
self.assertQuerysetEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])),
[]
)
def test_ticket15316_filter_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False).order_by('pk')
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True).order_by('pk')
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])


class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the
# Meta.ordering will be rank3, rank2, rank1.
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
cls.n2 = Note.objects.create(note='n2', misc='bar', id=2)
e1 = ExtraInfo.objects.create(info='e1', note=cls.n1)
e2 = ExtraInfo.objects.create(info='e2', note=cls.n2)
a1 = Author.objects.create(name='a1', num=1001, extra=e1)
a2 = Author.objects.create(name='a2', num=2002, extra=e1)
a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.rank2 = Ranking.objects.create(rank=2, author=a2)
cls.rank1 = Ranking.objects.create(rank=1, author=a3)
cls.rank3 = Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
# Cross model ordering is possible in Meta, too.
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
self.assertSequenceEqual(
Ranking.objects.all().order_by('rank'),
[self.rank1, self.rank2, self.rank3],
)
        # Ordering of extra() pieces is possible, too, and you can mix extra
        # fields and model fields in the ordering.
self.assertSequenceEqual(
Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']),
[self.rank1, self.rank2, self.rank3],
)
sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank')
qs = Ranking.objects.extra(select={'good': sql})
self.assertEqual(
[o.good for o in qs.extra(order_by=('-good',))],
[True, False, False]
)
self.assertSequenceEqual(
qs.extra(order_by=('-good', 'id')),
[self.rank3, self.rank2, self.rank1],
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values('id', 'rank').order_by('id')
self.assertEqual(
[d['rank'] for d in dicts],
[2, 1, 3]
)
def test_ticket7256(self):
# An empty values() call includes all aliases, including those from an
# extra()
sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank')
qs = Ranking.objects.extra(select={'good': sql})
dicts = qs.values().order_by('id')
for d in dicts:
del d['id']
del d['author_id']
self.assertEqual(
[sorted(d.items()) for d in dicts],
[[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]]
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=['django_site'])
qs.query.get_compiler(qs.db).as_sql()
# test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
author_start = Author.objects.get(name='a1')
ranking_start = Ranking.objects.get(author__name='a1')
# Make sure that the IDs from different tables don't happen to match.
self.assertSequenceEqual(
Ranking.objects.filter(author__name='a1'),
[self.rank3],
)
self.assertEqual(
Ranking.objects.filter(author__name='a1').update(rank=4636),
1
)
r = Ranking.objects.get(author__name='a1')
self.assertEqual(r.id, ranking_start.id)
self.assertEqual(r.author.id, author_start.id)
self.assertEqual(r.rank, 4636)
r.rank = 3
r.save()
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertSequenceEqual(
Note.objects.exclude(Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q() | ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() & ~Q()),
[self.n1, self.n2],
)
def test_extra_select_literal_percent_s(self):
# Allow %%s to escape select clauses
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s'"})[0].foo,
'%s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo,
'%s bar %s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo,
'bar %s'
)


class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertQuerysetEqual(X.objects.all(), [])
self.assertQuerysetEqual(X.objects.select_related(), [])


class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)


class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerysetEqual(Related.objects.order_by('custom'), [])


class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
        # Ordering by a model related via nullable relations(!) should use
        # outer joins, so that all results are included.
p1 = Plaything.objects.create(name="p1")
self.assertSequenceEqual(Plaything.objects.all(), [p1])
def test_join_already_in_query(self):
# Ordering by model related to nullable relations should not change
# the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name='s')
r = RelatedObject.objects.create(single=s, f=1)
p2 = Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk')
self.assertNotIn('JOIN', str(qs.query))
qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk')
self.assertIn('INNER', str(qs.query))
qs = qs.order_by('others__single__name')
        # The ordering by others__single__name adds one new join (to single)
        # and that join must be a LEFT join. The already existing join to
        # related objects must be kept INNER. So, we have both an INNER and
        # a LEFT join in the query.
self.assertEqual(str(qs.query).count('LEFT'), 1)
self.assertEqual(str(qs.query).count('INNER'), 1)
self.assertSequenceEqual(qs, [p2])


class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
cls.e1 = ExtraInfo.objects.create(info='e1', note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
# For the purposes of this regression test, it's important that there is no
# Join object related to the LeafA we create.
l1 = LeafA.objects.create(data='first')
self.assertSequenceEqual(LeafA.objects.all(), [l1])
self.assertSequenceEqual(
LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')),
[l1],
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertSequenceEqual(
(ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1),
[self.e1],
)
self.assertSequenceEqual(
(ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1),
[self.e1],
)


class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
cls.t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
n1 = Note.objects.create(note='n1', misc='foo', id=1)
cls.ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
cls.ann1.notes.add(n1)
cls.ann2 = Annotation.objects.create(name='a2', tag=cls.t4)
def test_parallel_iterators(self):
# Parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), '<Tag: t1>')
self.assertEqual(repr(next(i1)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t1>')
self.assertEqual(repr(next(i2)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t3>')
self.assertEqual(repr(next(i1)), '<Tag: t3>')
qs = X.objects.all()
self.assertFalse(qs)
self.assertFalse(qs)
def test_nested_queries_sql(self):
# Nested queries should not evaluate the inner query as part of constructing the
# SQL (so we should see a nested query here, indicated by two "SELECT" calls).
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(
qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'),
2
)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# preemptively discovered cases).
self.assertSequenceEqual(
PointerA.objects.filter(connection__pointerb__id=1),
[]
)
self.assertSequenceEqual(
PointerA.objects.exclude(connection__pointerb__id=1),
[]
)
self.assertSequenceEqual(
Tag.objects.exclude(children=None),
[self.t1, self.t3],
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertSequenceEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
[self.t1, self.t4, self.t5],
)
        # The annotation->tag link is single-valued and the tag->children link
        # is multi-valued, so the exclude filter has to be split in the middle
        # and the inner query optimized without losing results.
self.assertSequenceEqual(
Annotation.objects.exclude(tag__children__name="t2"),
[self.ann2],
)
        # Nested queries are possible (although they should be used with care,
        # since they have performance problems on backends like MySQL).
self.assertSequenceEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
[self.ann1],
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by('name')
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
qs = Tag.objects.exclude(category=None).exclude(category__name='foo')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 1)
def test_distinct_ordered_sliced_subquery_aggregation(self):
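        # count() works on a queryset that is distinct, ordered, and sliced.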
self.assertEqual(Tag.objects.distinct().order_by('category__name')[:3].count(), 3)
def test_multiple_columns_with_the_same_name_slice(self):
self.assertEqual(
list(Tag.objects.order_by('name').values_list('name', 'category__name')[:2]),
[('t1', 'Generic'), ('t2', 'Generic')],
)
self.assertSequenceEqual(
Tag.objects.order_by('name').select_related('category')[:2],
[self.t1, self.t2],
)
self.assertEqual(
list(Tag.objects.order_by('-name').values_list('name', 'parent__name')[:2]),
[('t5', 't3'), ('t4', 't3')],
)
self.assertSequenceEqual(
Tag.objects.order_by('-name').select_related('parent')[:2],
[self.t5, self.t4],
)
def test_col_alias_quoted(self):
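        # Column aliases generated for the subquery (e.g. 'col1') must be
        # quoted with the backend's quote_name().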
with CaptureQueriesContext(connection) as captured_queries:
self.assertEqual(
Tag.objects.values('parent').annotate(
tag_per_parent=Count('pk'),
).aggregate(Max('tag_per_parent')),
{'tag_per_parent__max': 2},
)
sql = captured_queries[0]['sql']
self.assertIn('AS %s' % connection.ops.quote_name('col1'), sql)


class RawQueriesTests(TestCase):
@classmethod
def setUpTestData(cls):
Note.objects.create(note='n1', misc='foo', id=1)
def test_ticket14729(self):
        # Test the representation of a raw query with one or more parameters
        # passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ['n1']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>")
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ['n1', 'foo']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>")


class GeneratorExpressionTests(SimpleTestCase):
def test_ticket10432(self):
# Using an empty iterator as the rvalue for an "__in"
# lookup is legal.
self.assertCountEqual(Note.objects.filter(pk__in=iter(())), [])


class ComparisonTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
e1 = ExtraInfo.objects.create(info='e1', note=cls.n1)
cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
item_ab = Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
item_xy = Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="x%Y"),
[item_xy],
)
self.assertSequenceEqual(
Item.objects.filter(name__istartswith="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iendswith="A_b"),
[item_ab],
)


class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
        # OK, so the exists query worked, but did it include too many columns?
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]['sql']
id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name')
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_ticket_18414(self):
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@skipUnlessDBFeature('can_distinct_on_fields')
def test_ticket_18414_distinct_on(self):
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct('name').exists())
self.assertTrue(Article.objects.distinct('name')[1:2].exists())
self.assertFalse(Article.objects.distinct('name')[2:3].exists())


class QuerysetOrderedTests(unittest.TestCase):
"""
    Tests for the QuerySet.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertIs(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertIs(Tag.objects.all().ordered, True)
self.assertIs(Tag.objects.all().order_by().ordered, False)
def test_explicit_ordering(self):
self.assertIs(Annotation.objects.all().order_by('id').ordered, True)
def test_empty_queryset(self):
self.assertIs(Annotation.objects.none().ordered, True)
def test_order_by_extra(self):
self.assertIs(Annotation.objects.all().extra(order_by=['id']).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count('notes'))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by('num_notes').ordered, True)
def test_annotated_default_ordering(self):
qs = Tag.objects.annotate(num_notes=Count('pk'))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by('name').ordered, True)
def test_annotated_values_default_ordering(self):
qs = Tag.objects.values('name').annotate(num_notes=Count('pk'))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by('name').ordered, True)


@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
NamedCategory.objects.create(id=1, name='first')
NamedCategory.objects.create(id=2, name='second')
NamedCategory.objects.create(id=3, name='third')
NamedCategory.objects.create(id=4, name='fourth')
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])
self.assertEqual(set(query.values_list('id', flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
Related objects constraints can safely contain sliced subqueries.
refs #22434
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
t2 = Tag.objects.create(name='t2', category=generic)
ManagedModel.objects.create(data='mm1', tag=t1, public=True)
mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by('-id')[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3})
def test_distinct_ordered_sliced_subquery(self):
# Implicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by('name')[0:2],
).order_by('name').values_list('name', flat=True), ['first', 'fourth']
)
# Explicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by('-name').values('id')[0:2],
).order_by('name').values_list('name', flat=True), ['second', 'third']
)
# Annotated value.
self.assertSequenceEqual(
DumbCategory.objects.filter(
id__in=DumbCategory.objects.annotate(
double_id=F('id') * 2
).order_by('id').distinct().values('double_id')[0:2],
).order_by('id').values_list('id', flat=True), [2, 4]
)


class QuerySetBitwiseOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.school = School.objects.create()
cls.room_1 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 1')
cls.room_2 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 2')
cls.room_3 = Classroom.objects.create(school=cls.school, has_blackboard=True, name='Room 3')
cls.room_4 = Classroom.objects.create(school=cls.school, has_blackboard=False, name='Room 4')
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
def test_or_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
def test_or_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
def test_or_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
def test_or_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by('-pk')[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by('-name')[:1]
self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])
def test_subquery_aliases(self):
combined = School.objects.filter(pk__isnull=False) & School.objects.filter(
Exists(Classroom.objects.filter(
has_blackboard=True,
school=OuterRef('pk'),
)),
)
self.assertSequenceEqual(combined, [self.school])
nested_combined = School.objects.filter(pk__in=combined.values('pk'))
self.assertSequenceEqual(nested_combined, [self.school])
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"#13227 -- If a queryset is already evaluated, it can still be used as a query arg"
n = Note(note='Test1', misc='misc')
n.save()
e = ExtraInfo(info='good', note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
        # Make one of the cached results unpicklable.
n_list._result_cache[0].lock = Lock()
with self.assertRaises(TypeError):
pickle.dumps(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good')
def test_no_model_options_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.")
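        # Any deepcopy of Model._meta while building or cloning the query
        # below would hit the patched __deepcopy__ and fail the test.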
try:
Note.objects.filter(pk__lte=F('pk') + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned")
try:
Note.objects.filter(note=F('misc')).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(SimpleTestCase):
def test_emptyqueryset_values(self):
# #14366 -- Calling .values() on an empty QuerySet and then cloning
# that should not cause an error
self.assertCountEqual(Number.objects.none().values('num').order_by('num'), [])
def test_values_subquery(self):
self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values('pk')), [])
self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values_list('pk')), [])
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertCountEqual(q.values(), [])
self.assertCountEqual(q.values_list(), [])
class ValuesQuerysetTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_extra_values(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(1, 2))
qs = qs.order_by('value_minus_x')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_twice(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'})
qs = qs.order_by('value_minus_one').order_by('value_plus_one')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_multiple(self):
        # PostgreSQL doesn't allow constants in ORDER BY, so check for that.
qs = Number.objects.extra(select={
'value_plus_one': 'num+1',
'value_minus_one': 'num-1',
'constant_value': '1'
})
qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_in_extra(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'},
order_by=['value_minus_one'],
)
qs = qs.values('num')
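        # setUpTestData() creates a single Number(num=72), so as in the
        # sibling tests the values() queryset should contain just that row.
        self.assertSequenceEqual(qs, [{'num': 72}])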
def test_extra_select_params_values_order_in_extra(self):
# testing for 23259 issue
qs = Number.objects.extra(
select={'value_plus_x': 'num+%s'},
select_params=[1],
order_by=['value_plus_x'],
)
qs = qs.filter(num=72)
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_multiple_select_params_values_order_by(self):
# testing for 23259 issue
qs = Number.objects.extra(select={'value_plus_x': 'num+%s', 'value_minus_x': 'num-%s'}, select_params=(72, 72))
qs = qs.order_by('value_minus_x')
qs = qs.filter(num=1)
qs = qs.values('num')
self.assertSequenceEqual(qs, [])
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num')
self.assertSequenceEqual(qs, [(72,)])
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num', flat=True)
self.assertSequenceEqual(qs, [72])
def test_field_error_values_list(self):
# see #23443
msg = "Cannot resolve keyword %r into field. Join on 'name' not permitted." % 'foo'
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.values_list('name__foo')
def test_named_values_list_flat(self):
msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg):
Number.objects.values_list('num', flat=True, named=True)
def test_named_values_list_bad_field_name(self):
msg = "Type names and field names must be valid identifiers: '1'"
with self.assertRaisesMessage(ValueError, msg):
Number.objects.extra(select={'1': 'num+1'}).values_list('1', named=True).first()
def test_named_values_list_with_fields(self):
qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id'))
values = qs.values_list('num', 'num2', named=True).first()
self.assertEqual(type(values).__name__, 'Row')
self.assertEqual(values._fields, ('num', 'num2'))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
def test_named_values_list_without_fields(self):
qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id'))
values = qs.values_list(named=True).first()
self.assertEqual(type(values).__name__, 'Row')
self.assertEqual(
values._fields,
('num2', 'id', 'num', 'other_num', 'another_num', 'id__count'),
)
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
self.assertEqual(values.id__count, 1)
def test_named_values_list_expression_with_default_alias(self):
expr = Count('id')
values = Number.objects.annotate(id__count1=expr).values_list(expr, 'id__count1', named=True).first()
self.assertEqual(values._fields, ('id__count2', 'id__count1'))
def test_named_values_list_expression(self):
expr = F('num') + 1
qs = Number.objects.annotate(combinedexpression1=expr).values_list(expr, 'combinedexpression1', named=True)
values = qs.first()
self.assertEqual(values._fields, ('combinedexpression2', 'combinedexpression1'))
def test_named_values_pickle(self):
value = Number.objects.values_list('num', 'other_num', named=True).get()
self.assertEqual(value, (72, None))
self.assertEqual(pickle.loads(pickle.dumps(value)), value)
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
cls.articles = [
Article.objects.create(name=f'Article {i}', created=some_date)
for i in range(1, 8)
]
def get_ordered_articles(self):
return Article.objects.all().order_by('name')
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1')
self.assertSequenceEqual(
self.get_ordered_articles()[1:3],
[self.articles[1], self.articles[2]],
)
def test_slicing_with_steps_can_be_used(self):
self.assertSequenceEqual(
self.get_ordered_articles()[::2], [
self.articles[0],
self.articles[2],
self.articles[4],
self.articles[6],
]
)
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
    def test_slicing_with_steps_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
def test_slicing_can_slice_again_after_slicing(self):
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][0:2],
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(self.get_ordered_articles()[0:5][4:], [self.articles[4]])
self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertSequenceEqual(
self.get_ordered_articles()[2:][0:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(self.get_ordered_articles()[2:][2:3], [self.articles[4]])
# Using an offset without a limit is also possible.
self.assertSequenceEqual(
self.get_ordered_articles()[5:],
[self.articles[5], self.articles[6]],
)
def test_slicing_cannot_filter_queryset_once_sliced(self):
msg = 'Cannot filter a query once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].filter(id=1)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
msg = 'Cannot reorder a query once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].order_by('id')
def test_slicing_cannot_combine_queries_once_sliced(self):
msg = 'Cannot combine queries once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:1] & Article.objects.all()[4:5]
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
msg = 'Negative indexing is not supported.'
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1]
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
msg = 'Negative indexing is not supported.'
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[0:-5]
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1:]
def test_invalid_index(self):
msg = 'QuerySet indices must be integers or slices, not str.'
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()['foo']
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact='Article 1')
s2 = Article.objects.filter(name__exact='Article 2')
self.assertSequenceEqual(
(s1 | s2).order_by('name'),
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
Article.objects.create(name='three', created=datetime.datetime.now())
Article.objects.create(name='four', created=datetime.datetime.now())
food = Food.objects.create(name='spam')
Eaten.objects.create(meal='spam with eggs', food=food)
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertQuerysetEqual(Article.objects.all()[0:0], [])
self.assertQuerysetEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
msg = 'Cannot change a query once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[:0].latest('created')
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
def test_empty_sliced_subquery(self):
self.assertEqual(Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0)
def test_empty_sliced_subquery_exclude(self):
self.assertEqual(Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1)
def test_zero_length_values_slicing(self):
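        # A zero-length slice can be resolved without hitting the database,
        # which assertNumQueries(0) verifies.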
n = 42
with self.assertNumQueries(0):
self.assertQuerysetEqual(Article.objects.values()[n:n], [])
self.assertQuerysetEqual(Article.objects.values_list()[n:n], [])
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
r_a = ReservedName.objects.create(name='a', order=42)
r_b = ReservedName.objects.create(name='b', order=37)
self.assertSequenceEqual(
ReservedName.objects.all().order_by('order'),
[r_b, r_a],
)
self.assertSequenceEqual(
ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')),
[r_b, r_a],
)
class ToFieldTests(TestCase):
def test_in_query(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_in_subquery(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple'))),
{lunch}
)
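        # Eaten.food is a to_field relation to Food.name, so the subquery of
        # meal strings from values('eaten__meal') can't match any food name
        # and the result is empty.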
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple').values('eaten__meal'))),
set()
)
self.assertEqual(
set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal='lunch'))),
{apple}
)
def test_nested_in_subquery(self):
extra = ExtraInfo.objects.create()
author = Author.objects.create(num=42, extra=extra)
report = Report.objects.create(creator=author)
comment = ReportComment.objects.create(report=report)
comments = ReportComment.objects.filter(
report__in=Report.objects.filter(
creator__in=extra.author_set.all(),
),
)
self.assertSequenceEqual(comments, [comment])
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
        dinner_pear = Eaten.objects.create(food=pear, meal="dinner")
        self.assertEqual(
            set(Food.objects.filter(eaten__in=[lunch_apple, dinner_pear])),
{apple, pear}
)
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food=apple)),
{lunch, dinner}
)
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Food.objects.filter(eaten=lunch)),
{apple}
)
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(parent=node1)),
[node2]
)
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(node=node2)),
[node1]
)
class IsNullTests(TestCase):
def test_primary_key(self):
custom = CustomPk.objects.create(name='pk')
null = Related.objects.create()
notnull = Related.objects.create(custom=custom)
self.assertSequenceEqual(Related.objects.filter(custom__isnull=False), [notnull])
self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null])
def test_to_field(self):
apple = Food.objects.create(name="apple")
e1 = Eaten.objects.create(food=apple, meal="lunch")
e2 = Eaten.objects.create(meal="lunch")
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=False),
[e1],
)
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=True),
[e2],
)
class ConditionalTests(TestCase):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
Tag.objects.create(name='t2', parent=t1, category=generic)
t3 = Tag.objects.create(name='t3', parent=t1)
Tag.objects.create(name='t4', parent=t3)
Tag.objects.create(name='t5', parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'):
list(LoopX.objects.all()) # Force queryset evaluation with list()
with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'):
list(LoopZ.objects.all()) # Force queryset evaluation with list()
# Note that this doesn't cause an infinite loop, since the default
# ordering on the Tag model is empty (and thus defaults to using "id"
# for the related field).
self.assertEqual(len(Tag.objects.order_by('parent')), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertQuerysetEqual(
LoopX.objects.all().order_by('y__x__y__x__id'),
[]
)
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
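    # Roughly, the generated SQL on such backends ends with something like
    # "... GROUP BY parent_id ORDER BY NULL" (illustrative, not the exact
    # compiler output).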
@skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping')
def test_null_ordering_added(self):
query = Tag.objects.values_list('parent_id', flat=True).order_by().query
query.group_by = ['parent_id']
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
def test_in_list_limit(self):
# The "in" lookup works with lists of 1000 items or more.
# The numbers amount is picked to force three different IN batches
# for Oracle, yet to be less than 2100 parameter limit for MSSQL.
numbers = list(range(2050))
max_query_params = connection.features.max_query_params
if max_query_params is None or max_query_params >= len(numbers):
Number.objects.bulk_create(Number(num=num) for num in numbers)
for number in [1000, 1001, 2000, len(numbers)]:
with self.subTest(number=number):
self.assertEqual(Number.objects.filter(num__in=numbers[:number]).count(), number)
class UnionTests(TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ['one', 'two', 'three']
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux', objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name='deux')
Q2 = Q(objectc__objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__name='ein')
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name='one', objectc__objecta__name='two')
Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
"""
        Can create an instance of a model with only the PK field (#17056).
"""
DumbCategory.objects.create()
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name='apples')
cls.f2 = Food.objects.create(name='oranges')
Eaten.objects.create(food=f1, meal='dinner')
cls.j1 = Job.objects.create(name='Manager')
cls.r1 = Responsibility.objects.create(description='Playing golf')
cls.j2 = Job.objects.create(name='Programmer')
cls.r2 = Responsibility.objects.create(description='Programming')
JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1)
JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2)
def test_to_field(self):
self.assertSequenceEqual(
Food.objects.exclude(eaten__meal='dinner'),
[self.f2],
)
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description='Playing golf'),
[self.j2],
)
self.assertSequenceEqual(
Responsibility.objects.exclude(jobs__name='Manager'),
[self.r2],
)
def test_ticket14511(self):
alex = Person.objects.get_or_create(name='Alex')[0]
jane = Person.objects.get_or_create(name='Jane')[0]
oracle = Company.objects.get_or_create(name='Oracle')[0]
google = Company.objects.get_or_create(name='Google')[0]
microsoft = Company.objects.get_or_create(name='Microsoft')[0]
intel = Company.objects.get_or_create(name='Intel')[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(employee=employee, employer=employer, title=title)
employ(oracle, alex, 'Engineer')
employ(oracle, alex, 'Developer')
employ(google, alex, 'Engineer')
employ(google, alex, 'Manager')
employ(microsoft, alex, 'Manager')
employ(intel, alex, 'Manager')
employ(microsoft, jane, 'Developer')
employ(intel, jane, 'Manager')
alex_tech_employers = alex.employers.filter(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertSequenceEqual(alex_tech_employers, [google, oracle])
alex_nontech_employers = alex.employers.exclude(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft])
def test_exclude_reverse_fk_field_ref(self):
tag = Tag.objects.create()
Note.objects.create(tag=tag, note='note')
annotation = Annotation.objects.create(name='annotation', tag=tag)
self.assertEqual(Annotation.objects.exclude(tag__note__note=F('name')).get(), annotation)
def test_exclude_with_circular_fk_relation(self):
self.assertEqual(ObjectB.objects.exclude(objecta__objectb__name=F('name')).count(), 0)
def test_subquery_exclude_outerref(self):
qs = JobResponsibilities.objects.filter(
Exists(Responsibility.objects.exclude(jobs=OuterRef('job'))),
)
self.assertTrue(qs.exists())
self.r1.delete()
self.assertFalse(qs.exists())
def test_exclude_nullable_fields(self):
number = Number.objects.create(num=1, other_num=1)
Number.objects.create(num=2, other_num=2, another_num=2)
self.assertSequenceEqual(
Number.objects.exclude(other_num=F('another_num')),
[number],
)
self.assertSequenceEqual(
Number.objects.exclude(num=F('another_num')),
[number],
)
def test_exclude_multivalued_exists(self):
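        # Excluding on the multi-valued responsibilities relation should be
        # compiled with an EXISTS subquery rather than a join; the captured
        # SQL is checked for that below.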
with CaptureQueriesContext(connection) as captured_queries:
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description='Programming'),
[self.j1],
)
self.assertIn('exists', captured_queries[0]['sql'].lower())
def test_exclude_subquery(self):
subquery = JobResponsibilities.objects.filter(
responsibility__description='bar',
) | JobResponsibilities.objects.exclude(
job__responsibilities__description='foo',
)
self.assertCountEqual(
Job.objects.annotate(
responsibility=subquery.filter(
job=OuterRef('name'),
).values('id')[:1]
),
[self.j1, self.j2],
)
class ExcludeTest17600(TestCase):
"""
    Some regression tests for ticket #17600. Some of these likely duplicate
other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
        # Create some OrderItems for the third order with heterogeneous
        # status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1),
[self.o3],
)
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1).distinct(),
[self.o3],
)
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
[self.o3],
)
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)),
[self.o3],
)
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
        orders that have no items at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertQuerysetEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
[self.o1],
)
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name='c1')
c2 = SimpleCategory.objects.create(name='c2')
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F('second__onetoonecategory')
).get(), rel
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name='i1')
NullableName.objects.create()
def test_null_in_exclude_qs(self):
none_val = '' if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[]),
['i1', none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i1']),
[none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i3']),
['i1', none_val], attrgetter('name'))
inner_qs = NullableName.objects.filter(name='i1').values_list('name')
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val], attrgetter('name'))
        # The inner queryset wasn't executed; it should have been turned
        # into a subquery above.
self.assertIs(inner_qs._result_cache, None)
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[None]),
['i1'], attrgetter('name'))
def test_double_exclude(self):
self.assertEqual(
list(NullableName.objects.filter(~~Q(name='i1'))),
list(NullableName.objects.filter(Q(name='i1'))))
self.assertNotIn(
'IS NOT NULL',
str(NullableName.objects.filter(~~Q(name='i1')).query))
class EmptyStringsAsNullTest(TestCase):
"""
Filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name='')
def test_direct_exclude(self):
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name__in=['nonexistent']),
[self.nc.pk], attrgetter('pk')
)
def test_joined_exclude(self):
self.assertQuerysetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=['nonexistent']),
[self.nc.pk], attrgetter('pk')
)
def test_21001(self):
foo = NamedCategory.objects.create(name='foo')
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name=''),
[foo.pk], attrgetter('pk')
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(SimpleTestCase):
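    # Lightweight stand-ins for the SQL compiler machinery: DummyNode always
    # compiles to the literal fragment 'dummy' with no parameters, and
    # MockCompiler forwards compile() to the node and quotes names like a
    # real compiler would.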
class DummyNode:
def as_sql(self, compiler, connection):
return 'dummy', []
class MockCompiler:
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()], connector='OR')
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('dummy', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', []))
def test_empty_nodes(self):
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.connector = 'OR'
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[empty_w, NothingNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[empty_w, NothingNode()], connector='AND')
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
class QuerySetExceptionTests(SimpleTestCase):
def test_iter_exceptions(self):
qs = ExtraInfo.objects.only('author')
msg = "'ManyToOneRel' object has no attribute 'attname'"
with self.assertRaisesMessage(AttributeError, msg):
list(qs)
def test_invalid_order_by(self):
msg = (
"Cannot resolve keyword '*' into field. Choices are: created, id, "
"name"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by('*')
def test_invalid_order_by_raw_column_alias(self):
msg = (
"Cannot resolve keyword 'queries_author.name' into field. Choices "
"are: cover, created, creator, creator_id, id, modified, name, "
"note, note_id, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Item.objects.values('creator__name').order_by('queries_author.name')
def test_invalid_queryset_model(self):
msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".'
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.filter(extra=Article.objects.all()))
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name='foo')
d2 = ModelD.objects.create(name='bar')
cls.a1 = ModelA.objects.create(name='a1', d=cls.d1)
c = ModelC.objects.create(name='c')
b = ModelB.objects.create(name='b', c=c)
cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2)
def test_ticket_17886(self):
        # The first Q object generates the match; the rest of the filters
        # should not remove the match even if they match nothing. The
        # problem here was that b__name generates a LEFT OUTER JOIN, then
        # b__c__name generates a join to c, which the ORM tried to promote
        # but failed because that join isn't nullable.
q_obj = (
Q(d__name='foo') |
Q(b__name='foo') |
Q(b__c__name='foo')
)
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count('INNER JOIN'), 1)
def test_isnull_filter_promotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
def test_null_join_demotion(self):
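        # ANDing the isnull=False and isnull=True conditions lets the join be
        # demoted to INNER, while ORing them keeps it LEFT OUTER.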
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_ticket_21366(self):
n = Note.objects.create(note='n', misc='m')
e = ExtraInfo.objects.create(info='info', note=n)
a = Author.objects.create(name='Author1', num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name='Foo', creator=a)
r2 = Report.objects.create(name='Bar')
Report.objects.create(name='Bar', creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) |
Q(creator__ranking__rank=1, name='Foo')
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count(' JOIN '), 2)
self.assertSequenceEqual(qs.order_by('name'), [r2, r1])
def test_ticket_21748(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
i3 = Identifier.objects.create(name='i3')
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertSequenceEqual(Identifier.objects.filter(program=None, channel=None), [i3])
self.assertSequenceEqual(Identifier.objects.exclude(program=None, channel=None).order_by('name'), [i1, i2])
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
        # Check that ~~Q() (or equivalently .exclude(~Q())) behaves like Q()
        # for join promotion.
qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk')
qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Test OR + double negation. The expected result is that channel is
        # LEFT OUTER joined and program is INNER joined.
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
).order_by('pk')
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Finally, a more complex case: one query where each NOT is pushed to
        # the lowest level of the boolean tree, and another query where this
        # isn't done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
).order_by('pk')
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
).order_by('pk')
self.assertQuerysetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count('JOIN'),
str(qs2.query).count('JOIN'))
self.assertEqual(0, str(qs1.query).count('INNER JOIN'))
self.assertEqual(str(qs1.query).count('INNER JOIN'),
str(qs2.query).count('INNER JOIN'))
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
# We don't accidentally trim reverse joins - we can't know if there is
# anything on the other side of the join, so trimming reverse joins
# can't be done, ever.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn('INNER JOIN', str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
The queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name='foo').select_related('tag')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name='foo').filter(member__name='foo')
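        # member is a reverse one-to-one relation, so the join created by the
        # first filter() can be reused by the second.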
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name='r4').filter(report__name='r1')
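        # report is a multi-valued reverse foreign key, so each filter() call
        # may refer to a different related row and gets its own join.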
self.assertEqual(str(qs.query).count('JOIN'), 2)
def test_inverted_q_across_relations(self):
"""
When a trimmable join is specified in the query (here school__), the
        ORM detects it and removes unnecessary joins. The set of reusable
        joins is updated after trimming the query so that other lookups don't
consider that the outer query's filters are in effect for the subquery
(#26551).
"""
springfield_elementary = School.objects.create()
        hogwarts = School.objects.create()
        Student.objects.create(school=springfield_elementary)
        hp = Student.objects.create(school=hogwarts)
        Classroom.objects.create(school=hogwarts, name='Potion')
Classroom.objects.create(school=springfield_elementary, name='Main')
qs = Student.objects.filter(
~(Q(school__classroom__name='Main') & Q(school__classroom__has_blackboard=None))
)
self.assertSequenceEqual(qs, [hp])
class DisjunctionPromotionTests(TestCase):
def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1='f1', f2='f2')
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(' JOIN '), 0)
qs = qs.select_related('a', 'b')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 0)
self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2)
with self.assertNumQueries(1):
self.assertSequenceEqual(qs, [basea])
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        # Now there are two different joins in an ORed condition; these
        # must be OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
        # The ANDed a__f2 filter allows us to keep using INNER JOIN
        # even inside the ORed case. If the join to a returns nothing,
# the ANDed filter for a__f2 can't be true.
qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(
Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
        # Demotion is needed for the "a" join. It is marked as an outer join
        # by the above filter (even if it is trimmed away).
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the above filters on a force the join to be an
        # INNER JOIN even if it is trimmed.
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# Now the join to a is created as LOUTER
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar')))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(
(Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo'))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
qs = BaseA.objects.filter(
Q(a__f1='foo') | Q(a__f1='bar') & (Q(b__f1='bar') | Q(c__f1='foo'))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion_fexpression(self):
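        # F() references across relations take part in join promotion just
        # like plain filters: joins used only in ORed branches become LEFT
        # OUTER, while joins constrained in every branch stay INNER.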
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
i_extra = Identifier.objects.create(name='extra')
i_program = Identifier.objects.create(name='program')
program = Program.objects.create(identifier=i_program)
i_channel = Identifier.objects.create(name='channel')
channel = Channel.objects.create(identifier=i_channel)
channel.programs.add(program)
        # channel contains 'program', so all Identifiers except that one
        # should be returned
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=channel).order_by('name'),
[i_channel, i_extra],
)
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=None).order_by('name'),
[i_program],
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text='pg3')
pg2 = Page.objects.create(text='pg2')
pg1 = Page.objects.create(text='pg1')
pa1 = Paragraph.objects.create(text='pa1')
pa1.page.set([pg1, pg2])
pa2 = Paragraph.objects.create(text='pa2')
pa2.page.set([pg2, pg3])
pa3 = Paragraph.objects.create(text='pa3')
ch1 = Chapter.objects.create(title='ch1', paragraph=pa1)
ch2 = Chapter.objects.create(title='ch2', paragraph=pa2)
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3)
b1 = Book.objects.create(title='b1', chapter=ch1)
b2 = Book.objects.create(title='b2', chapter=ch2)
b3 = Book.objects.create(title='b3', chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text='pg1')
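        # The exclude() over the m2m chain shouldn't add an IS NOT NULL
        # clause, and only b1 (whose paragraph is linked to pg1) is excluded.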
self.assertNotIn('IS NOT NULL', str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data='foo')
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data='bar', parent=my1)
parents = MyObject.objects.filter(parent=F('id'))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id'))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
Tests QuerySet ORed combining in exclude subquery case.
"""
t = Tag.objects.create(name='foo')
a1 = Annotation.objects.create(tag=t, name='a1')
a2 = Annotation.objects.create(tag=t, name='a2')
a3 = Annotation.objects.create(tag=t, name='a3')
n = Note.objects.create(note='foo', misc='bar')
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(SimpleTestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name='')
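        # On backends that interpret '' as NULL (e.g. Oracle), the join is
        # promoted to LEFT OUTER so that rows whose related name is stored as
        # NULL can still match; elsewhere an INNER join suffices.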
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn('LEFT OUTER JOIN', str(qs.query))
else:
self.assertNotIn('LEFT OUTER JOIN', str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# If a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertSequenceEqual(Order.objects.filter(items__in=OrderItem.objects.values_list('status')), [o1])
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
lfa1 = LeafA.objects.create(data='foo')
lfa2 = LeafA.objects.create(data='bar')
lfb1 = LeafB.objects.create(data='lfb1')
lfb2 = LeafB.objects.create(data='lfb2')
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True)
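        # Neither values_list() queryset is evaluated; both end up as nested
        # subqueries when qs is executed.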
qs = LeafB.objects.filter(pk__in=joins)
self.assertSequenceEqual(qs, [lfb1])
class Ticket18785Tests(SimpleTestCase):
def test_ticket_18785(self):
        # Test join trimming from ticket #18785.
qs = Item.objects.exclude(
note__isnull=False
).filter(
name='something', creator__extra__isnull=True
).order_by()
self.assertEqual(1, str(qs.query).count('INNER JOIN'))
self.assertEqual(0, str(qs.query).count('OUTER JOIN'))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page)
self.assertSequenceEqual(sentences_not_in_pub, [book2])
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertSequenceEqual(qs, [p1])
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
cls.c = ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
        # Passing an object of the class on which the query is done.
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = [self.oa]
out_b = [self.ob, self.pob1]
out_c = [self.c]
# proxy model objects
self.assertSequenceEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b)
self.assertSequenceEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2)
# child objects
self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b)
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by('name'),
out_b
)
# parent objects
self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# QuerySet related object type checking shouldn't issue queries
# (the querysets aren't evaluated here, hence zero queries) (#23266).
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
        #23396 -- ValuesQuerySets are not checked for compatibility with the lookup field.
"""
# Make sure the num and objecta field values match.
ob = ObjectB.objects.get(name='ob')
ob.num = ob.objecta.pk
ob.save()
pob = ObjectB.objects.get(name='pob')
pob.num = pob.objecta.pk
pob.save()
self.assertSequenceEqual(ObjectB.objects.filter(
objecta__in=ObjectB.objects.all().values_list('num')
).order_by('pk'), [ob, pob])
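# Note (not in the original file): as test_correct_lookup verifies with
# assertNumQueries(0), a queryset passed to __in is not evaluated up front;
# it is embedded as a subquery and only runs when the outer queryset does.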
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data='s1')
s2 = SharedConnection.objects.create(data='s2')
s3 = SharedConnection.objects.create(data='s3')
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest
else [s2, s1, s3]
)
self.assertSequenceEqual(SharedConnection.objects.order_by('-pointera__connection', 'pk'), expected_ordering)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name='jackstaff')
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name='jillstaff')
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
qs = Task.objects.select_related(
'creator__staffuser__staff', 'owner__staffuser__staff')
self.assertEqual(str(qs.query).count(' JOIN '), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff)
self.assertEqual(task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related('parent').defer('parent__created')
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(' INNER JOIN ', str(qs.query))
qs = qs.values('parent__parent__id')
self.assertIn(' INNER JOIN ', str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values('parent__parent__id')
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values('objecta__name')
self.assertIn(' INNER JOIN ', str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query))
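# A small debugging helper sketch (not in the original module) for the
# join-type checks above: it counts the join keywords in the compiled SQL.
# str(qs.query) is backend-flavoured SQL, so this is an inspection aid rather
# than something to assert on in portable tests.
def join_counts(queryset):
    sql = str(queryset.query)
    return {
        'inner': sql.count(' INNER JOIN '),
        'left_outer': sql.count(' LEFT OUTER JOIN '),
    }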
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1')
sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2')
sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3')
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertSequenceEqual(SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by('name'), [sc2, sc3])
self.assertSequenceEqual(SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1])
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1')
cp1 = CustomPk.objects.create(name='cp1', extra='extra')
cp1.custompktag_set.add(cpt1)
self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1])
self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1])
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F('school')))
self.assertSequenceEqual(queryset, [st2])
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
        # Test filtering on a complicated Q object from the ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
# See also #24090.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True,
modelc_fk=c1, modela_fk=a1)
complex_q = Q(pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000 /
F("ticket23605b__modelc_fk__field_c0")
) &
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
                    # The same filters as the commented ones above, but
                    # double-negated (once by the ~Q() above, once by these
                    # parentheses). So again a1 matches and a2 does not.
Q(field_b1=True) &
Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
))).filter(ticket23605b__field_b1=True))
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertSequenceEqual(qs1, [a1])
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertSequenceEqual(qs2, [a2])
class TestTicket24279(TestCase):
def test_ticket_24278(self):
School.objects.create()
qs = School.objects.filter(Q(pk__in=()) | Q())
self.assertQuerysetEqual(qs, [])
class TestInvalidValuesRelation(SimpleTestCase):
def test_invalid_values(self):
msg = "Field 'id' expected a number but got 'abc'."
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag='abc')
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag__in=[123, 'abc'])
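# Note (not in the original file): TestInvalidValuesRelation is a
# SimpleTestCase, which forbids database access, so the ValueError above has to
# be raised while the filter is being built -- related-field values are
# validated eagerly, before any SQL is executed.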
class TestTicket24605(TestCase):
def test_ticket_24605(self):
"""
Subquery table names should be quoted.
"""
i1 = Individual.objects.create(alive=True)
RelatedIndividual.objects.create(related=i1)
i2 = Individual.objects.create(alive=False)
RelatedIndividual.objects.create(related=i2)
i3 = Individual.objects.create(alive=True)
i4 = Individual.objects.create(alive=False)
self.assertSequenceEqual(Individual.objects.filter(Q(alive=False), Q(related_individual__isnull=True)), [i4])
self.assertSequenceEqual(
Individual.objects.exclude(Q(alive=False), Q(related_individual__isnull=True)).order_by('pk'),
[i1, i2, i3]
)
class Ticket23622Tests(TestCase):
@skipUnlessDBFeature('can_distinct_on_fields')
def test_ticket_23622(self):
"""
Make sure __pk__in and __in work the same for related fields when
using a distinct on subquery.
"""
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=0.0)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=123,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=23,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=234,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=12,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=567,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=76,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=7,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=56,
field_b1=True,
modelc_fk=c1,
)
qx = (
Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
Q(ticket23605b__field_b0__gte=300)
)
qy = (
Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
Q(ticket23605b__field_b0__gte=300)
)
self.assertEqual(
set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)),
set(Ticket23605A.objects.filter(qy).values_list('pk', flat=True))
)
self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
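# Note (not in the original file): the qx/qy equivalence asserted above holds
# because a model queryset used as the right-hand side of __in on a relation is
# resolved to the related model's primary key, which is exactly what the
# explicit __pk__in spelling selects.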
|
f3647e9167f1bed90ac3ec25e288248a49e3284ce99a9508c589cb36659286ce | import operator
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import Exists, F, IntegerField, OuterRef, Value
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import Celebrity, Number, ReservedName
@skipUnlessDBFeature('supports_select_union')
class QuerySetSetOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.bulk_create(Number(num=i, other_num=10 - i) for i in range(10))
def assertNumbersEqual(self, queryset, expected_numbers, ordered=True):
self.assertQuerysetEqual(queryset, expected_numbers, operator.attrgetter('num'), ordered)
def test_simple_union(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=8)
qs3 = Number.objects.filter(num=5)
self.assertNumbersEqual(qs1.union(qs2, qs3), [0, 1, 5, 8, 9], ordered=False)
@skipUnlessDBFeature('supports_select_intersection')
def test_simple_intersection(self):
qs1 = Number.objects.filter(num__lte=5)
qs2 = Number.objects.filter(num__gte=5)
qs3 = Number.objects.filter(num__gte=4, num__lte=6)
self.assertNumbersEqual(qs1.intersection(qs2, qs3), [5], ordered=False)
@skipUnlessDBFeature('supports_select_intersection')
def test_intersection_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
reserved_name = qs1.intersection(qs1).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.intersection(qs1).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
@skipUnlessDBFeature('supports_select_difference')
def test_simple_difference(self):
qs1 = Number.objects.filter(num__lte=5)
qs2 = Number.objects.filter(num__lte=4)
self.assertNumbersEqual(qs1.difference(qs2), [5], ordered=False)
def test_union_distinct(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
self.assertEqual(len(list(qs1.union(qs2, all=True))), 20)
self.assertEqual(len(list(qs1.union(qs2))), 10)
def test_union_none(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=8)
qs3 = qs1.union(qs2)
self.assertSequenceEqual(qs3.none(), [])
self.assertNumbersEqual(qs3, [0, 1, 8, 9], ordered=False)
@skipUnlessDBFeature('supports_select_intersection')
def test_intersection_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.intersection(qs2)), 0)
self.assertEqual(len(qs1.intersection(qs3)), 0)
self.assertEqual(len(qs2.intersection(qs1)), 0)
self.assertEqual(len(qs3.intersection(qs1)), 0)
self.assertEqual(len(qs2.intersection(qs2)), 0)
self.assertEqual(len(qs3.intersection(qs3)), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_difference_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.difference(qs2)), 10)
self.assertEqual(len(qs1.difference(qs3)), 10)
self.assertEqual(len(qs2.difference(qs1)), 0)
self.assertEqual(len(qs3.difference(qs1)), 0)
self.assertEqual(len(qs2.difference(qs2)), 0)
self.assertEqual(len(qs3.difference(qs3)), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_difference_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
qs2 = ReservedName.objects.none()
reserved_name = qs1.difference(qs2).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.difference(qs2).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
def test_union_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.union(qs2)), 10)
self.assertEqual(len(qs2.union(qs1)), 10)
self.assertEqual(len(qs1.union(qs3)), 10)
self.assertEqual(len(qs3.union(qs1)), 10)
self.assertEqual(len(qs2.union(qs1, qs1, qs1)), 10)
self.assertEqual(len(qs2.union(qs1, qs1, all=True)), 20)
self.assertEqual(len(qs2.union(qs2)), 0)
self.assertEqual(len(qs3.union(qs3)), 0)
def test_empty_qs_union_with_ordered_qs(self):
qs1 = Number.objects.all().order_by('num')
qs2 = Number.objects.none().union(qs1).order_by('num')
self.assertEqual(list(qs1), list(qs2))
def test_limits(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
self.assertEqual(len(list(qs1.union(qs2)[:2])), 2)
def test_ordering(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=2, num__lte=3)
self.assertNumbersEqual(qs1.union(qs2).order_by('-num'), [3, 2, 1, 0])
def test_ordering_by_alias(self):
qs1 = Number.objects.filter(num__lte=1).values(alias=F('num'))
qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('num'))
self.assertQuerysetEqual(
qs1.union(qs2).order_by('-alias'),
[3, 2, 1, 0],
operator.itemgetter('alias'),
)
def test_ordering_by_f_expression(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=2, num__lte=3)
self.assertNumbersEqual(qs1.union(qs2).order_by(F('num').desc()), [3, 2, 1, 0])
def test_ordering_by_f_expression_and_alias(self):
qs1 = Number.objects.filter(num__lte=1).values(alias=F('other_num'))
qs2 = Number.objects.filter(num__gte=2, num__lte=3).values(alias=F('other_num'))
self.assertQuerysetEqual(
qs1.union(qs2).order_by(F('alias').desc()),
[10, 9, 8, 7],
operator.itemgetter('alias'),
)
Number.objects.create(num=-1)
self.assertQuerysetEqual(
qs1.union(qs2).order_by(F('alias').desc(nulls_last=True)),
[10, 9, 8, 7, None],
operator.itemgetter('alias'),
)
def test_union_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
reserved_name = qs1.union(qs1).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.union(qs1).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
# List of columns can be changed.
reserved_name = qs1.union(qs1).values_list('order').get()
self.assertEqual(reserved_name, (2,))
def test_union_with_two_annotated_values_list(self):
qs1 = Number.objects.filter(num=1).annotate(
count=Value(0, IntegerField()),
).values_list('num', 'count')
qs2 = Number.objects.filter(num=2).values('pk').annotate(
count=F('num'),
).annotate(
num=Value(1, IntegerField()),
).values_list('num', 'count')
self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)])
def test_union_with_extra_and_values_list(self):
qs1 = Number.objects.filter(num=1).extra(
select={'count': 0},
).values_list('num', 'count')
qs2 = Number.objects.filter(num=2).extra(select={'count': 1})
self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)])
def test_union_with_values_list_on_annotated_and_unannotated(self):
ReservedName.objects.create(name='rn1', order=1)
qs1 = Number.objects.annotate(
has_reserved_name=Exists(ReservedName.objects.filter(order=OuterRef('num')))
).filter(has_reserved_name=True)
qs2 = Number.objects.filter(num=9)
self.assertCountEqual(qs1.union(qs2).values_list('num', flat=True), [1, 9])
def test_union_with_values_list_and_order(self):
ReservedName.objects.bulk_create([
ReservedName(name='rn1', order=7),
ReservedName(name='rn2', order=5),
ReservedName(name='rn0', order=6),
ReservedName(name='rn9', order=-1),
])
qs1 = ReservedName.objects.filter(order__gte=6)
qs2 = ReservedName.objects.filter(order__lte=5)
union_qs = qs1.union(qs2)
for qs, expected_result in (
# Order by a single column.
(union_qs.order_by('-pk').values_list('order', flat=True), [-1, 6, 5, 7]),
(union_qs.order_by('pk').values_list('order', flat=True), [7, 5, 6, -1]),
(union_qs.values_list('order', flat=True).order_by('-pk'), [-1, 6, 5, 7]),
(union_qs.values_list('order', flat=True).order_by('pk'), [7, 5, 6, -1]),
# Order by multiple columns.
(union_qs.order_by('-name', 'pk').values_list('order', flat=True), [-1, 5, 7, 6]),
(union_qs.values_list('order', flat=True).order_by('-name', 'pk'), [-1, 5, 7, 6]),
):
with self.subTest(qs=qs):
self.assertEqual(list(qs), expected_result)
def test_union_with_values_list_and_order_on_annotation(self):
qs1 = Number.objects.annotate(
annotation=Value(-1),
multiplier=F('annotation'),
).filter(num__gte=6)
qs2 = Number.objects.annotate(
annotation=Value(2),
multiplier=F('annotation'),
).filter(num__lte=5)
self.assertSequenceEqual(
qs1.union(qs2).order_by('annotation', 'num').values_list('num', flat=True),
[6, 7, 8, 9, 0, 1, 2, 3, 4, 5],
)
self.assertQuerysetEqual(
qs1.union(qs2).order_by(
F('annotation') * F('multiplier'),
'num',
).values('num'),
[6, 7, 8, 9, 0, 1, 2, 3, 4, 5],
operator.itemgetter('num'),
)
def test_union_multiple_models_with_values_list_and_order(self):
reserved_name = ReservedName.objects.create(name='rn1', order=0)
qs1 = Celebrity.objects.all()
qs2 = ReservedName.objects.all()
self.assertSequenceEqual(
qs1.union(qs2).order_by('name').values_list('pk', flat=True),
[reserved_name.pk],
)
def test_union_multiple_models_with_values_list_and_order_by_extra_select(self):
reserved_name = ReservedName.objects.create(name='rn1', order=0)
qs1 = Celebrity.objects.extra(select={'extra_name': 'name'})
qs2 = ReservedName.objects.extra(select={'extra_name': 'name'})
self.assertSequenceEqual(
qs1.union(qs2).order_by('extra_name').values_list('pk', flat=True),
[reserved_name.pk],
)
def test_count_union(self):
qs1 = Number.objects.filter(num__lte=1).values('num')
qs2 = Number.objects.filter(num__gte=2, num__lte=3).values('num')
self.assertEqual(qs1.union(qs2).count(), 4)
def test_count_union_empty_result(self):
qs = Number.objects.filter(pk__in=[])
self.assertEqual(qs.union(qs).count(), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_count_difference(self):
qs1 = Number.objects.filter(num__lt=10)
qs2 = Number.objects.filter(num__lt=9)
self.assertEqual(qs1.difference(qs2).count(), 1)
@skipUnlessDBFeature('supports_select_intersection')
def test_count_intersection(self):
qs1 = Number.objects.filter(num__gte=5)
qs2 = Number.objects.filter(num__lte=5)
self.assertEqual(qs1.intersection(qs2).count(), 1)
def test_exists_union(self):
qs1 = Number.objects.filter(num__gte=5)
qs2 = Number.objects.filter(num__lte=5)
with CaptureQueriesContext(connection) as context:
self.assertIs(qs1.union(qs2).exists(), True)
captured_queries = context.captured_queries
self.assertEqual(len(captured_queries), 1)
captured_sql = captured_queries[0]['sql']
self.assertNotIn(
connection.ops.quote_name(Number._meta.pk.column),
captured_sql,
)
self.assertEqual(
captured_sql.count(connection.ops.limit_offset_sql(None, 1)),
3 if connection.features.supports_slicing_ordering_in_compound else 1
)
def test_exists_union_empty_result(self):
qs = Number.objects.filter(pk__in=[])
self.assertIs(qs.union(qs).exists(), False)
@skipUnlessDBFeature('supports_select_intersection')
def test_exists_intersection(self):
qs1 = Number.objects.filter(num__gt=5)
qs2 = Number.objects.filter(num__lt=5)
self.assertIs(qs1.intersection(qs1).exists(), True)
self.assertIs(qs1.intersection(qs2).exists(), False)
@skipUnlessDBFeature('supports_select_difference')
def test_exists_difference(self):
qs1 = Number.objects.filter(num__gte=5)
qs2 = Number.objects.filter(num__gte=3)
self.assertIs(qs1.difference(qs2).exists(), False)
self.assertIs(qs2.difference(qs1).exists(), True)
def test_get_union(self):
qs = Number.objects.filter(num=2)
self.assertEqual(qs.union(qs).get().num, 2)
@skipUnlessDBFeature('supports_select_difference')
def test_get_difference(self):
qs1 = Number.objects.all()
qs2 = Number.objects.exclude(num=2)
self.assertEqual(qs1.difference(qs2).get().num, 2)
@skipUnlessDBFeature('supports_select_intersection')
def test_get_intersection(self):
qs1 = Number.objects.all()
qs2 = Number.objects.filter(num=2)
self.assertEqual(qs1.intersection(qs2).get().num, 2)
@skipUnlessDBFeature('supports_slicing_ordering_in_compound')
def test_ordering_subqueries(self):
qs1 = Number.objects.order_by('num')[:2]
qs2 = Number.objects.order_by('-num')[:2]
self.assertNumbersEqual(qs1.union(qs2).order_by('-num')[:4], [9, 8, 1, 0])
@skipIfDBFeature('supports_slicing_ordering_in_compound')
def test_unsupported_ordering_slicing_raises_db_error(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
qs3 = Number.objects.all()
msg = 'LIMIT/OFFSET not allowed in subqueries of compound statements'
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2[:10]))
msg = 'ORDER BY not allowed in subqueries of compound statements'
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.order_by('id').union(qs2))
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by('id').union(qs3))
@skipIfDBFeature('supports_select_intersection')
def test_unsupported_intersection_raises_db_error(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
msg = 'intersection is not supported on this database backend'
with self.assertRaisesMessage(NotSupportedError, msg):
list(qs1.intersection(qs2))
def test_combining_multiple_models(self):
ReservedName.objects.create(name='99 little bugs', order=99)
qs1 = Number.objects.filter(num=1).values_list('num', flat=True)
qs2 = ReservedName.objects.values_list('order')
self.assertEqual(list(qs1.union(qs2).order_by('num')), [1, 99])
def test_order_raises_on_non_selected_column(self):
qs1 = Number.objects.filter().annotate(
annotation=Value(1, IntegerField()),
).values('annotation', num2=F('num'))
qs2 = Number.objects.filter().values('id', 'num')
# Should not raise
list(qs1.union(qs2).order_by('annotation'))
list(qs1.union(qs2).order_by('num2'))
msg = 'ORDER BY term does not match any column in the result set'
# 'id' is not part of the select
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by('id'))
        # 'num' was re-aliased to 'num2'.
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by('num'))
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by(F('num')))
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by(F('num').desc()))
        # With the querysets switched, 'num' is part of the column-defining first select again:
list(qs2.union(qs1).order_by('num'))
@skipUnlessDBFeature('supports_select_difference', 'supports_select_intersection')
def test_qs_with_subcompound_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.intersection(Number.objects.filter(num__gt=1))
self.assertEqual(qs1.difference(qs2).count(), 2)
def test_order_by_same_type(self):
qs = Number.objects.all()
union = qs.union(qs)
numbers = list(range(10))
self.assertNumbersEqual(union.order_by('num'), numbers)
self.assertNumbersEqual(union.order_by('other_num'), reversed(numbers))
def test_unsupported_operations_on_combined_qs(self):
qs = Number.objects.all()
msg = 'Calling QuerySet.%s() after %s() is not supported.'
combinators = ['union']
if connection.features.supports_select_difference:
combinators.append('difference')
if connection.features.supports_select_intersection:
combinators.append('intersection')
for combinator in combinators:
for operation in (
'alias',
'annotate',
'defer',
'delete',
'distinct',
'exclude',
'extra',
'filter',
'only',
'prefetch_related',
'select_related',
'update',
):
with self.subTest(combinator=combinator, operation=operation):
with self.assertRaisesMessage(
NotSupportedError,
msg % (operation, combinator),
):
getattr(getattr(qs, combinator)(qs), operation)()
with self.assertRaisesMessage(
NotSupportedError,
msg % ('contains', combinator),
):
obj = Number.objects.first()
getattr(qs, combinator)(qs).contains(obj)
def test_get_with_filters_unsupported_on_combined_qs(self):
qs = Number.objects.all()
msg = 'Calling QuerySet.get(...) with filters after %s() is not supported.'
combinators = ['union']
if connection.features.supports_select_difference:
combinators.append('difference')
if connection.features.supports_select_intersection:
combinators.append('intersection')
for combinator in combinators:
with self.subTest(combinator=combinator):
with self.assertRaisesMessage(NotSupportedError, msg % combinator):
getattr(qs, combinator)(qs).get(num=2)
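# A usage sketch, not part of the original suite, showing how the three
# combinators compose on the same Number fixture used above. It assumes a
# backend that supports all of them, hence the combined feature skip.
@skipUnlessDBFeature('supports_select_union', 'supports_select_intersection', 'supports_select_difference')
class SetOperationUsageSketchTests(TestCase):
    @classmethod
    def setUpTestData(cls):
        Number.objects.bulk_create(Number(num=i, other_num=10 - i) for i in range(10))

    def test_compose_combinators(self):
        low = Number.objects.filter(num__lt=5)               # 0..4
        even = Number.objects.filter(num__in=[0, 2, 4, 6, 8])
        # union() is distinct by default; intersection()/difference() follow
        # the corresponding SQL set semantics.
        self.assertEqual(low.union(even).count(), 7)         # 0..4 plus 6 and 8
        self.assertEqual(low.intersection(even).count(), 3)  # 0, 2, 4
        self.assertEqual(low.difference(even).count(), 2)    # 1, 3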
|
76a092141099a88398aa93249dfcdf2d1afc4da69ef314222d85b7c967684dbd | import json
import unittest
import xml.etree.ElementTree
from django.db import NotSupportedError, connection, transaction
from django.db.models import Count
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import Tag
@skipUnlessDBFeature('supports_explaining_query_execution')
class ExplainTests(TestCase):
def test_basic(self):
querysets = [
Tag.objects.filter(name='test'),
Tag.objects.filter(name='test').select_related('parent'),
Tag.objects.filter(name='test').prefetch_related('children'),
Tag.objects.filter(name='test').annotate(Count('children')),
Tag.objects.filter(name='test').values_list('name'),
Tag.objects.order_by().union(Tag.objects.order_by().filter(name='test')),
Tag.objects.all().select_for_update().filter(name='test'),
]
supported_formats = connection.features.supported_explain_formats
all_formats = (None,) + tuple(supported_formats) + tuple(f.lower() for f in supported_formats)
for idx, queryset in enumerate(querysets):
for format in all_formats:
with self.subTest(format=format, queryset=idx):
with self.assertNumQueries(1), CaptureQueriesContext(connection) as captured_queries:
result = queryset.explain(format=format)
self.assertTrue(captured_queries[0]['sql'].startswith(connection.ops.explain_prefix))
self.assertIsInstance(result, str)
self.assertTrue(result)
if format == 'xml':
try:
xml.etree.ElementTree.fromstring(result)
except xml.etree.ElementTree.ParseError as e:
self.fail(
f'QuerySet.explain() result is not valid XML: {e}'
)
elif format == 'json':
try:
json.loads(result)
except json.JSONDecodeError as e:
self.fail(
f'QuerySet.explain() result is not valid JSON: {e}'
)
@skipUnlessDBFeature('validates_explain_options')
def test_unknown_options(self):
with self.assertRaisesMessage(ValueError, 'Unknown options: test, test2'):
Tag.objects.all().explain(test=1, test2=1)
def test_unknown_format(self):
msg = 'DOES NOT EXIST is not a recognized format.'
if connection.features.supported_explain_formats:
msg += ' Allowed formats: %s' % ', '.join(sorted(connection.features.supported_explain_formats))
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.all().explain(format='does not exist')
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
def test_postgres_options(self):
qs = Tag.objects.filter(name='test')
test_options = [
{'COSTS': False, 'BUFFERS': True, 'ANALYZE': True},
{'costs': False, 'buffers': True, 'analyze': True},
{'verbose': True, 'timing': True, 'analyze': True},
{'verbose': False, 'timing': False, 'analyze': True},
{'summary': True},
]
if connection.features.is_postgresql_12:
test_options.append({'settings': True})
if connection.features.is_postgresql_13:
test_options.append({'analyze': True, 'wal': True})
for options in test_options:
with self.subTest(**options), transaction.atomic():
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(format='text', **options)
self.assertEqual(len(captured_queries), 1)
for name, value in options.items():
option = '{} {}'.format(name.upper(), 'true' if value else 'false')
self.assertIn(option, captured_queries[0]['sql'])
@unittest.skipUnless(connection.vendor == 'mysql', 'MySQL specific')
def test_mysql_text_to_traditional(self):
# Ensure these cached properties are initialized to prevent queries for
# the MariaDB or MySQL version during the QuerySet evaluation.
connection.features.supported_explain_formats
with CaptureQueriesContext(connection) as captured_queries:
Tag.objects.filter(name='test').explain(format='text')
self.assertEqual(len(captured_queries), 1)
self.assertIn('FORMAT=TRADITIONAL', captured_queries[0]['sql'])
@unittest.skipUnless(connection.vendor == 'mysql', 'MariaDB and MySQL >= 8.0.18 specific.')
def test_mysql_analyze(self):
qs = Tag.objects.filter(name='test')
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True)
self.assertEqual(len(captured_queries), 1)
prefix = 'ANALYZE ' if connection.mysql_is_mariadb else 'EXPLAIN ANALYZE '
self.assertTrue(captured_queries[0]['sql'].startswith(prefix))
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True, format='JSON')
self.assertEqual(len(captured_queries), 1)
if connection.mysql_is_mariadb:
self.assertIn('FORMAT=JSON', captured_queries[0]['sql'])
else:
self.assertNotIn('FORMAT=JSON', captured_queries[0]['sql'])
@skipIfDBFeature('supports_explaining_query_execution')
class ExplainUnsupportedTests(TestCase):
def test_message(self):
msg = 'This backend does not support explaining query execution.'
with self.assertRaisesMessage(NotSupportedError, msg):
Tag.objects.filter(name='test').explain()
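# Usage sketch (not part of the suite): explain() issues a single EXPLAIN-style
# query and returns the backend's plan as a string, so it is usually printed or
# logged while debugging rather than asserted on; the wording and the supported
# format/option keywords are backend specific, as the tests above show.
def log_plan(queryset, **options):
    # A hypothetical helper name; it simply forwards to QuerySet.explain().
    plan = queryset.explain(**options)
    print(plan)
    return plan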
|
b0544e928d50f0c66d9cb157cc543bd3d7f0bbd9fabca20f083eccdce9a366b8 | import datetime
from django.core.exceptions import FieldDoesNotExist
from django.db.models import F
from django.db.models.functions import Lower
from django.test import TestCase, skipUnlessDBFeature
from .models import (
Article, CustomDbColumn, CustomPk, Detail, Individual, JSONFieldNullable,
Member, Note, Number, Order, Paragraph, SpecialCategory, Tag, Valid,
)
class BulkUpdateNoteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.notes = [
Note.objects.create(note=str(i), misc=str(i))
for i in range(10)
]
def create_tags(self):
self.tags = [
Tag.objects.create(name=str(i))
for i in range(10)
]
def test_simple(self):
for note in self.notes:
note.note = 'test-%s' % note.id
with self.assertNumQueries(1):
Note.objects.bulk_update(self.notes, ['note'])
self.assertCountEqual(
Note.objects.values_list('note', flat=True),
[cat.note for cat in self.notes]
)
def test_multiple_fields(self):
for note in self.notes:
note.note = 'test-%s' % note.id
note.misc = 'misc-%s' % note.id
with self.assertNumQueries(1):
Note.objects.bulk_update(self.notes, ['note', 'misc'])
self.assertCountEqual(
Note.objects.values_list('note', flat=True),
[cat.note for cat in self.notes]
)
self.assertCountEqual(
Note.objects.values_list('misc', flat=True),
[cat.misc for cat in self.notes]
)
def test_batch_size(self):
with self.assertNumQueries(len(self.notes)):
Note.objects.bulk_update(self.notes, fields=['note'], batch_size=1)
def test_unsaved_models(self):
objs = self.notes + [Note(note='test', misc='test')]
msg = 'All bulk_update() objects must have a primary key set.'
with self.assertRaisesMessage(ValueError, msg):
Note.objects.bulk_update(objs, fields=['note'])
def test_foreign_keys_do_not_lookup(self):
self.create_tags()
for note, tag in zip(self.notes, self.tags):
note.tag = tag
with self.assertNumQueries(1):
Note.objects.bulk_update(self.notes, ['tag'])
self.assertSequenceEqual(Note.objects.filter(tag__isnull=False), self.notes)
def test_set_field_to_null(self):
self.create_tags()
Note.objects.update(tag=self.tags[0])
for note in self.notes:
note.tag = None
Note.objects.bulk_update(self.notes, ['tag'])
self.assertCountEqual(Note.objects.filter(tag__isnull=True), self.notes)
def test_set_mixed_fields_to_null(self):
self.create_tags()
midpoint = len(self.notes) // 2
top, bottom = self.notes[:midpoint], self.notes[midpoint:]
for note in top:
note.tag = None
for note in bottom:
note.tag = self.tags[0]
Note.objects.bulk_update(self.notes, ['tag'])
self.assertCountEqual(Note.objects.filter(tag__isnull=True), top)
self.assertCountEqual(Note.objects.filter(tag__isnull=False), bottom)
def test_functions(self):
Note.objects.update(note='TEST')
for note in self.notes:
note.note = Lower('note')
Note.objects.bulk_update(self.notes, ['note'])
self.assertEqual(set(Note.objects.values_list('note', flat=True)), {'test'})
# Tests that use self.notes go here, otherwise put them in another class.
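# Usage sketch (not part of the original file): bulk_update() writes already
# mutated in-memory instances back in one UPDATE statement per batch, so the
# usual pattern is load, mutate, then save the listed fields in bulk. The
# objects must already have primary keys, and only the named fields are
# written; everything else is left untouched in the database.
#
#   notes = list(Note.objects.all())
#   for note in notes:
#       note.misc = note.misc.upper()
#   Note.objects.bulk_update(notes, ['misc'], batch_size=500)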
class BulkUpdateTests(TestCase):
def test_no_fields(self):
msg = 'Field names must be given to bulk_update().'
with self.assertRaisesMessage(ValueError, msg):
Note.objects.bulk_update([], fields=[])
def test_invalid_batch_size(self):
msg = 'Batch size must be a positive integer.'
with self.assertRaisesMessage(ValueError, msg):
Note.objects.bulk_update([], fields=['note'], batch_size=-1)
def test_nonexistent_field(self):
with self.assertRaisesMessage(FieldDoesNotExist, "Note has no field named 'nonexistent'"):
Note.objects.bulk_update([], ['nonexistent'])
pk_fields_error = 'bulk_update() cannot be used with primary key fields.'
def test_update_primary_key(self):
with self.assertRaisesMessage(ValueError, self.pk_fields_error):
Note.objects.bulk_update([], ['id'])
def test_update_custom_primary_key(self):
with self.assertRaisesMessage(ValueError, self.pk_fields_error):
CustomPk.objects.bulk_update([], ['name'])
def test_empty_objects(self):
with self.assertNumQueries(0):
rows_updated = Note.objects.bulk_update([], ['note'])
self.assertEqual(rows_updated, 0)
def test_large_batch(self):
Note.objects.bulk_create([
Note(note=str(i), misc=str(i))
for i in range(0, 2000)
])
notes = list(Note.objects.all())
rows_updated = Note.objects.bulk_update(notes, ['note'])
self.assertEqual(rows_updated, 2000)
def test_updated_rows_when_passing_duplicates(self):
note = Note.objects.create(note='test-note', misc='test')
rows_updated = Note.objects.bulk_update([note, note], ['note'])
self.assertEqual(rows_updated, 1)
# Duplicates in different batches.
rows_updated = Note.objects.bulk_update([note, note], ['note'], batch_size=1)
self.assertEqual(rows_updated, 2)
def test_only_concrete_fields_allowed(self):
obj = Valid.objects.create(valid='test')
detail = Detail.objects.create(data='test')
paragraph = Paragraph.objects.create(text='test')
Member.objects.create(name='test', details=detail)
msg = 'bulk_update() can only be used with concrete fields.'
with self.assertRaisesMessage(ValueError, msg):
Detail.objects.bulk_update([detail], fields=['member'])
with self.assertRaisesMessage(ValueError, msg):
Paragraph.objects.bulk_update([paragraph], fields=['page'])
with self.assertRaisesMessage(ValueError, msg):
Valid.objects.bulk_update([obj], fields=['parent'])
def test_custom_db_columns(self):
model = CustomDbColumn.objects.create(custom_column=1)
model.custom_column = 2
CustomDbColumn.objects.bulk_update([model], fields=['custom_column'])
model.refresh_from_db()
self.assertEqual(model.custom_column, 2)
def test_custom_pk(self):
custom_pks = [
CustomPk.objects.create(name='pk-%s' % i, extra='')
for i in range(10)
]
for model in custom_pks:
model.extra = 'extra-%s' % model.pk
CustomPk.objects.bulk_update(custom_pks, ['extra'])
self.assertCountEqual(
CustomPk.objects.values_list('extra', flat=True),
[cat.extra for cat in custom_pks]
)
def test_falsey_pk_value(self):
order = Order.objects.create(pk=0, name='test')
order.name = 'updated'
Order.objects.bulk_update([order], ['name'])
order.refresh_from_db()
self.assertEqual(order.name, 'updated')
def test_inherited_fields(self):
special_categories = [
SpecialCategory.objects.create(name=str(i), special_name=str(i))
for i in range(10)
]
for category in special_categories:
category.name = 'test-%s' % category.id
category.special_name = 'special-test-%s' % category.special_name
SpecialCategory.objects.bulk_update(special_categories, ['name', 'special_name'])
self.assertCountEqual(
SpecialCategory.objects.values_list('name', flat=True),
[cat.name for cat in special_categories]
)
self.assertCountEqual(
SpecialCategory.objects.values_list('special_name', flat=True),
[cat.special_name for cat in special_categories]
)
def test_field_references(self):
numbers = [Number.objects.create(num=0) for _ in range(10)]
for number in numbers:
number.num = F('num') + 1
Number.objects.bulk_update(numbers, ['num'])
self.assertCountEqual(Number.objects.filter(num=1), numbers)
def test_booleanfield(self):
individuals = [Individual.objects.create(alive=False) for _ in range(10)]
for individual in individuals:
individual.alive = True
Individual.objects.bulk_update(individuals, ['alive'])
self.assertCountEqual(Individual.objects.filter(alive=True), individuals)
def test_ipaddressfield(self):
for ip in ('2001::1', '1.2.3.4'):
with self.subTest(ip=ip):
models = [
CustomDbColumn.objects.create(ip_address='0.0.0.0')
for _ in range(10)
]
for model in models:
model.ip_address = ip
CustomDbColumn.objects.bulk_update(models, ['ip_address'])
self.assertCountEqual(CustomDbColumn.objects.filter(ip_address=ip), models)
def test_datetime_field(self):
articles = [
Article.objects.create(name=str(i), created=datetime.datetime.today())
for i in range(10)
]
point_in_time = datetime.datetime(1991, 10, 31)
for article in articles:
article.created = point_in_time
Article.objects.bulk_update(articles, ['created'])
self.assertCountEqual(Article.objects.filter(created=point_in_time), articles)
@skipUnlessDBFeature('supports_json_field')
def test_json_field(self):
JSONFieldNullable.objects.bulk_create([
JSONFieldNullable(json_field={'a': i}) for i in range(10)
])
objs = JSONFieldNullable.objects.all()
for obj in objs:
obj.json_field = {'c': obj.json_field['a'] + 1}
JSONFieldNullable.objects.bulk_update(objs, ['json_field'])
self.assertCountEqual(JSONFieldNullable.objects.filter(json_field__has_key='c'), objs)
|
46eed24a2446f8b626c0bd8215e212f6b5b7b34997ddd7f9b5ec68a1efbef168 | """
Testing some internals of template processing. These are *not* examples to be copied in user code.
"""
from django.template import Library, TemplateSyntaxError
from django.template.base import (
FilterExpression, Lexer, Parser, Token, TokenType, Variable,
)
from django.template.defaultfilters import register as filter_library
from django.test import SimpleTestCase
class ParserTests(SimpleTestCase):
def test_token_smart_split(self):
"""
#7027 -- _() syntax should work with spaces
"""
token = Token(TokenType.BLOCK, 'sometag _("Page not found") value|yesno:_("yes,no")')
split = token.split_contents()
self.assertEqual(split, ["sometag", '_("Page not found")', 'value|yesno:_("yes,no")'])
def test_repr(self):
token = Token(TokenType.BLOCK, 'some text')
self.assertEqual(repr(token), '<Block token: "some text...">')
parser = Parser([token], builtins=[filter_library])
self.assertEqual(
repr(parser),
'<Parser tokens=[<Block token: "some text...">]>',
)
filter_expression = FilterExpression('news|upper', parser)
self.assertEqual(repr(filter_expression), "<FilterExpression 'news|upper'>")
lexer = Lexer('{% for i in 1 %}{{ a }}\n{% endfor %}')
self.assertEqual(
repr(lexer),
'<Lexer template_string="{% for i in 1 %}{{ a...", verbatim=False>',
)
def test_filter_parsing(self):
c = {"article": {"section": "News"}}
p = Parser("", builtins=[filter_library])
def fe_test(s, val):
self.assertEqual(FilterExpression(s, p).resolve(c), val)
fe_test("article.section", "News")
fe_test("article.section|upper", "NEWS")
fe_test('"News"', "News")
fe_test("'News'", "News")
fe_test(r'"Some \"Good\" News"', 'Some "Good" News')
fe_test(r'"Some \"Good\" News"', 'Some "Good" News')
fe_test(r"'Some \'Bad\' News'", "Some 'Bad' News")
fe = FilterExpression(r'"Some \"Good\" News"', p)
self.assertEqual(fe.filters, [])
self.assertEqual(fe.var, 'Some "Good" News')
# Filtered variables should reject access of attributes beginning with
# underscores.
msg = "Variables and attributes may not begin with underscores: 'article._hidden'"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
FilterExpression("article._hidden|upper", p)
def test_variable_parsing(self):
c = {"article": {"section": "News"}}
self.assertEqual(Variable("article.section").resolve(c), "News")
self.assertEqual(Variable('"News"').resolve(c), "News")
self.assertEqual(Variable("'News'").resolve(c), "News")
# Translated strings are handled correctly.
self.assertEqual(Variable("_(article.section)").resolve(c), "News")
self.assertEqual(Variable('_("Good News")').resolve(c), "Good News")
self.assertEqual(Variable("_('Better News')").resolve(c), "Better News")
# Escaped quotes work correctly as well.
self.assertEqual(
Variable(r'"Some \"Good\" News"').resolve(c), 'Some "Good" News'
)
self.assertEqual(
Variable(r"'Some \'Better\' News'").resolve(c), "Some 'Better' News"
)
# Variables should reject access of attributes and variables beginning
# with underscores.
for name in ['article._hidden', '_article']:
msg = f"Variables and attributes may not begin with underscores: '{name}'"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
Variable(name)
        # Variables should raise on non-string types.
with self.assertRaisesMessage(TypeError, "Variable must be a string or number, got <class 'dict'>"):
Variable({})
def test_filter_args_count(self):
parser = Parser("")
register = Library()
@register.filter
def no_arguments(value):
pass
@register.filter
def one_argument(value, arg):
pass
@register.filter
def one_opt_argument(value, arg=False):
pass
@register.filter
def two_arguments(value, arg, arg2):
pass
@register.filter
def two_one_opt_arg(value, arg, arg2=False):
pass
parser.add_library(register)
for expr in (
'1|no_arguments:"1"',
'1|two_arguments',
'1|two_arguments:"1"',
'1|two_one_opt_arg',
):
with self.assertRaises(TemplateSyntaxError):
FilterExpression(expr, parser)
for expr in (
# Correct number of arguments
'1|no_arguments',
'1|one_argument:"1"',
# One optional
'1|one_opt_argument',
'1|one_opt_argument:"1"',
# Not supplying all
'1|two_one_opt_arg:"1"',
):
FilterExpression(expr, parser)
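# A minimal sketch (not in the original module) of resolving a filter
# expression by hand, mirroring fe_test() above: build a Parser with the
# default filter library, compile the expression, and resolve it against a
# plain dict used as the context.
def resolve_expression(expr, context):
    parser = Parser('', builtins=[filter_library])
    return FilterExpression(expr, parser).resolve(context)
# For example, resolve_expression('article.section|lower', {'article': {'section': 'News'}})
# returns 'news'.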
|
625331fd69f37bc09d9eba264ebcf2c2403b63b3d8c998857f3424e7b03ba350 | import sys
from django.contrib.auth.models import Group
from django.template import (
Context, Engine, TemplateDoesNotExist, TemplateSyntaxError,
)
from django.template.base import UNKNOWN_SOURCE
from django.test import SimpleTestCase, override_settings
from django.urls import NoReverseMatch
from django.utils import translation
from django.utils.html import escape
class TemplateTests(SimpleTestCase):
def test_string_origin(self):
template = Engine().from_string('string template')
self.assertEqual(template.origin.name, UNKNOWN_SOURCE)
self.assertIsNone(template.origin.loader_name)
self.assertEqual(template.source, 'string template')
@override_settings(SETTINGS_MODULE=None)
def test_url_reverse_no_settings_module(self):
"""
#9005 -- url tag shouldn't require settings.SETTINGS_MODULE to
be set.
"""
t = Engine(debug=True).from_string('{% url will_not_match %}')
c = Context()
with self.assertRaises(NoReverseMatch):
t.render(c)
def test_url_reverse_view_name(self):
"""
        #19827 -- The url tag should keep the original stack trace when
        reraising an exception.
"""
t = Engine().from_string('{% url will_not_match %}')
c = Context()
try:
t.render(c)
except NoReverseMatch:
tb = sys.exc_info()[2]
depth = 0
while tb.tb_next is not None:
tb = tb.tb_next
depth += 1
self.assertGreater(depth, 5, "The traceback context was lost when reraising the traceback.")
def test_no_wrapped_exception(self):
"""
        #16770 -- The template system doesn't wrap exceptions, but annotates
them.
"""
engine = Engine(debug=True)
c = Context({"coconuts": lambda: 42 / 0})
t = engine.from_string("{{ coconuts }}")
with self.assertRaises(ZeroDivisionError) as e:
t.render(c)
debug = e.exception.template_debug
self.assertEqual(debug['start'], 0)
self.assertEqual(debug['end'], 14)
def test_invalid_block_suggestion(self):
"""
        Error messages should include the unexpected block name and be in
        English, even when another language is active.
"""
engine = Engine()
msg = (
"Invalid block tag on line 1: 'endblock', expected 'elif', 'else' "
"or 'endif'. Did you forget to register or load this tag?"
)
with self.settings(USE_I18N=True), translation.override('de'):
with self.assertRaisesMessage(TemplateSyntaxError, msg):
engine.from_string("{% if 1 %}lala{% endblock %}{% endif %}")
def test_unknown_block_tag(self):
engine = Engine()
msg = (
"Invalid block tag on line 1: 'foobar'. Did you forget to "
"register or load this tag?"
)
with self.assertRaisesMessage(TemplateSyntaxError, msg):
engine.from_string("lala{% foobar %}")
def test_compile_filter_expression_error(self):
"""
        #19819 -- Make sure the correct token is highlighted for
FilterExpression errors.
"""
engine = Engine(debug=True)
msg = "Could not parse the remainder: '@bar' from 'foo@bar'"
with self.assertRaisesMessage(TemplateSyntaxError, msg) as e:
engine.from_string("{% if 1 %}{{ foo@bar }}{% endif %}")
debug = e.exception.template_debug
self.assertEqual((debug['start'], debug['end']), (10, 23))
self.assertEqual((debug['during']), '{{ foo@bar }}')
def test_compile_tag_error(self):
"""
Errors raised while compiling nodes should include the token
information.
"""
engine = Engine(
debug=True,
libraries={'bad_tag': 'template_tests.templatetags.bad_tag'},
)
with self.assertRaises(RuntimeError) as e:
engine.from_string("{% load bad_tag %}{% badtag %}")
self.assertEqual(e.exception.template_debug['during'], '{% badtag %}')
def test_compile_tag_error_27584(self):
engine = Engine(
app_dirs=True,
debug=True,
libraries={'tag_27584': 'template_tests.templatetags.tag_27584'},
)
t = engine.get_template('27584_parent.html')
with self.assertRaises(TemplateSyntaxError) as e:
t.render(Context())
self.assertEqual(e.exception.template_debug['during'], '{% badtag %}')
def test_compile_tag_error_27956(self):
"""Errors in a child of {% extends %} are displayed correctly."""
engine = Engine(
app_dirs=True,
debug=True,
libraries={'tag_27584': 'template_tests.templatetags.tag_27584'},
)
t = engine.get_template('27956_child.html')
with self.assertRaises(TemplateSyntaxError) as e:
t.render(Context())
self.assertEqual(e.exception.template_debug['during'], '{% badtag %}')
def test_render_tag_error_in_extended_block(self):
"""Errors in extended block are displayed correctly."""
e = Engine(app_dirs=True, debug=True)
template = e.get_template('test_extends_block_error.html')
context = Context()
with self.assertRaises(TemplateDoesNotExist) as cm:
template.render(context)
self.assertEqual(
cm.exception.template_debug['during'],
escape('{% include "missing.html" %}'),
)
def test_super_errors(self):
"""
        #18169 -- NoReverseMatch should not be silenced in block.super.
"""
engine = Engine(app_dirs=True)
t = engine.get_template('included_content.html')
with self.assertRaises(NoReverseMatch):
t.render(Context())
def test_debug_tag_non_ascii(self):
"""
#23060 -- Test non-ASCII model representation in debug output.
"""
group = Group(name="清風")
c1 = Context({"objs": [group]})
t1 = Engine().from_string('{% debug %}')
self.assertIn("清風", t1.render(c1))
def test_extends_generic_template(self):
"""
#24338 -- Allow extending django.template.backends.django.Template
objects.
"""
engine = Engine()
parent = engine.from_string('{% block content %}parent{% endblock %}')
child = engine.from_string(
'{% extends parent %}{% block content %}child{% endblock %}')
self.assertEqual(child.render(Context({'parent': parent})), 'child')
def test_node_origin(self):
"""
#25848 -- Set origin on Node so debugging tools can determine which
template the node came from even if extending or including templates.
"""
template = Engine().from_string('content')
for node in template.nodelist:
self.assertEqual(node.origin, template.origin)
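# An illustrative helper sketch (not part of the suite): with Engine(debug=True),
# exceptions raised while parsing or rendering carry a template_debug dict whose
# 'start'/'end' offsets and 'during' snippet locate the failing token, which is
# what several assertions above rely on.
def debug_snippet(exc):
    info = getattr(exc, 'template_debug', None)
    if info is None:
        return None
    return info['start'], info['end'], info['during']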
|
c8612c54d4adf202fe92d3e811a3c37a25c1153cc6ae3beb431b7395ae4576ea | from pathlib import Path
from unittest import mock
from django.template import autoreload
from django.test import SimpleTestCase, override_settings
from django.test.utils import require_jinja2
ROOT = Path(__file__).parent.absolute()
EXTRA_TEMPLATES_DIR = ROOT / "templates_extra"
@override_settings(
INSTALLED_APPS=['template_tests'],
TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
'APP_DIRS': True,
}, {
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [EXTRA_TEMPLATES_DIR],
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
},
}])
class TemplateReloadTests(SimpleTestCase):
@mock.patch('django.template.autoreload.reset_loaders')
def test_template_changed(self, mock_reset):
template_path = Path(__file__).parent / 'templates' / 'index.html'
self.assertTrue(autoreload.template_changed(None, template_path))
mock_reset.assert_called_once()
@mock.patch('django.template.autoreload.reset_loaders')
def test_non_template_changed(self, mock_reset):
self.assertIsNone(autoreload.template_changed(None, Path(__file__)))
mock_reset.assert_not_called()
def test_watch_for_template_changes(self):
mock_reloader = mock.MagicMock()
autoreload.watch_for_template_changes(mock_reloader)
self.assertSequenceEqual(
sorted(mock_reloader.watch_dir.call_args_list),
[
mock.call(ROOT / 'templates', '**/*'),
mock.call(ROOT / 'templates_extra', '**/*')
]
)
def test_get_template_directories(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / 'templates_extra',
ROOT / 'templates',
}
)
@mock.patch('django.template.loaders.base.Loader.reset')
def test_reset_all_loaders(self, mock_reset):
autoreload.reset_loaders()
self.assertEqual(mock_reset.call_count, 2)
@override_settings(
TEMPLATES=[{
'DIRS': [
str(ROOT) + '/absolute_str',
'template_tests/relative_str',
Path('template_tests/relative_path'),
],
'BACKEND': 'django.template.backends.django.DjangoTemplates',
}]
)
def test_template_dirs_normalized_to_paths(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / 'absolute_str',
Path.cwd() / 'template_tests/relative_str',
Path.cwd() / 'template_tests/relative_path',
}
)
@require_jinja2
@override_settings(INSTALLED_APPS=['template_tests'])
class Jinja2TemplateReloadTests(SimpleTestCase):
def test_watch_for_template_changes(self):
mock_reloader = mock.MagicMock()
autoreload.watch_for_template_changes(mock_reloader)
self.assertSequenceEqual(
sorted(mock_reloader.watch_dir.call_args_list),
[
mock.call(ROOT / 'templates', '**/*'),
]
)
def test_get_template_directories(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / 'templates',
}
)
@mock.patch('django.template.loaders.base.Loader.reset')
def test_reset_all_loaders(self, mock_reset):
autoreload.reset_loaders()
self.assertEqual(mock_reset.call_count, 0)
|
5515e2fcef0f3ea3a7e7f07f8be40cc1d2367649ee1cbce32c0492137e606f33 | import os
from django.core.exceptions import ImproperlyConfigured
from django.template import Context
from django.template.engine import Engine
from django.test import SimpleTestCase, override_settings
from .utils import ROOT, TEMPLATE_DIR
OTHER_DIR = os.path.join(ROOT, 'other_templates')
class EngineTest(SimpleTestCase):
def test_repr_empty(self):
engine = Engine()
self.assertEqual(
repr(engine),
"<Engine: app_dirs=False debug=False loaders=[("
"'django.template.loaders.cached.Loader', "
"['django.template.loaders.filesystem.Loader'])] "
"string_if_invalid='' file_charset='utf-8' builtins=["
"'django.template.defaulttags', 'django.template.defaultfilters', "
"'django.template.loader_tags'] autoescape=True>"
)
def test_repr(self):
engine = Engine(
dirs=[TEMPLATE_DIR],
context_processors=['django.template.context_processors.debug'],
debug=True,
loaders=['django.template.loaders.filesystem.Loader'],
string_if_invalid='x',
file_charset='utf-16',
libraries={'custom': 'template_tests.templatetags.custom'},
autoescape=False,
)
self.assertEqual(
repr(engine),
f"<Engine: dirs=[{TEMPLATE_DIR!r}] app_dirs=False "
"context_processors=['django.template.context_processors.debug'] "
"debug=True loaders=['django.template.loaders.filesystem.Loader'] "
"string_if_invalid='x' file_charset='utf-16' "
"libraries={'custom': 'template_tests.templatetags.custom'} "
"builtins=['django.template.defaulttags', "
"'django.template.defaultfilters', 'django.template.loader_tags'] "
"autoescape=False>"
)
class RenderToStringTest(SimpleTestCase):
def setUp(self):
self.engine = Engine(dirs=[TEMPLATE_DIR])
def test_basic_context(self):
self.assertEqual(
self.engine.render_to_string('test_context.html', {'obj': 'test'}),
'obj:test\n',
)
def test_autoescape_off(self):
engine = Engine(dirs=[TEMPLATE_DIR], autoescape=False)
self.assertEqual(
engine.render_to_string('test_context.html', {'obj': '<script>'}),
'obj:<script>\n',
)
class GetDefaultTests(SimpleTestCase):
@override_settings(TEMPLATES=[])
def test_no_engines_configured(self):
msg = 'No DjangoTemplates backend is configured.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
Engine.get_default()
@override_settings(TEMPLATES=[{
'NAME': 'default',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {'file_charset': 'abc'},
}])
def test_single_engine_configured(self):
self.assertEqual(Engine.get_default().file_charset, 'abc')
@override_settings(TEMPLATES=[{
'NAME': 'default',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {'file_charset': 'abc'},
}, {
'NAME': 'other',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {'file_charset': 'def'},
}])
def test_multiple_engines_configured(self):
self.assertEqual(Engine.get_default().file_charset, 'abc')
class LoaderTests(SimpleTestCase):
def test_origin(self):
engine = Engine(dirs=[TEMPLATE_DIR], debug=True)
template = engine.get_template('index.html')
self.assertEqual(template.origin.template_name, 'index.html')
def test_loader_priority(self):
"""
        #21460 -- The order of template loaders is respected.
"""
loaders = [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
engine = Engine(dirs=[OTHER_DIR, TEMPLATE_DIR], loaders=loaders)
template = engine.get_template('priority/foo.html')
self.assertEqual(template.render(Context()), 'priority\n')
def test_cached_loader_priority(self):
"""
        The order of template loaders is respected when they are wrapped in the
        cached loader. Refs #21460.
"""
loaders = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
engine = Engine(dirs=[OTHER_DIR, TEMPLATE_DIR], loaders=loaders)
template = engine.get_template('priority/foo.html')
self.assertEqual(template.render(Context()), 'priority\n')
template = engine.get_template('priority/foo.html')
self.assertEqual(template.render(Context()), 'priority\n')
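# Configuration sketch (not part of the original file): wrapping the loaders in
# django.template.loaders.cached.Loader, as test_cached_loader_priority does,
# keeps compiled templates in memory, so the second get_template() call is
# served from the cache while the filesystem-before-app_directories search
# order is preserved.
#
#   engine = Engine(
#       dirs=[OTHER_DIR, TEMPLATE_DIR],
#       loaders=[('django.template.loaders.cached.Loader', [
#           'django.template.loaders.filesystem.Loader',
#           'django.template.loaders.app_directories.Loader',
#       ])],
#   )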
|
fd95c2ad1f5a58d3bd549c5178221749151dd899e68a1898a59cc75bcd856d17 | from datetime import date, datetime, timedelta
from operator import attrgetter
from django.db import IntegrityError
from django.test import TestCase
from .models import (
CustomMembership, Employee, Event, Friendship, Group, Ingredient,
Invitation, Membership, Person, PersonChild, PersonSelfRefM2M, Recipe,
RecipeIngredient, Relationship, SymmetricalFriendship,
)
class M2mThroughTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.bob = Person.objects.create(name='Bob')
cls.jim = Person.objects.create(name='Jim')
cls.jane = Person.objects.create(name='Jane')
cls.rock = Group.objects.create(name='Rock')
cls.roll = Group.objects.create(name='Roll')
def test_reverse_inherited_m2m_with_through_fields_list_hashable(self):
reverse_m2m = Person._meta.get_field('events_invited')
self.assertEqual(reverse_m2m.through_fields, ['event', 'invitee'])
inherited_reverse_m2m = PersonChild._meta.get_field('events_invited')
self.assertEqual(inherited_reverse_m2m.through_fields, ['event', 'invitee'])
self.assertEqual(hash(reverse_m2m), hash(inherited_reverse_m2m))
def test_retrieve_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
expected = ['Jane', 'Jim']
self.assertQuerysetEqual(
self.rock.members.all(),
expected,
attrgetter("name")
)
def test_get_on_intermediate_model(self):
Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.get(person=self.jane, group=self.rock)
self.assertEqual(
repr(queryset),
'<Membership: Jane is a member of Rock>'
)
def test_filter_on_intermediate_model(self):
m1 = Membership.objects.create(person=self.jim, group=self.rock)
m2 = Membership.objects.create(person=self.jane, group=self.rock)
queryset = Membership.objects.filter(group=self.rock)
self.assertSequenceEqual(queryset, [m1, m2])
def test_add_on_m2m_with_intermediate_model(self):
self.rock.members.add(self.bob, through_defaults={'invite_reason': 'He is good.'})
self.assertSequenceEqual(self.rock.members.all(), [self.bob])
self.assertEqual(self.rock.membership_set.get().invite_reason, 'He is good.')
def test_add_on_m2m_with_intermediate_model_callable_through_default(self):
def invite_reason_callable():
return 'They were good at %s' % datetime.now()
self.rock.members.add(
self.bob, self.jane,
through_defaults={'invite_reason': invite_reason_callable},
)
self.assertSequenceEqual(self.rock.members.all(), [self.bob, self.jane])
self.assertEqual(
self.rock.membership_set.filter(
invite_reason__startswith='They were good at ',
).count(),
2,
)
# invite_reason_callable() is called once.
self.assertEqual(
self.bob.membership_set.get().invite_reason,
self.jane.membership_set.get().invite_reason,
)
def test_set_on_m2m_with_intermediate_model_callable_through_default(self):
self.rock.members.set(
[self.bob, self.jane],
through_defaults={'invite_reason': lambda: 'Why not?'},
)
self.assertSequenceEqual(self.rock.members.all(), [self.bob, self.jane])
self.assertEqual(
self.rock.membership_set.filter(
invite_reason__startswith='Why not?',
).count(),
2,
)
def test_add_on_m2m_with_intermediate_model_value_required(self):
self.rock.nodefaultsnonulls.add(self.jim, through_defaults={'nodefaultnonull': 1})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
def test_add_on_m2m_with_intermediate_model_value_required_fails(self):
with self.assertRaises(IntegrityError):
self.rock.nodefaultsnonulls.add(self.jim)
def test_create_on_m2m_with_intermediate_model(self):
annie = self.rock.members.create(name='Annie', through_defaults={'invite_reason': 'She was just awesome.'})
self.assertSequenceEqual(self.rock.members.all(), [annie])
self.assertEqual(self.rock.membership_set.get().invite_reason, 'She was just awesome.')
def test_create_on_m2m_with_intermediate_model_callable_through_default(self):
annie = self.rock.members.create(
name='Annie',
through_defaults={'invite_reason': lambda: 'She was just awesome.'},
)
self.assertSequenceEqual(self.rock.members.all(), [annie])
self.assertEqual(
self.rock.membership_set.get().invite_reason,
'She was just awesome.',
)
def test_create_on_m2m_with_intermediate_model_value_required(self):
self.rock.nodefaultsnonulls.create(name='Test', through_defaults={'nodefaultnonull': 1})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
def test_create_on_m2m_with_intermediate_model_value_required_fails(self):
with self.assertRaises(IntegrityError):
self.rock.nodefaultsnonulls.create(name='Test')
def test_get_or_create_on_m2m_with_intermediate_model_value_required(self):
self.rock.nodefaultsnonulls.get_or_create(name='Test', through_defaults={'nodefaultnonull': 1})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
def test_get_or_create_on_m2m_with_intermediate_model_value_required_fails(self):
with self.assertRaises(IntegrityError):
self.rock.nodefaultsnonulls.get_or_create(name='Test')
def test_update_or_create_on_m2m_with_intermediate_model_value_required(self):
self.rock.nodefaultsnonulls.update_or_create(name='Test', through_defaults={'nodefaultnonull': 1})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
def test_update_or_create_on_m2m_with_intermediate_model_value_required_fails(self):
with self.assertRaises(IntegrityError):
self.rock.nodefaultsnonulls.update_or_create(name='Test')
def test_remove_on_m2m_with_intermediate_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
self.rock.members.remove(self.jim)
self.assertSequenceEqual(self.rock.members.all(), [])
def test_remove_on_m2m_with_intermediate_model_multiple(self):
Membership.objects.create(person=self.jim, group=self.rock, invite_reason='1')
Membership.objects.create(person=self.jim, group=self.rock, invite_reason='2')
self.assertSequenceEqual(self.rock.members.all(), [self.jim, self.jim])
self.rock.members.remove(self.jim)
self.assertSequenceEqual(self.rock.members.all(), [])
def test_set_on_m2m_with_intermediate_model(self):
members = list(Person.objects.filter(name__in=['Bob', 'Jim']))
self.rock.members.set(members)
self.assertSequenceEqual(self.rock.members.all(), [self.bob, self.jim])
def test_set_on_m2m_with_intermediate_model_value_required(self):
self.rock.nodefaultsnonulls.set([self.jim], through_defaults={'nodefaultnonull': 1})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
self.rock.nodefaultsnonulls.set([self.jim], through_defaults={'nodefaultnonull': 2})
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
self.rock.nodefaultsnonulls.set([self.jim], through_defaults={'nodefaultnonull': 2}, clear=True)
self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 2)
def test_set_on_m2m_with_intermediate_model_value_required_fails(self):
with self.assertRaises(IntegrityError):
self.rock.nodefaultsnonulls.set([self.jim])
def test_clear_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
self.rock.members.clear()
self.assertQuerysetEqual(
self.rock.members.all(),
[]
)
def test_retrieve_reverse_intermediate_items(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
expected = ['Rock', 'Roll']
self.assertQuerysetEqual(
self.jim.group_set.all(),
expected,
attrgetter("name")
)
def test_add_on_reverse_m2m_with_intermediate_model(self):
self.bob.group_set.add(self.rock)
self.assertSequenceEqual(self.bob.group_set.all(), [self.rock])
def test_create_on_reverse_m2m_with_intermediate_model(self):
funk = self.bob.group_set.create(name='Funk')
self.assertSequenceEqual(self.bob.group_set.all(), [funk])
def test_remove_on_reverse_m2m_with_intermediate_model(self):
Membership.objects.create(person=self.bob, group=self.rock)
self.bob.group_set.remove(self.rock)
self.assertSequenceEqual(self.bob.group_set.all(), [])
def test_set_on_reverse_m2m_with_intermediate_model(self):
members = list(Group.objects.filter(name__in=['Rock', 'Roll']))
self.bob.group_set.set(members)
self.assertSequenceEqual(self.bob.group_set.all(), [self.rock, self.roll])
def test_clear_on_reverse_removes_all_the_m2m_relationships(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jim, group=self.roll)
self.jim.group_set.clear()
self.assertQuerysetEqual(
self.jim.group_set.all(),
[]
)
def test_query_model_by_attribute_name_of_related_model(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Group.objects.filter(members__name='Bob'),
['Roll'],
attrgetter("name")
)
def test_order_by_relational_field_through_model(self):
today = datetime.now()
yesterday = today - timedelta(days=1)
CustomMembership.objects.create(person=self.jim, group=self.rock, date_joined=yesterday)
CustomMembership.objects.create(person=self.bob, group=self.rock, date_joined=today)
CustomMembership.objects.create(person=self.jane, group=self.roll, date_joined=yesterday)
CustomMembership.objects.create(person=self.jim, group=self.roll, date_joined=today)
self.assertSequenceEqual(
self.rock.custom_members.order_by('custom_person_related_name'),
[self.jim, self.bob]
)
self.assertSequenceEqual(
self.roll.custom_members.order_by('custom_person_related_name'),
[self.jane, self.jim]
)
def test_query_first_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Group.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Roll'],
attrgetter("name")
)
def test_query_second_model_by_intermediate_model_attribute(self):
Membership.objects.create(
person=self.jane, group=self.roll,
invite_reason="She was just awesome."
)
Membership.objects.create(
person=self.jim, group=self.roll,
invite_reason="He is good."
)
Membership.objects.create(person=self.bob, group=self.roll)
qs = Person.objects.filter(
membership__invite_reason="She was just awesome."
)
self.assertQuerysetEqual(
qs,
['Jane'],
attrgetter("name")
)
def test_query_model_by_related_model_name(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(person=self.jane, group=self.rock)
Membership.objects.create(person=self.bob, group=self.roll)
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(person=self.jane, group=self.roll)
self.assertQuerysetEqual(
Person.objects.filter(group__name="Rock"),
['Jane', 'Jim'],
attrgetter("name")
)
def test_query_model_by_custom_related_name(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
Person.objects.filter(custom__name="Rock"),
['Bob', 'Jim'],
attrgetter("name")
)
def test_query_model_by_intermediate_can_return_non_unique_queryset(self):
Membership.objects.create(person=self.jim, group=self.rock)
Membership.objects.create(
person=self.jane, group=self.rock,
date_joined=datetime(2006, 1, 1)
)
Membership.objects.create(
person=self.bob, group=self.roll,
date_joined=datetime(2004, 1, 1))
Membership.objects.create(person=self.jim, group=self.roll)
Membership.objects.create(
person=self.jane, group=self.roll,
date_joined=datetime(2004, 1, 1))
qs = Person.objects.filter(
membership__date_joined__gt=datetime(2004, 1, 1)
)
self.assertQuerysetEqual(
qs,
['Jane', 'Jim', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_forward_empty_qs(self):
self.assertQuerysetEqual(
self.rock.custom_members.all(),
[]
)
def test_custom_related_name_reverse_empty_qs(self):
self.assertQuerysetEqual(
self.bob.custom.all(),
[]
)
def test_custom_related_name_forward_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.rock.custom_members.all(),
['Bob', 'Jim'],
attrgetter("name")
)
def test_custom_related_name_reverse_non_empty_qs(self):
CustomMembership.objects.create(person=self.bob, group=self.rock)
CustomMembership.objects.create(person=self.jim, group=self.rock)
self.assertQuerysetEqual(
self.bob.custom.all(),
['Rock'],
attrgetter("name")
)
def test_custom_related_name_doesnt_conflict_with_fky_related_name(self):
c = CustomMembership.objects.create(person=self.bob, group=self.rock)
self.assertSequenceEqual(self.bob.custom_person_related_name.all(), [c])
def test_through_fields(self):
"""
Relations with intermediary tables with multiple FKs
to the M2M's ``to`` model are possible.
"""
event = Event.objects.create(title='Rockwhale 2014')
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jim)
Invitation.objects.create(event=event, inviter=self.bob, invitee=self.jane)
self.assertQuerysetEqual(
event.invitees.all(),
['Jane', 'Jim'],
attrgetter('name')
)
class M2mThroughReferentialTests(TestCase):
def test_self_referential_empty_qs(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
self.assertQuerysetEqual(
tony.friends.all(),
[]
)
def test_self_referential_non_symmetrical_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
def test_self_referential_non_symmetrical_second_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
def test_self_referential_non_symmetrical_clear_first_side(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
chris.friends.clear()
self.assertQuerysetEqual(
chris.friends.all(),
[]
)
# Since this isn't a symmetrical relation, Tony's friend link still exists.
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
def test_self_referential_non_symmetrical_both(self):
tony = PersonSelfRefM2M.objects.create(name="Tony")
chris = PersonSelfRefM2M.objects.create(name="Chris")
Friendship.objects.create(
first=tony, second=chris, date_friended=datetime.now()
)
Friendship.objects.create(
first=chris, second=tony, date_friended=datetime.now()
)
self.assertQuerysetEqual(
tony.friends.all(),
['Chris'],
attrgetter("name")
)
self.assertQuerysetEqual(
chris.friends.all(),
['Tony'],
attrgetter("name")
)
def test_through_fields_self_referential(self):
john = Employee.objects.create(name='john')
peter = Employee.objects.create(name='peter')
mary = Employee.objects.create(name='mary')
harry = Employee.objects.create(name='harry')
Relationship.objects.create(source=john, target=peter, another=None)
Relationship.objects.create(source=john, target=mary, another=None)
Relationship.objects.create(source=john, target=harry, another=peter)
self.assertQuerysetEqual(
john.subordinates.all(),
['peter', 'mary', 'harry'],
attrgetter('name')
)
def test_self_referential_symmetrical(self):
tony = PersonSelfRefM2M.objects.create(name='Tony')
chris = PersonSelfRefM2M.objects.create(name='Chris')
SymmetricalFriendship.objects.create(
first=tony, second=chris, date_friended=date.today(),
)
self.assertSequenceEqual(tony.sym_friends.all(), [chris])
# A manually created symmetrical m2m relation doesn't add the mirror entry
# automatically.
self.assertSequenceEqual(chris.sym_friends.all(), [])
SymmetricalFriendship.objects.create(
first=chris, second=tony, date_friended=date.today()
)
self.assertSequenceEqual(chris.sym_friends.all(), [tony])
def test_add_on_symmetrical_m2m_with_intermediate_model(self):
tony = PersonSelfRefM2M.objects.create(name='Tony')
chris = PersonSelfRefM2M.objects.create(name='Chris')
date_friended = date(2017, 1, 3)
tony.sym_friends.add(chris, through_defaults={'date_friended': date_friended})
self.assertSequenceEqual(tony.sym_friends.all(), [chris])
self.assertSequenceEqual(chris.sym_friends.all(), [tony])
friendship = tony.symmetricalfriendship_set.get()
self.assertEqual(friendship.date_friended, date_friended)
def test_set_on_symmetrical_m2m_with_intermediate_model(self):
tony = PersonSelfRefM2M.objects.create(name='Tony')
chris = PersonSelfRefM2M.objects.create(name='Chris')
anne = PersonSelfRefM2M.objects.create(name='Anne')
kate = PersonSelfRefM2M.objects.create(name='Kate')
date_friended_add = date(2013, 1, 5)
date_friended_set = date.today()
tony.sym_friends.add(
anne, chris,
through_defaults={'date_friended': date_friended_add},
)
tony.sym_friends.set(
[anne, kate],
through_defaults={'date_friended': date_friended_set},
)
self.assertSequenceEqual(tony.sym_friends.all(), [anne, kate])
self.assertSequenceEqual(anne.sym_friends.all(), [tony])
self.assertSequenceEqual(kate.sym_friends.all(), [tony])
self.assertEqual(
kate.symmetricalfriendship_set.get().date_friended,
date_friended_set,
)
# Date is preserved.
self.assertEqual(
anne.symmetricalfriendship_set.get().date_friended,
date_friended_add,
)
# Recreate relationship.
tony.sym_friends.set(
[anne],
clear=True,
through_defaults={'date_friended': date_friended_set},
)
self.assertSequenceEqual(tony.sym_friends.all(), [anne])
self.assertSequenceEqual(anne.sym_friends.all(), [tony])
self.assertEqual(
anne.symmetricalfriendship_set.get().date_friended,
date_friended_set,
)
class M2mThroughToFieldsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.pea = Ingredient.objects.create(iname='pea')
cls.potato = Ingredient.objects.create(iname='potato')
cls.tomato = Ingredient.objects.create(iname='tomato')
cls.curry = Recipe.objects.create(rname='curry')
RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.potato)
RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.pea)
RecipeIngredient.objects.create(recipe=cls.curry, ingredient=cls.tomato)
def test_retrieval(self):
# Forward retrieval
self.assertSequenceEqual(self.curry.ingredients.all(), [self.pea, self.potato, self.tomato])
# Backward retrieval
self.assertEqual(self.tomato.recipes.get(), self.curry)
def test_choices(self):
field = Recipe._meta.get_field('ingredients')
self.assertEqual(
[choice[0] for choice in field.get_choices(include_blank=False)],
['pea', 'potato', 'tomato']
)
|
8b7e0f9a6f14462e22eba194cc18d99065b405cd67ce07fe707532085d544ffd | from datetime import datetime
from django.db import models
# M2M described on one of the models
class Person(models.Model):
name = models.CharField(max_length=128)
class Meta:
ordering = ('name',)
class PersonChild(Person):
pass
class Group(models.Model):
name = models.CharField(max_length=128)
members = models.ManyToManyField(Person, through='Membership')
custom_members = models.ManyToManyField(Person, through='CustomMembership', related_name="custom")
nodefaultsnonulls = models.ManyToManyField(
Person,
through='TestNoDefaultsOrNulls',
related_name="testnodefaultsnonulls",
)
class Meta:
ordering = ('name',)
class Membership(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
group = models.ForeignKey(Group, models.CASCADE)
date_joined = models.DateTimeField(default=datetime.now)
invite_reason = models.CharField(max_length=64, null=True)
class Meta:
ordering = ('date_joined', 'invite_reason', 'group')
def __str__(self):
return "%s is a member of %s" % (self.person.name, self.group.name)
class CustomMembership(models.Model):
person = models.ForeignKey(
Person,
models.CASCADE,
db_column="custom_person_column",
related_name="custom_person_related_name",
)
group = models.ForeignKey(Group, models.CASCADE)
weird_fk = models.ForeignKey(Membership, models.SET_NULL, null=True)
date_joined = models.DateTimeField(default=datetime.now)
class Meta:
db_table = "test_table"
ordering = ["date_joined"]
def __str__(self):
return "%s is a member of %s" % (self.person.name, self.group.name)
class TestNoDefaultsOrNulls(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
group = models.ForeignKey(Group, models.CASCADE)
nodefaultnonull = models.IntegerField()
class PersonSelfRefM2M(models.Model):
name = models.CharField(max_length=5)
friends = models.ManyToManyField('self', through="Friendship", symmetrical=False)
sym_friends = models.ManyToManyField('self', through='SymmetricalFriendship', symmetrical=True)
class Friendship(models.Model):
first = models.ForeignKey(PersonSelfRefM2M, models.CASCADE, related_name="rel_from_set")
second = models.ForeignKey(PersonSelfRefM2M, models.CASCADE, related_name="rel_to_set")
date_friended = models.DateTimeField()
class SymmetricalFriendship(models.Model):
first = models.ForeignKey(PersonSelfRefM2M, models.CASCADE)
second = models.ForeignKey(PersonSelfRefM2M, models.CASCADE, related_name='+')
date_friended = models.DateField()
# Custom through link fields
class Event(models.Model):
title = models.CharField(max_length=50)
invitees = models.ManyToManyField(
to=Person,
through='Invitation',
through_fields=['event', 'invitee'],
related_name='events_invited',
)
class Invitation(models.Model):
event = models.ForeignKey(Event, models.CASCADE, related_name='invitations')
# Field order is deliberately inverted; the target field is "invitee".
inviter = models.ForeignKey(Person, models.CASCADE, related_name='invitations_sent')
invitee = models.ForeignKey(Person, models.CASCADE, related_name='invitations')
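# Note: through_fields is required on Event.invitees above because Invitation has
# two foreign keys to Person ('inviter' and 'invitee'), which would otherwise make
# the m2m ambiguous; through_fields=['event', 'invitee'] names the FK back to Event
# and the FK to the target Person, in that order.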
class Employee(models.Model):
name = models.CharField(max_length=5)
subordinates = models.ManyToManyField(
'self',
through="Relationship",
through_fields=('source', 'target'),
symmetrical=False,
)
class Meta:
ordering = ('pk',)
class Relationship(models.Model):
# field order is deliberately inverted.
another = models.ForeignKey(Employee, models.SET_NULL, related_name="rel_another_set", null=True)
target = models.ForeignKey(Employee, models.CASCADE, related_name="rel_target_set")
source = models.ForeignKey(Employee, models.CASCADE, related_name="rel_source_set")
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
class Meta:
ordering = ('iname',)
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
ingredients = models.ManyToManyField(
Ingredient, through='RecipeIngredient', related_name='recipes',
)
class Meta:
ordering = ('rname',)
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')
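# Note: because RecipeIngredient declares to_field='iname'/'rname', the
# intermediate table stores ingredient and recipe names rather than integer
# primary keys, which is why M2mThroughToFieldsTests.test_choices sees the
# iname values as the choice values for Recipe.ingredients.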
|
42749445c75be645e03f5b91321ce7e56e75610142490a3ddb896fff953afea8 | from django.db.models.utils import create_namedtuple_class
from django.test import SimpleTestCase
class NamedTupleClassTests(SimpleTestCase):
def test_immutability(self):
row_class = create_namedtuple_class('field1', 'field2')
row = row_class('value1', 'value2')
with self.assertRaises(AttributeError):
row.field3 = 'value3'
|
eceb1277e640f8e1a0b9d53028a1fb2c9d2b38ab087d99d8c731402750b3ec36 | from datetime import datetime
from django.contrib.sitemaps import GenericSitemap
from django.test import ignore_warnings, override_settings
from django.utils.deprecation import RemovedInDjango50Warning
from .base import SitemapTestsBase
from .models import TestModel
@override_settings(ABSOLUTE_URL_OVERRIDES={})
class GenericViewsSitemapTests(SitemapTestsBase):
def test_generic_sitemap_attributes(self):
datetime_value = datetime.now()
queryset = TestModel.objects.all()
generic_sitemap = GenericSitemap(
info_dict={
'queryset': queryset,
'date_field': datetime_value,
},
priority=0.6,
changefreq='monthly',
protocol='https',
)
attr_values = (
('date_field', datetime_value),
('priority', 0.6),
('changefreq', 'monthly'),
('protocol', 'https'),
)
for attr_name, expected_value in attr_values:
with self.subTest(attr_name=attr_name):
self.assertEqual(getattr(generic_sitemap, attr_name), expected_value)
self.assertCountEqual(generic_sitemap.queryset, queryset)
def test_generic_sitemap(self):
"A minimal generic sitemap can be rendered"
response = self.client.get('/generic/sitemap.xml')
expected = ''
for pk in TestModel.objects.values_list("id", flat=True):
expected += "<url><loc>%s/testmodel/%s/</loc></url>" % (self.base_url, pk)
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
%s
</urlset>
""" % expected
self.assertXMLEqual(response.content.decode(), expected_content)
def test_generic_sitemap_lastmod(self):
test_model = TestModel.objects.first()
TestModel.objects.update(lastmod=datetime(2013, 3, 13, 10, 0, 0))
response = self.client.get('/generic-lastmod/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>%s/testmodel/%s/</loc><lastmod>2013-03-13</lastmod></url>
</urlset>
""" % (self.base_url, test_model.pk)
self.assertXMLEqual(response.content.decode(), expected_content)
self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 10:00:00 GMT')
def test_get_protocol_defined_in_constructor(self):
for protocol in ['http', 'https']:
with self.subTest(protocol=protocol):
sitemap = GenericSitemap({'queryset': None}, protocol=protocol)
self.assertEqual(sitemap.get_protocol(), protocol)
def test_get_protocol_passed_as_argument(self):
sitemap = GenericSitemap({'queryset': None})
for protocol in ['http', 'https']:
with self.subTest(protocol=protocol):
self.assertEqual(sitemap.get_protocol(protocol), protocol)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_get_protocol_default(self):
sitemap = GenericSitemap({'queryset': None})
self.assertEqual(sitemap.get_protocol(), 'http')
def test_get_protocol_default_warning(self):
sitemap = GenericSitemap({'queryset': None})
msg = (
"The default sitemap protocol will be changed from 'http' to "
"'https' in Django 5.0. Set Sitemap.protocol to silence this "
"warning."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
sitemap.get_protocol()
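# A minimal sketch (not part of the original tests) of how a GenericSitemap is
# typically exposed through a URLconf, which is roughly what the '/generic/...'
# URLs exercised above do; the function name and the 'testmodel' section key are
# illustrative assumptions.
def _example_generic_sitemap_urlpatterns():
    from django.contrib.sitemaps.views import sitemap
    from django.urls import path
    info_dict = {'queryset': TestModel.objects.all(), 'date_field': 'lastmod'}
    return [
        path(
            'sitemap.xml',
            sitemap,
            {'sitemaps': {'testmodel': GenericSitemap(info_dict, priority=0.6)}},
            name='django.contrib.sitemaps.views.sitemap',
        ),
    ]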
|
5071f7551f7c22400c637bf1fa06fc278f73365d62a74560089a5fb9f4eabcec | import os
from datetime import date
from django.contrib.sitemaps import Sitemap
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from django.test import ignore_warnings, modify_settings, override_settings
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.formats import localize
from .base import SitemapTestsBase
from .models import TestModel
class HTTPSitemapTests(SitemapTestsBase):
use_sitemap_err_msg = (
'To use sitemaps, either enable the sites framework or pass a '
'Site/RequestSite object in your view.'
)
def test_simple_sitemap_index(self):
"A simple sitemap index can be rendered"
response = self.client.get('/simple/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_not_callable(self):
"""A sitemap may not be callable."""
response = self.client.get('/simple-not-callable/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode(), expected_content)
def test_paged_sitemap(self):
"""A sitemap may have multiple pages."""
response = self.client.get('/simple-paged/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>{0}/simple/sitemap-simple.xml</loc></sitemap><sitemap><loc>{0}/simple/sitemap-simple.xml?p=2</loc></sitemap>
</sitemapindex>
""".format(self.base_url)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')],
}])
def test_simple_sitemap_custom_index(self):
"A simple sitemap index can be rendered with a custom template"
response = self.client.get('/simple/custom-index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/simple/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode(), expected_content)
def test_simple_sitemap_section(self):
"A simple sitemap section can be rendered"
response = self.client.get('/simple/sitemap-simple.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode(), expected_content)
def test_no_section(self):
response = self.client.get('/simple/sitemap-simple2.xml')
self.assertEqual(str(response.context['exception']), "No sitemap available for section: 'simple2'")
self.assertEqual(response.status_code, 404)
def test_empty_page(self):
response = self.client.get('/simple/sitemap-simple.xml?p=0')
self.assertEqual(str(response.context['exception']), 'Page 0 empty')
self.assertEqual(response.status_code, 404)
def test_page_not_int(self):
response = self.client.get('/simple/sitemap-simple.xml?p=test')
self.assertEqual(str(response.context['exception']), "No page 'test'")
self.assertEqual(response.status_code, 404)
def test_simple_sitemap(self):
"A simple sitemap can be rendered"
response = self.client.get('/simple/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'templates')],
}])
def test_simple_custom_sitemap(self):
"A simple sitemap can be rendered with a custom template"
response = self.client.get('/simple/custom-sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<!-- This is a customised template -->
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<url><loc>%s/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % (self.base_url, date.today())
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_last_modified(self):
"Last-Modified header is set correctly"
response = self.client.get('/lastmod/sitemap.xml')
self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 10:00:00 GMT')
def test_sitemap_last_modified_date(self):
"""
The Last-Modified header supports dates (without time).
"""
response = self.client.get('/lastmod/date-sitemap.xml')
self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 00:00:00 GMT')
def test_sitemap_last_modified_tz(self):
"""
The Last-Modified header should be converted from timezone aware dates
to GMT.
"""
response = self.client.get('/lastmod/tz-sitemap.xml')
self.assertEqual(response.headers['Last-Modified'], 'Wed, 13 Mar 2013 15:00:00 GMT')
def test_sitemap_last_modified_missing(self):
"Last-Modified header is missing when sitemap has no lastmod"
response = self.client.get('/generic/sitemap.xml')
self.assertFalse(response.has_header('Last-Modified'))
def test_sitemap_last_modified_mixed(self):
"Last-Modified header is omitted when lastmod not on all items"
response = self.client.get('/lastmod-mixed/sitemap.xml')
self.assertFalse(response.has_header('Last-Modified'))
def test_sitemaps_lastmod_mixed_ascending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get('/lastmod-sitemaps/mixed-ascending.xml')
self.assertFalse(response.has_header('Last-Modified'))
def test_sitemaps_lastmod_mixed_descending_last_modified_missing(self):
"""
The Last-Modified header is omitted when lastmod isn't found in all
sitemaps. Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get('/lastmod-sitemaps/mixed-descending.xml')
self.assertFalse(response.has_header('Last-Modified'))
def test_sitemaps_lastmod_ascending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in ascending order.
"""
response = self.client.get('/lastmod-sitemaps/ascending.xml')
self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT')
def test_sitemaps_lastmod_descending(self):
"""
The Last-Modified header is set to the most recent sitemap lastmod.
Test sitemaps are sorted by lastmod in descending order.
"""
response = self.client.get('/lastmod-sitemaps/descending.xml')
self.assertEqual(response.headers['Last-Modified'], 'Sat, 20 Apr 2013 05:00:00 GMT')
@override_settings(USE_I18N=True, USE_L10N=True)
def test_localized_priority(self):
"""The priority value should not be localized."""
with translation.override('fr'):
self.assertEqual('0,3', localize(0.3))
# Priorities aren't rendered in localized format.
response = self.client.get('/simple/sitemap.xml')
self.assertContains(response, '<priority>0.5</priority>')
self.assertContains(response, '<lastmod>%s</lastmod>' % date.today())
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
def test_requestsite_sitemap(self):
# Hitting the sitemap without the sites framework installed doesn't raise
# an exception; a RequestSite built from the request is used instead.
response = self.client.get('/simple/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>http://testserver/location/</loc><lastmod>%s</lastmod><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""" % date.today()
self.assertXMLEqual(response.content.decode(), expected_content)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_get_urls_no_site_1(self):
"""
Check we get ImproperlyConfigured if we don't pass a site object to
Sitemap.get_urls and no Site objects exist
"""
Site.objects.all().delete()
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_get_urls_no_site_2(self):
"""
Check we get ImproperlyConfigured when we don't pass a site object to
Sitemap.get_urls if Site objects exist, but the sites framework is not
actually installed.
"""
with self.assertRaisesMessage(ImproperlyConfigured, self.use_sitemap_err_msg):
Sitemap().get_urls()
@ignore_warnings(category=RemovedInDjango50Warning)
def test_sitemap_item(self):
"""
Check to make sure that the raw item is included with each
Sitemap.get_urls() url result.
"""
test_sitemap = Sitemap()
test_sitemap.items = TestModel.objects.order_by('pk').all
def is_testmodel(url):
return isinstance(url['item'], TestModel)
item_in_url_info = all(map(is_testmodel, test_sitemap.get_urls()))
self.assertTrue(item_in_url_info)
def test_cached_sitemap_index(self):
"""
A cached sitemap index can be rendered (#2713).
"""
response = self.client.get('/cached/index.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
<sitemap><loc>%s/cached/sitemap-simple.xml</loc></sitemap>
</sitemapindex>
""" % self.base_url
self.assertXMLEqual(response.content.decode(), expected_content)
def test_x_robots_sitemap(self):
response = self.client.get('/simple/index.xml')
self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive')
response = self.client.get('/simple/sitemap.xml')
self.assertEqual(response.headers['X-Robots-Tag'], 'noindex, noodp, noarchive')
def test_empty_sitemap(self):
response = self.client.get('/empty/sitemap.xml')
self.assertEqual(response.status_code, 200)
@override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese')))
def test_simple_i18n_sitemap_index(self):
"""
A simple i18n sitemap index can be rendered, without logging variable
lookup errors.
"""
with self.assertNoLogs('django.template', 'DEBUG'):
response = self.client.get('/simple/i18n.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
<url><loc>{0}/en/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url><url><loc>{0}/pt/i18n/testmodel/{1}/</loc><changefreq>never</changefreq><priority>0.5</priority></url>
</urlset>
""".format(self.base_url, self.i18n_model.pk)
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese')))
def test_alternate_i18n_sitemap_index(self):
"""
An i18n sitemap with alternate/hreflang links can be rendered.
"""
response = self.client.get('/alternates/i18n.xml')
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
</url>
""".replace('\n', '')
expected_content = f"""<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
{expected_urls}
</urlset>
"""
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese'), ('es', 'Spanish')))
def test_alternate_i18n_sitemap_limited(self):
"""
An i18n sitemap with a limited set of languages can be rendered.
"""
response = self.client.get('/limited/i18n.xml')
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/es/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="es" href="{url}/es/i18n/testmodel/{pk}/"/>
</url>
""".replace('\n', '')
expected_content = f"""<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
{expected_urls}
</urlset>
"""
self.assertXMLEqual(response.content.decode(), expected_content)
@override_settings(LANGUAGES=(('en', 'English'), ('pt', 'Portuguese')))
def test_alternate_i18n_sitemap_xdefault(self):
"""
An i18n sitemap with x-default can be rendered.
"""
response = self.client.get('/x-default/i18n.xml')
url, pk = self.base_url, self.i18n_model.pk
expected_urls = f"""
<url><loc>{url}/en/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
<url><loc>{url}/pt/i18n/testmodel/{pk}/</loc><changefreq>never</changefreq><priority>0.5</priority>
<xhtml:link rel="alternate" hreflang="en" href="{url}/en/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="pt" href="{url}/pt/i18n/testmodel/{pk}/"/>
<xhtml:link rel="alternate" hreflang="x-default" href="{url}/i18n/testmodel/{pk}/"/>
</url>
""".replace('\n', '')
expected_content = f"""<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
{expected_urls}
</urlset>
"""
self.assertXMLEqual(response.content.decode(), expected_content)
def test_sitemap_without_entries(self):
response = self.client.get('/sitemap-without-entries/sitemap.xml')
expected_content = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml">
</urlset>"""
self.assertXMLEqual(response.content.decode(), expected_content)
|
7a5975abc200b16a36a9e97c78a39f31f901292b6ff1c1edb80cad820d5887e0 | import datetime
from django.db import connection, models, transaction
from django.db.models import Exists, OuterRef
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, skipUnlessDBFeature,
)
from .models import (
Award, AwardNote, Book, Child, Contact, Eaten, Email, File, Food, FooFile,
FooFileProxy, FooImage, FooPhoto, House, Image, Item, Location, Login,
OrderedPerson, OrgUnit, Person, Photo, PlayedWith, PlayedWithNote, Policy,
Researcher, Toy, Version,
)
# Can't run this test under SQLite, because you can't
# get two connections to an in-memory database.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
class DeleteLockingTest(TransactionTestCase):
available_apps = ['delete_regress']
def setUp(self):
# Create a second connection to the default database
self.conn2 = connection.copy()
self.conn2.set_autocommit(False)
def tearDown(self):
# Close down the second connection.
self.conn2.rollback()
self.conn2.close()
def test_concurrent_delete(self):
"""Concurrent deletes don't collide and lock the database (#9479)."""
with transaction.atomic():
Book.objects.create(id=1, pagecount=100)
Book.objects.create(id=2, pagecount=200)
Book.objects.create(id=3, pagecount=300)
with transaction.atomic():
# Start a transaction on the main connection.
self.assertEqual(3, Book.objects.count())
# Delete something using another database connection.
with self.conn2.cursor() as cursor2:
cursor2.execute("DELETE from delete_regress_book WHERE id = 1")
self.conn2.commit()
# In the same transaction on the main connection, perform a
# queryset delete that covers the object deleted with the other
# connection. This causes an infinite loop under MySQL InnoDB
# unless we keep track of already deleted objects.
Book.objects.filter(pagecount__lt=250).delete()
self.assertEqual(1, Book.objects.count())
class DeleteCascadeTests(TestCase):
def test_generic_relation_cascade(self):
"""
Django cascades deletes through generic-related objects to their
reverse relations.
"""
person = Person.objects.create(name='Nelson Mandela')
award = Award.objects.create(name='Nobel', content_object=person)
AwardNote.objects.create(note='a peace prize', award=award)
self.assertEqual(AwardNote.objects.count(), 1)
person.delete()
self.assertEqual(Award.objects.count(), 0)
# first two asserts are just sanity checks, this is the kicker:
self.assertEqual(AwardNote.objects.count(), 0)
def test_fk_to_m2m_through(self):
"""
If an M2M relationship has an explicitly-specified through model, and
some other model has an FK to that through model, deletion is cascaded
from one of the participants in the M2M, to the through model, to its
related model.
"""
juan = Child.objects.create(name='Juan')
paints = Toy.objects.create(name='Paints')
played = PlayedWith.objects.create(child=juan, toy=paints, date=datetime.date.today())
PlayedWithNote.objects.create(played=played, note='the next Jackson Pollock')
self.assertEqual(PlayedWithNote.objects.count(), 1)
paints.delete()
self.assertEqual(PlayedWith.objects.count(), 0)
# first two asserts are just sanity checks, this is the kicker:
self.assertEqual(PlayedWithNote.objects.count(), 0)
def test_15776(self):
policy = Policy.objects.create(pk=1, policy_number="1234")
version = Version.objects.create(policy=policy)
location = Location.objects.create(version=version)
Item.objects.create(version=version, location=location)
policy.delete()
class DeleteCascadeTransactionTests(TransactionTestCase):
available_apps = ['delete_regress']
def test_inheritance(self):
"""
Auto-created many-to-many through tables referencing a parent model are
correctly found by the delete cascade when a child of that parent is
deleted.
Refs #14896.
"""
r = Researcher.objects.create()
email = Email.objects.create(
label="office-email", email_address="[email protected]"
)
r.contacts.add(email)
email.delete()
def test_to_field(self):
"""
Cascade deletion works with ForeignKey.to_field set to non-PK.
"""
apple = Food.objects.create(name="apple")
Eaten.objects.create(food=apple, meal="lunch")
apple.delete()
self.assertFalse(Food.objects.exists())
self.assertFalse(Eaten.objects.exists())
class LargeDeleteTests(TestCase):
def test_large_deletes(self):
"Regression for #13309 -- if the number of objects > chunk size, deletion still occurs"
for x in range(300):
Book.objects.create(pagecount=x + 100)
# Attach a post_delete receiver so the ORM can't take the fast-delete path.
def noop(*args, **kwargs):
pass
models.signals.post_delete.connect(noop, sender=Book)
Book.objects.all().delete()
models.signals.post_delete.disconnect(noop, sender=Book)
self.assertEqual(Book.objects.count(), 0)
class ProxyDeleteTest(TestCase):
"""
Tests on_delete behavior for proxy models.
See #16128.
"""
def create_image(self):
"""Return an Image referenced by both a FooImage and a FooFile."""
# Create an Image
test_image = Image()
test_image.save()
foo_image = FooImage(my_image=test_image)
foo_image.save()
# Get the Image instance as a File
test_file = File.objects.get(pk=test_image.pk)
foo_file = FooFile(my_file=test_file)
foo_file.save()
return test_image
def test_delete_proxy(self):
"""
Deleting the *proxy* instance bubbles through to its non-proxy and
*all* referring objects are deleted.
"""
self.create_image()
Image.objects.all().delete()
# An Image deletion == File deletion
self.assertEqual(len(Image.objects.all()), 0)
self.assertEqual(len(File.objects.all()), 0)
# The Image deletion cascaded and *all* references to it are deleted.
self.assertEqual(len(FooImage.objects.all()), 0)
self.assertEqual(len(FooFile.objects.all()), 0)
def test_delete_proxy_of_proxy(self):
"""
Deleting a proxy-of-proxy instance should bubble through to its proxy
and non-proxy parents, deleting *all* referring objects.
"""
test_image = self.create_image()
# Get the Image as a Photo
test_photo = Photo.objects.get(pk=test_image.pk)
foo_photo = FooPhoto(my_photo=test_photo)
foo_photo.save()
Photo.objects.all().delete()
# A Photo deletion == Image deletion == File deletion
self.assertEqual(len(Photo.objects.all()), 0)
self.assertEqual(len(Image.objects.all()), 0)
self.assertEqual(len(File.objects.all()), 0)
# The Photo deletion should have cascaded and deleted *all*
# references to it.
self.assertEqual(len(FooPhoto.objects.all()), 0)
self.assertEqual(len(FooFile.objects.all()), 0)
self.assertEqual(len(FooImage.objects.all()), 0)
def test_delete_concrete_parent(self):
"""
Deleting an instance of a concrete model should also delete objects
referencing its proxy subclass.
"""
self.create_image()
File.objects.all().delete()
# A File deletion == Image deletion
self.assertEqual(len(File.objects.all()), 0)
self.assertEqual(len(Image.objects.all()), 0)
# The File deletion should have cascaded and deleted *all* references
# to it.
self.assertEqual(len(FooFile.objects.all()), 0)
self.assertEqual(len(FooImage.objects.all()), 0)
def test_delete_proxy_pair(self):
"""
If a pair of proxy models are linked by an FK from one concrete parent
to the other, deleting one proxy model cascade-deletes the other, and
the deletion happens in the right order (not triggering an
IntegrityError on databases unable to defer integrity checks).
Refs #17918.
"""
# Create an Image (proxy of File) and FooFileProxy (proxy of FooFile,
# which has an FK to File)
image = Image.objects.create()
as_file = File.objects.get(pk=image.pk)
FooFileProxy.objects.create(my_file=as_file)
Image.objects.all().delete()
self.assertEqual(len(FooFileProxy.objects.all()), 0)
def test_19187_values(self):
msg = 'Cannot call delete() after .values() or .values_list()'
with self.assertRaisesMessage(TypeError, msg):
Image.objects.values().delete()
with self.assertRaisesMessage(TypeError, msg):
Image.objects.values_list().delete()
class Ticket19102Tests(TestCase):
"""
Test different queries that alter the SELECT clause of the query. A
subquery must also be used for the deletion (that is, the original
query has a join in it). The deletion should be done as a "fast-path"
deletion (that is, just one query for the .delete() call).
.values() is deliberately not tested here; .values().delete() doesn't
work for non-fast-path deletes at all.
"""
@classmethod
def setUpTestData(cls):
cls.o1 = OrgUnit.objects.create(name='o1')
cls.o2 = OrgUnit.objects.create(name='o2')
cls.l1 = Login.objects.create(description='l1', orgunit=cls.o1)
cls.l2 = Login.objects.create(description='l2', orgunit=cls.o2)
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_annotate(self):
with self.assertNumQueries(1):
Login.objects.order_by('description').filter(
orgunit__name__isnull=False
).annotate(
n=models.Count('description')
).filter(
n=1, pk=self.l1.pk
).delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_extra(self):
with self.assertNumQueries(1):
Login.objects.order_by('description').filter(
orgunit__name__isnull=False
).extra(
select={'extraf': '1'}
).filter(
pk=self.l1.pk
).delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_select_related(self):
with self.assertNumQueries(1):
Login.objects.filter(
pk=self.l1.pk
).filter(
orgunit__name__isnull=False
).order_by(
'description'
).select_related('orgunit').delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
@skipUnlessDBFeature("update_can_self_select")
def test_ticket_19102_defer(self):
with self.assertNumQueries(1):
Login.objects.filter(
pk=self.l1.pk
).filter(
orgunit__name__isnull=False
).order_by(
'description'
).only('id').delete()
self.assertFalse(Login.objects.filter(pk=self.l1.pk).exists())
self.assertTrue(Login.objects.filter(pk=self.l2.pk).exists())
class DeleteTests(TestCase):
def test_meta_ordered_delete(self):
# When a subquery is performed by deletion code, the subquery must be
# cleared of all ordering. There was a bug that caused _meta ordering
# to be used. Refs #19720.
h = House.objects.create(address='Foo')
OrderedPerson.objects.create(name='Jack', lives_in=h)
OrderedPerson.objects.create(name='Bob', lives_in=h)
OrderedPerson.objects.filter(lives_in__address='Foo').delete()
self.assertEqual(OrderedPerson.objects.count(), 0)
def test_foreign_key_delete_nullifies_correct_columns(self):
"""
With a model (Researcher) that has two foreign keys pointing to the
same model (Contact), deleting an instance of the target model
(contact1) nullifies the correct fields of Researcher.
"""
contact1 = Contact.objects.create(label='Contact 1')
contact2 = Contact.objects.create(label='Contact 2')
researcher1 = Researcher.objects.create(
primary_contact=contact1,
secondary_contact=contact2,
)
researcher2 = Researcher.objects.create(
primary_contact=contact2,
secondary_contact=contact1,
)
contact1.delete()
researcher1.refresh_from_db()
researcher2.refresh_from_db()
self.assertIsNone(researcher1.primary_contact)
self.assertEqual(researcher1.secondary_contact, contact2)
self.assertEqual(researcher2.primary_contact, contact2)
self.assertIsNone(researcher2.secondary_contact)
def test_self_reference_with_through_m2m_at_second_level(self):
toy = Toy.objects.create(name='Paints')
child = Child.objects.create(name='Juan')
Book.objects.create(pagecount=500, owner=child)
PlayedWith.objects.create(child=child, toy=toy, date=datetime.date.today())
Book.objects.filter(Exists(
Book.objects.filter(
pk=OuterRef('pk'),
owner__toys=toy.pk,
),
)).delete()
self.assertIs(Book.objects.exists(), False)
class DeleteDistinct(SimpleTestCase):
def test_disallowed_delete_distinct(self):
msg = 'Cannot call delete() after .distinct().'
with self.assertRaisesMessage(TypeError, msg):
Book.objects.distinct().delete()
with self.assertRaisesMessage(TypeError, msg):
Book.objects.distinct('id').delete()
|
e36dda25f1aaa135b5e0ebdef2dc832ac838c4ec33e6d6efc09698455414d7af | from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Award(models.Model):
name = models.CharField(max_length=25)
object_id = models.PositiveIntegerField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
content_object = GenericForeignKey()
class AwardNote(models.Model):
award = models.ForeignKey(Award, models.CASCADE)
note = models.CharField(max_length=100)
class Person(models.Model):
name = models.CharField(max_length=25)
awards = GenericRelation(Award)
class Book(models.Model):
pagecount = models.IntegerField()
owner = models.ForeignKey('Child', models.CASCADE, null=True)
class Toy(models.Model):
name = models.CharField(max_length=50)
class Child(models.Model):
name = models.CharField(max_length=50)
toys = models.ManyToManyField(Toy, through='PlayedWith')
class PlayedWith(models.Model):
child = models.ForeignKey(Child, models.CASCADE)
toy = models.ForeignKey(Toy, models.CASCADE)
date = models.DateField(db_column='date_col')
class PlayedWithNote(models.Model):
played = models.ForeignKey(PlayedWith, models.CASCADE)
note = models.TextField()
class Contact(models.Model):
label = models.CharField(max_length=100)
class Email(Contact):
email_address = models.EmailField(max_length=100)
class Researcher(models.Model):
contacts = models.ManyToManyField(Contact, related_name="research_contacts")
primary_contact = models.ForeignKey(Contact, models.SET_NULL, null=True, related_name='primary_contacts')
secondary_contact = models.ForeignKey(Contact, models.SET_NULL, null=True, related_name='secondary_contacts')
class Food(models.Model):
name = models.CharField(max_length=20, unique=True)
class Eaten(models.Model):
food = models.ForeignKey(Food, models.CASCADE, to_field="name")
meal = models.CharField(max_length=20)
# Models for #15776
class Policy(models.Model):
policy_number = models.CharField(max_length=10)
class Version(models.Model):
policy = models.ForeignKey(Policy, models.CASCADE)
class Location(models.Model):
version = models.ForeignKey(Version, models.SET_NULL, blank=True, null=True)
class Item(models.Model):
version = models.ForeignKey(Version, models.CASCADE)
location = models.ForeignKey(Location, models.SET_NULL, blank=True, null=True)
# Models for #16128
class File(models.Model):
pass
class Image(File):
class Meta:
proxy = True
class Photo(Image):
class Meta:
proxy = True
class FooImage(models.Model):
my_image = models.ForeignKey(Image, models.CASCADE)
class FooFile(models.Model):
my_file = models.ForeignKey(File, models.CASCADE)
class FooPhoto(models.Model):
my_photo = models.ForeignKey(Photo, models.CASCADE)
class FooFileProxy(FooFile):
class Meta:
proxy = True
class OrgUnit(models.Model):
name = models.CharField(max_length=64, unique=True)
class Login(models.Model):
description = models.CharField(max_length=32)
orgunit = models.ForeignKey(OrgUnit, models.CASCADE)
class House(models.Model):
address = models.CharField(max_length=32)
class OrderedPerson(models.Model):
name = models.CharField(max_length=32)
lives_in = models.ForeignKey(House, models.CASCADE)
class Meta:
ordering = ['name']
|
f2f425cf6e1afb5ea9c4113f2aae97122629e2b514a535a1bb523c64ff59b9db | """
Testing using the Test Client
The test client is a class that can act like a simple
browser for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
``Client`` objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the ``Client`` instance.
This is not intended as a replacement for Twill, Selenium, or
other browser automation frameworks - it is here to allow
testing against the contexts and templates produced by a view,
rather than the HTML rendered to the end-user.
"""
import itertools
import tempfile
from unittest import mock
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpResponse, HttpResponseNotAllowed
from django.test import (
AsyncRequestFactory, Client, RequestFactory, SimpleTestCase, TestCase,
modify_settings, override_settings,
)
from django.urls import reverse_lazy
from django.utils.decorators import async_only_middleware
from .views import TwoArgException, get_view, post_view, trace_view
def middleware_urlconf(get_response):
def middleware(request):
request.urlconf = 'tests.test_client.urls_middleware_urlconf'
return get_response(request)
return middleware
@async_only_middleware
def async_middleware_urlconf(get_response):
async def middleware(request):
request.urlconf = 'tests.test_client.urls_middleware_urlconf'
return await get_response(request)
return middleware
@override_settings(ROOT_URLCONF='test_client.urls')
class ClientTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username='testclient', password='password')
cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False)
def test_get_view(self):
"GET a view"
# The data is ignored, but let's check it doesn't crash the system
# anyway.
data = {'var': '\xf2'}
response = self.client.get('/get_view/', data)
# Check some response details
self.assertContains(response, 'This is a test')
self.assertEqual(response.context['var'], '\xf2')
self.assertEqual(response.templates[0].name, 'GET Template')
def test_query_string_encoding(self):
# WSGI requires latin-1 encoded strings.
response = self.client.get('/get_view/?var=1\ufffd')
self.assertEqual(response.context['var'], '1\ufffd')
def test_get_data_none(self):
msg = (
"Cannot encode None for key 'value' in a query string. Did you "
"mean to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.get('/get_view/', {'value': None})
def test_get_post_view(self):
"GET a view that normally expects POSTs"
response = self.client.get('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty GET Template')
self.assertTemplateNotUsed(response, 'Empty POST Template')
def test_empty_post(self):
"POST an empty dictionary to a view"
response = self.client.post('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty POST Template')
self.assertTemplateNotUsed(response, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty POST Template')
def test_post(self):
"POST some data to a view"
post_data = {
'value': 37
}
response = self.client.post('/post_view/', post_data)
# Check some response details
self.assertContains(response, 'Data received')
self.assertEqual(response.context['data'], '37')
self.assertEqual(response.templates[0].name, 'POST Template')
def test_post_data_none(self):
msg = (
"Cannot encode None for key 'value' as POST data. Did you mean "
"to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.post('/post_view/', {'value': None})
def test_json_serialization(self):
"""The test client serializes JSON data."""
methods = ('post', 'put', 'patch', 'delete')
tests = (
({'value': 37}, {'value': 37}),
([37, True], [37, True]),
((37, False), [37, False]),
)
for method in methods:
with self.subTest(method=method):
for data, expected in tests:
with self.subTest(data):
client_method = getattr(self.client, method)
method_name = method.upper()
response = client_method('/json_view/', data, content_type='application/json')
self.assertContains(response, 'Viewing %s page.' % method_name)
self.assertEqual(response.context['data'], expected)
def test_json_encoder_argument(self):
"""The test Client accepts a json_encoder."""
mock_encoder = mock.MagicMock()
mock_encoding = mock.MagicMock()
mock_encoder.return_value = mock_encoding
mock_encoding.encode.return_value = '{"value": 37}'
client = self.client_class(json_encoder=mock_encoder)
        # Vendor tree JSON content types (application/vnd.*+json) are accepted.
client.post('/json_view/', {'value': 37}, content_type='application/vnd.api+json')
self.assertTrue(mock_encoder.called)
self.assertTrue(mock_encoding.encode.called)
def test_put(self):
response = self.client.put('/put_view/', {'foo': 'bar'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'PUT Template')
self.assertEqual(response.context['data'], "{'foo': 'bar'}")
self.assertEqual(response.context['Content-Length'], '14')
def test_trace(self):
"""TRACE a view"""
response = self.client.trace('/trace_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['method'], 'TRACE')
self.assertEqual(response.templates[0].name, 'TRACE Template')
def test_response_headers(self):
"Check the value of HTTP headers returned in a response"
response = self.client.get("/header_view/")
self.assertEqual(response.headers['X-DJANGO-TEST'], 'Slartibartfast')
def test_response_attached_request(self):
"""
The returned response has a ``request`` attribute with the originating
environ dict and a ``wsgi_request`` with the originating WSGIRequest.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, 'request'))
self.assertTrue(hasattr(response, 'wsgi_request'))
for key, value in response.request.items():
self.assertIn(key, response.wsgi_request.environ)
self.assertEqual(response.wsgi_request.environ[key], value)
def test_response_resolver_match(self):
"""
The response contains a ResolverMatch instance.
"""
response = self.client.get('/header_view/')
self.assertTrue(hasattr(response, 'resolver_match'))
def test_response_resolver_match_redirect_follow(self):
"""
The response ResolverMatch instance contains the correct
information when following redirects.
"""
response = self.client.get('/redirect_view/', follow=True)
self.assertEqual(response.resolver_match.url_name, 'get_view')
def test_response_resolver_match_regular_view(self):
"""
The response ResolverMatch instance contains the correct
information when accessing a regular view.
"""
response = self.client.get('/get_view/')
self.assertEqual(response.resolver_match.url_name, 'get_view')
@modify_settings(MIDDLEWARE={'prepend': 'test_client.tests.middleware_urlconf'})
def test_response_resolver_match_middleware_urlconf(self):
response = self.client.get('/middleware_urlconf_view/')
self.assertEqual(response.resolver_match.url_name, 'middleware_urlconf_view')
def test_raw_post(self):
"POST raw data (with a content type) to a view"
test_doc = """<?xml version="1.0" encoding="utf-8"?>
<library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library>
"""
response = self.client.post('/raw_post_view/', test_doc, content_type='text/xml')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Book template")
self.assertEqual(response.content, b"Blink - Malcolm Gladwell")
def test_insecure(self):
"GET a URL through http"
response = self.client.get('/secure_view/', secure=False)
self.assertFalse(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '80')
def test_secure(self):
"GET a URL through https"
response = self.client.get('/secure_view/', secure=True)
self.assertTrue(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '443')
def test_redirect(self):
"GET a URL that redirects elsewhere"
response = self.client.get('/redirect_view/')
self.assertRedirects(response, '/get_view/')
def test_redirect_with_query(self):
"GET a URL that redirects with given GET parameters"
response = self.client.get('/redirect_view/', {'var': 'value'})
self.assertRedirects(response, '/get_view/?var=value')
def test_redirect_with_query_ordering(self):
"""assertRedirects() ignores the order of query string parameters."""
response = self.client.get('/redirect_view/', {'var': 'value', 'foo': 'bar'})
self.assertRedirects(response, '/get_view/?var=value&foo=bar')
self.assertRedirects(response, '/get_view/?foo=bar&var=value')
def test_permanent_redirect(self):
"GET a URL that redirects permanently elsewhere"
response = self.client.get('/permanent_redirect_view/')
self.assertRedirects(response, '/get_view/', status_code=301)
def test_temporary_redirect(self):
"GET a URL that does a non-permanent redirect"
response = self.client.get('/temporary_redirect_view/')
self.assertRedirects(response, '/get_view/', status_code=302)
def test_redirect_to_strange_location(self):
"GET a URL that redirects to a non-200 page"
response = self.client.get('/double_redirect_view/')
        # The response was a 302, and the attempt to get the redirect
        # location returned a 301 when retrieved.
self.assertRedirects(response, '/permanent_redirect_view/', target_status_code=301)
def test_follow_redirect(self):
"A URL that redirects can be followed to termination."
response = self.client.get('/double_redirect_view/', follow=True)
self.assertRedirects(response, '/get_view/', status_code=302, target_status_code=200)
self.assertEqual(len(response.redirect_chain), 2)
def test_follow_relative_redirect(self):
"A URL with a relative redirect can be followed."
response = self.client.get('/accounts/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/accounts/login/')
def test_follow_relative_redirect_no_trailing_slash(self):
"A URL with a relative redirect with no trailing slash can be followed."
response = self.client.get('/accounts/no_trailing_slash', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/accounts/login/')
def test_follow_307_and_308_redirect(self):
"""
A 307 or 308 redirect preserves the request method after the redirect.
"""
methods = ('get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace')
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/post_view/')
self.assertEqual(response.request['REQUEST_METHOD'], method.upper())
def test_follow_307_and_308_preserves_query_string(self):
methods = ('post', 'options', 'put', 'patch', 'delete', 'trace')
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
'/redirect_view_%s_query_string/' % code,
data={'value': 'test'},
follow=True,
)
self.assertRedirects(response, '/post_view/?hello=world', status_code=code)
self.assertEqual(response.request['QUERY_STRING'], 'hello=world')
def test_follow_307_and_308_get_head_query_string(self):
methods = ('get', 'head')
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
'/redirect_view_%s_query_string/' % code,
data={'value': 'test'},
follow=True,
)
self.assertRedirects(response, '/post_view/?hello=world', status_code=code)
self.assertEqual(response.request['QUERY_STRING'], 'value=test')
def test_follow_307_and_308_preserves_post_data(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.post('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True)
self.assertContains(response, 'test is the value')
def test_follow_307_and_308_preserves_put_body(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.put('/redirect_view_%s/?to=/put_view/' % code, data='a=b', follow=True)
self.assertContains(response, 'a=b is the body')
def test_follow_307_and_308_preserves_get_params(self):
data = {'var': 30, 'to': '/get_view/'}
for code in (307, 308):
with self.subTest(code=code):
response = self.client.get('/redirect_view_%s/' % code, data=data, follow=True)
self.assertContains(response, '30 is the value')
def test_redirect_http(self):
"GET a URL that redirects to an http URI"
response = self.client.get('/http_redirect_view/', follow=True)
self.assertFalse(response.test_was_secure_request)
def test_redirect_https(self):
"GET a URL that redirects to an https URI"
response = self.client.get('/https_redirect_view/', follow=True)
self.assertTrue(response.test_was_secure_request)
def test_notfound_response(self):
"GET a URL that responds as '404:Not Found'"
response = self.client.get('/bad_view/')
self.assertContains(response, 'MAGIC', status_code=404)
def test_valid_form(self):
"POST valid data to a form"
post_data = {
'text': 'Hello World',
'email': '[email protected]',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Valid POST Template")
def test_valid_form_with_hints(self):
"GET a form, providing hints in the GET data"
hints = {
'text': 'Hello World',
'multi': ('b', 'c', 'e')
}
response = self.client.get('/form_view/', data=hints)
        # The multi-value data was rolled out into multiple GET parameters correctly.
self.assertContains(response, 'Select a valid choice.', 0)
self.assertTemplateUsed(response, "Form GET Template")
def test_incomplete_data_form(self):
"POST incomplete data to a form"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view/', post_data)
self.assertContains(response, 'This field is required.', 3)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error(self):
"POST erroneous data to a form"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_valid_form_with_template(self):
"POST valid data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': '[email protected]',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data OK')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Valid POST Template")
def test_incomplete_data_form_with_template(self):
"POST incomplete data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, 'form_view.html')
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error_with_template(self):
"POST erroneous data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_unknown_page(self):
"GET an invalid URL"
response = self.client.get('/unknown_view/')
# The response was a 404
self.assertEqual(response.status_code, 404)
def test_url_parameters(self):
"Make sure that URL ;-parameters are not stripped."
response = self.client.get('/unknown_view/;some-parameter')
# The path in the response includes it (ignore that it's a 404)
self.assertEqual(response.request['PATH_INFO'], '/unknown_view/;some-parameter')
def test_view_with_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(
INSTALLED_APPS=['django.contrib.auth'],
SESSION_ENGINE='django.contrib.sessions.backends.file',
)
def test_view_with_login_when_sessions_app_is_not_installed(self):
self.test_view_with_login()
def test_view_with_force_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_force_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_login_and_custom_redirect(self):
"Request a page that is protected with @login_required(redirect_field_name='redirect_to')"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_force_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_bad_login(self):
"Request a page that is protected with @login, but use bad credentials"
login = self.client.login(username='otheruser', password='nopassword')
self.assertFalse(login)
def test_view_with_inactive_login(self):
"""
        An inactive user may log in if the authentication backend allows it.
"""
credentials = {'username': 'inactive', 'password': 'password'}
self.assertFalse(self.client.login(**credentials))
with self.settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']):
self.assertTrue(self.client.login(**credentials))
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'django.contrib.auth.backends.AllowAllUsersModelBackend',
]
)
def test_view_with_inactive_force_login(self):
"Request a page that is protected with @login, but use an inactive login"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u2, backend='django.contrib.auth.backends.AllowAllUsersModelBackend')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'inactive')
def test_logout(self):
"Request a logout after logging in"
# Log in
self.client.login(username='testclient', password='password')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
def test_logout_with_force_login(self):
"Request a logout after logging in"
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'test_client.auth_backends.TestClientBackend',
],
)
def test_force_login_with_backend(self):
"""
Request a page that is protected with @login_required when using
force_login() and passing a backend.
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1, backend='test_client.auth_backends.TestClientBackend')
self.assertEqual(self.u1.backend, 'test_client.auth_backends.TestClientBackend')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'test_client.auth_backends.TestClientBackend',
],
)
def test_force_login_without_backend(self):
"""
force_login() without passing a backend and with multiple backends
configured should automatically use the first backend.
"""
self.client.force_login(self.u1)
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend')
@override_settings(AUTHENTICATION_BACKENDS=[
'test_client.auth_backends.BackendWithoutGetUserMethod',
'django.contrib.auth.backends.ModelBackend',
])
def test_force_login_with_backend_missing_get_user(self):
"""
force_login() skips auth backends without a get_user() method.
"""
self.client.force_login(self.u1)
self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend')
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies")
def test_logout_cookie_sessions(self):
self.test_logout()
def test_view_with_permissions(self):
"Request a page that is protected with @permission_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# TODO: Log in with right permissions and request the page again
def test_view_with_permissions_exception(self):
"Request a page that is protected with @permission_required but raises an exception"
# Get the page without logging in. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
def test_view_with_method_permissions(self):
"Request a page that is protected with a @permission_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# TODO: Log in with right permissions and request the page again
def test_external_redirect(self):
response = self.client.get('/django_project_redirect/')
self.assertRedirects(response, 'https://www.djangoproject.com/', fetch_redirect_response=False)
def test_external_redirect_without_trailing_slash(self):
"""
Client._handle_redirects() with an empty path.
"""
response = self.client.get('/no_trailing_slash_external_redirect/', follow=True)
self.assertRedirects(response, 'https://testserver')
def test_external_redirect_with_fetch_error_msg(self):
"""
assertRedirects without fetch_redirect_response=False raises
        a relevant ValueError rather than a nondescript AssertionError.
"""
response = self.client.get('/django_project_redirect/')
msg = (
"The test client is unable to fetch remote URLs (got "
"https://www.djangoproject.com/). If the host is served by Django, "
"add 'www.djangoproject.com' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertRedirects(response, 'https://www.djangoproject.com/')
def test_session_modifying_view(self):
"Request a page that modifies the session"
# Session value isn't set initially
with self.assertRaises(KeyError):
self.client.session['tobacconist']
self.client.post('/session_view/')
# The session was modified
self.assertEqual(self.client.session['tobacconist'], 'hovercraft')
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE='django.contrib.sessions.backends.file',
)
def test_sessions_app_is_not_installed(self):
self.test_session_modifying_view()
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE='django.contrib.sessions.backends.nonexistent',
)
def test_session_engine_is_invalid(self):
with self.assertRaisesMessage(ImportError, 'nonexistent'):
self.test_session_modifying_view()
def test_view_with_exception(self):
"Request a page that is known to throw an error"
with self.assertRaises(KeyError):
self.client.get("/broken_view/")
def test_exc_info(self):
client = Client(raise_request_exception=False)
response = client.get("/broken_view/")
self.assertEqual(response.status_code, 500)
exc_type, exc_value, exc_traceback = response.exc_info
self.assertIs(exc_type, KeyError)
self.assertIsInstance(exc_value, KeyError)
self.assertEqual(str(exc_value), "'Oops! Looks like you wrote some bad code.'")
self.assertIsNotNone(exc_traceback)
def test_exc_info_none(self):
response = self.client.get("/get_view/")
self.assertIsNone(response.exc_info)
def test_mail_sending(self):
"Mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Test message')
self.assertEqual(mail.outbox[0].body, 'This is a test email')
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
self.assertEqual(mail.outbox[0].to[0], '[email protected]')
self.assertEqual(mail.outbox[0].to[1], '[email protected]')
def test_reverse_lazy_decodes(self):
"reverse_lazy() works in the test client"
data = {'var': 'data'}
response = self.client.get(reverse_lazy('get_view'), data)
# Check some response details
self.assertContains(response, 'This is a test')
def test_relative_redirect(self):
response = self.client.get('/accounts/')
self.assertRedirects(response, '/accounts/login/')
def test_relative_redirect_no_trailing_slash(self):
response = self.client.get('/accounts/no_trailing_slash')
self.assertRedirects(response, '/accounts/login/')
def test_mass_mail_sending(self):
"Mass mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mass_mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, 'First Test message')
self.assertEqual(mail.outbox[0].body, 'This is the first test email')
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
self.assertEqual(mail.outbox[0].to[0], '[email protected]')
self.assertEqual(mail.outbox[0].to[1], '[email protected]')
self.assertEqual(mail.outbox[1].subject, 'Second Test message')
self.assertEqual(mail.outbox[1].body, 'This is the second test email')
self.assertEqual(mail.outbox[1].from_email, '[email protected]')
self.assertEqual(mail.outbox[1].to[0], '[email protected]')
self.assertEqual(mail.outbox[1].to[1], '[email protected]')
def test_exception_following_nested_client_request(self):
"""
A nested test client request shouldn't clobber exception signals from
the outer client request.
"""
with self.assertRaisesMessage(Exception, 'exception message'):
self.client.get('/nesting_exception_view/')
def test_response_raises_multi_arg_exception(self):
"""A request may raise an exception with more than one required arg."""
with self.assertRaises(TwoArgException) as cm:
self.client.get('/two_arg_exception/')
self.assertEqual(cm.exception.args, ('one', 'two'))
def test_uploading_temp_file(self):
with tempfile.TemporaryFile() as test_file:
response = self.client.post('/upload_view/', data={'temp_file': test_file})
self.assertEqual(response.content, b'temp_file')
def test_uploading_named_temp_file(self):
with tempfile.NamedTemporaryFile() as test_file:
response = self.client.post(
'/upload_view/',
data={'named_temp_file': test_file},
)
self.assertEqual(response.content, b'named_temp_file')
@override_settings(
MIDDLEWARE=['django.middleware.csrf.CsrfViewMiddleware'],
ROOT_URLCONF='test_client.urls',
)
class CSRFEnabledClientTests(SimpleTestCase):
def test_csrf_enabled_client(self):
"A client can be instantiated with CSRF checks enabled"
csrf_client = Client(enforce_csrf_checks=True)
# The normal client allows the post
response = self.client.post('/post_view/', {})
self.assertEqual(response.status_code, 200)
# The CSRF-enabled client rejects it
response = csrf_client.post('/post_view/', {})
self.assertEqual(response.status_code, 403)
class CustomTestClient(Client):
i_am_customized = "Yes"
class CustomTestClientTest(SimpleTestCase):
client_class = CustomTestClient
def test_custom_test_client(self):
"""A test case can specify a custom class for self.client."""
self.assertIs(hasattr(self.client, "i_am_customized"), True)
def _generic_view(request):
return HttpResponse(status=200)
@override_settings(ROOT_URLCONF='test_client.urls')
class RequestFactoryTest(SimpleTestCase):
"""Tests for the request factory."""
# A mapping between names of HTTP/1.1 methods and their test views.
http_methods_and_views = (
('get', get_view),
('post', post_view),
('put', _generic_view),
('patch', _generic_view),
('delete', _generic_view),
('head', _generic_view),
('options', _generic_view),
('trace', trace_view),
)
request_factory = RequestFactory()
def test_request_factory(self):
"""The request factory implements all the HTTP/1.1 methods."""
for method_name, view in self.http_methods_and_views:
method = getattr(self.request_factory, method_name)
request = method('/somewhere/')
response = view(request)
self.assertEqual(response.status_code, 200)
def test_get_request_from_factory(self):
"""
        A GET request built by the factory produces a templated response
        when passed to the view.
"""
request = self.request_factory.get('/somewhere/')
response = get_view(request)
self.assertContains(response, 'This is a test')
def test_trace_request_from_factory(self):
"""The request factory returns an echo response for a TRACE request."""
url_path = '/somewhere/'
request = self.request_factory.trace(url_path)
response = trace_view(request)
protocol = request.META["SERVER_PROTOCOL"]
echoed_request_line = "TRACE {} {}".format(url_path, protocol)
self.assertContains(response, echoed_request_line)
@override_settings(ROOT_URLCONF='test_client.urls')
class AsyncClientTest(TestCase):
async def test_response_resolver_match(self):
response = await self.async_client.get('/async_get_view/')
self.assertTrue(hasattr(response, 'resolver_match'))
self.assertEqual(response.resolver_match.url_name, 'async_get_view')
@modify_settings(
MIDDLEWARE={'prepend': 'test_client.tests.async_middleware_urlconf'},
)
async def test_response_resolver_match_middleware_urlconf(self):
response = await self.async_client.get('/middleware_urlconf_view/')
self.assertEqual(response.resolver_match.url_name, 'middleware_urlconf_view')
async def test_follow_parameter_not_implemented(self):
msg = 'AsyncClient request methods do not accept the follow parameter.'
tests = (
'get',
'post',
'put',
'patch',
'delete',
'head',
'options',
'trace',
)
for method_name in tests:
with self.subTest(method=method_name):
method = getattr(self.async_client, method_name)
with self.assertRaisesMessage(NotImplementedError, msg):
await method('/redirect_view/', follow=True)
async def test_get_data(self):
response = await self.async_client.get('/get_view/', {'var': 'val'})
self.assertContains(response, 'This is a test. val is the value.')
@override_settings(ROOT_URLCONF='test_client.urls')
class AsyncRequestFactoryTest(SimpleTestCase):
request_factory = AsyncRequestFactory()
async def test_request_factory(self):
tests = (
'get',
'post',
'put',
'patch',
'delete',
'head',
'options',
'trace',
)
for method_name in tests:
with self.subTest(method=method_name):
async def async_generic_view(request):
if request.method.lower() != method_name:
return HttpResponseNotAllowed(method_name)
return HttpResponse(status=200)
method = getattr(self.request_factory, method_name)
request = method('/somewhere/')
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
async def test_request_factory_data(self):
async def async_generic_view(request):
return HttpResponse(status=200, content=request.body)
request = self.request_factory.post(
'/somewhere/',
data={'example': 'data'},
content_type='application/json',
)
self.assertEqual(request.headers['content-length'], '19')
self.assertEqual(request.headers['content-type'], 'application/json')
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'{"example": "data"}')
def test_request_factory_sets_headers(self):
request = self.request_factory.get(
'/somewhere/',
AUTHORIZATION='Bearer faketoken',
X_ANOTHER_HEADER='some other value',
)
self.assertEqual(request.headers['authorization'], 'Bearer faketoken')
self.assertIn('HTTP_AUTHORIZATION', request.META)
self.assertEqual(request.headers['x-another-header'], 'some other value')
self.assertIn('HTTP_X_ANOTHER_HEADER', request.META)
def test_request_factory_query_string(self):
request = self.request_factory.get('/somewhere/', {'example': 'data'})
self.assertNotIn('Query-String', request.headers)
self.assertEqual(request.GET['example'], 'data')
|
9ecc9c2ea52a2dac191f93743d469198f929cd3606298d1140fdab1e971c3f4b | from django.contrib.auth import views as auth_views
from django.urls import path
from django.views.generic import RedirectView
from . import views
urlpatterns = [
path('upload_view/', views.upload_view, name='upload_view'),
path('get_view/', views.get_view, name='get_view'),
path('post_view/', views.post_view),
path('put_view/', views.put_view),
path('trace_view/', views.trace_view),
path('header_view/', views.view_with_header),
path('raw_post_view/', views.raw_post_view),
path('redirect_view/', views.redirect_view),
path('redirect_view_307/', views.method_saving_307_redirect_view),
path(
'redirect_view_307_query_string/',
views.method_saving_307_redirect_query_string_view,
),
path('redirect_view_308/', views.method_saving_308_redirect_view),
path(
'redirect_view_308_query_string/',
views.method_saving_308_redirect_query_string_view,
),
path('secure_view/', views.view_with_secure),
path('permanent_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=True)),
path('temporary_redirect_view/', RedirectView.as_view(url='/get_view/', permanent=False)),
path('http_redirect_view/', RedirectView.as_view(url='/secure_view/')),
path('https_redirect_view/', RedirectView.as_view(url='https://testserver/secure_view/')),
path('double_redirect_view/', views.double_redirect_view),
path('bad_view/', views.bad_view),
path('form_view/', views.form_view),
path('form_view_with_template/', views.form_view_with_template),
path('formset_view/', views.formset_view),
path('json_view/', views.json_view),
path('login_protected_view/', views.login_protected_view),
path('login_protected_method_view/', views.login_protected_method_view),
path('login_protected_view_custom_redirect/', views.login_protected_view_changed_redirect),
path('permission_protected_view/', views.permission_protected_view),
path('permission_protected_view_exception/', views.permission_protected_view_exception),
path('permission_protected_method_view/', views.permission_protected_method_view),
path('session_view/', views.session_view),
path('broken_view/', views.broken_view),
path('mail_sending_view/', views.mail_sending_view),
path('mass_mail_sending_view/', views.mass_mail_sending_view),
path('nesting_exception_view/', views.nesting_exception_view),
path('django_project_redirect/', views.django_project_redirect),
path('no_trailing_slash_external_redirect/', views.no_trailing_slash_external_redirect),
path('', views.index_view, name='index'), # Target for no_trailing_slash_external_redirect/ with follow=True
path('two_arg_exception/', views.two_arg_exception),
path('accounts/', RedirectView.as_view(url='login/')),
path('accounts/no_trailing_slash', RedirectView.as_view(url='login/')),
path('accounts/login/', auth_views.LoginView.as_view(template_name='login.html')),
path('accounts/logout/', auth_views.LogoutView.as_view()),
# Async views.
path('async_get_view/', views.async_get_view, name='async_get_view'),
]
|
c6971f23189de58c160a8f0ec33a969e2df55f34313c1fc0e280bce9bddd7b59 | import json
from urllib.parse import urlencode
from xml.dom.minidom import parseString
from django.contrib.auth.decorators import login_required, permission_required
from django.core import mail
from django.core.exceptions import ValidationError
from django.forms import fields
from django.forms.forms import Form
from django.forms.formsets import BaseFormSet, formset_factory
from django.http import (
HttpResponse, HttpResponseBadRequest, HttpResponseNotAllowed,
HttpResponseNotFound, HttpResponseRedirect,
)
from django.shortcuts import render
from django.template import Context, Template
from django.test import Client
from django.utils.decorators import method_decorator
def get_view(request):
"A simple view that expects a GET request, and returns a rendered template"
t = Template('This is a test. {{ var }} is the value.', name='GET Template')
c = Context({'var': request.GET.get('var', 42)})
return HttpResponse(t.render(c))
async def async_get_view(request):
return HttpResponse(b'GET content.')
def trace_view(request):
"""
    A simple view that expects a TRACE request and echoes its request line.
TRACE requests should not have an entity; the view will return a 400 status
response if it is present.
"""
if request.method.upper() != "TRACE":
return HttpResponseNotAllowed("TRACE")
elif request.body:
return HttpResponseBadRequest("TRACE requests MUST NOT include an entity")
else:
protocol = request.META["SERVER_PROTOCOL"]
t = Template(
'{{ method }} {{ uri }} {{ version }}',
name="TRACE Template",
)
c = Context({
'method': request.method,
'uri': request.path,
'version': protocol,
})
return HttpResponse(t.render(c))
def put_view(request):
if request.method == 'PUT':
t = Template('Data received: {{ data }} is the body.', name='PUT Template')
c = Context({
'Content-Length': request.META['CONTENT_LENGTH'],
'data': request.body.decode(),
})
else:
t = Template('Viewing GET page.', name='Empty GET Template')
c = Context()
return HttpResponse(t.render(c))
def post_view(request):
"""A view that expects a POST, and returns a different template depending
on whether any POST data is available
"""
if request.method == 'POST':
if request.POST:
t = Template('Data received: {{ data }} is the value.', name='POST Template')
c = Context({'data': request.POST['value']})
else:
t = Template('Viewing POST page.', name='Empty POST Template')
c = Context()
else:
t = Template('Viewing GET page.', name='Empty GET Template')
c = Context()
return HttpResponse(t.render(c))
def json_view(request):
"""
    A view that expects a request with a Content-Type of 'application/json'
    and a JSON body, which is deserialized and included in the context.
"""
if request.META.get('CONTENT_TYPE') != 'application/json':
return HttpResponse()
t = Template('Viewing {} page. With data {{ data }}.'.format(request.method))
data = json.loads(request.body.decode('utf-8'))
c = Context({'data': data})
return HttpResponse(t.render(c))
def view_with_header(request):
"A view that has a custom header"
response = HttpResponse()
response.headers['X-DJANGO-TEST'] = 'Slartibartfast'
return response
def raw_post_view(request):
"""A view which expects raw XML to be posted and returns content extracted
from the XML"""
if request.method == 'POST':
root = parseString(request.body)
first_book = root.firstChild.firstChild
title, author = [n.firstChild.nodeValue for n in first_book.childNodes]
t = Template("{{ title }} - {{ author }}", name="Book template")
c = Context({"title": title, "author": author})
else:
t = Template("GET request.", name="Book GET template")
c = Context()
return HttpResponse(t.render(c))
def redirect_view(request):
"A view that redirects all requests to the GET view"
if request.GET:
query = '?' + urlencode(request.GET, True)
else:
query = ''
return HttpResponseRedirect('/get_view/' + query)
def method_saving_307_redirect_query_string_view(request):
return HttpResponseRedirect('/post_view/?hello=world', status=307)
def method_saving_308_redirect_query_string_view(request):
return HttpResponseRedirect('/post_view/?hello=world', status=308)
def _post_view_redirect(request, status_code):
"""Redirect to /post_view/ using the status code."""
redirect_to = request.GET.get('to', '/post_view/')
return HttpResponseRedirect(redirect_to, status=status_code)
def method_saving_307_redirect_view(request):
return _post_view_redirect(request, 307)
def method_saving_308_redirect_view(request):
return _post_view_redirect(request, 308)
def view_with_secure(request):
"A view that indicates if the request was secure"
response = HttpResponse()
response.test_was_secure_request = request.is_secure()
response.test_server_port = request.META.get('SERVER_PORT', 80)
return response
def double_redirect_view(request):
"A view that redirects all requests to a redirection view"
return HttpResponseRedirect('/permanent_redirect_view/')
def bad_view(request):
"A view that returns a 404 with some error content"
    return HttpResponseNotFound('Not found! This page contains some MAGIC content')
TestChoices = (
('a', 'First Choice'),
('b', 'Second Choice'),
('c', 'Third Choice'),
('d', 'Fourth Choice'),
('e', 'Fifth Choice')
)
class TestForm(Form):
text = fields.CharField()
email = fields.EmailField()
value = fields.IntegerField()
single = fields.ChoiceField(choices=TestChoices)
multi = fields.MultipleChoiceField(choices=TestChoices)
def clean(self):
cleaned_data = self.cleaned_data
if cleaned_data.get("text") == "Raise non-field error":
raise ValidationError("Non-field error.")
return cleaned_data
def form_view(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
t = Template('Valid POST data.', name='Valid POST Template')
c = Context()
else:
t = Template('Invalid POST data. {{ form.errors }}', name='Invalid POST Template')
c = Context({'form': form})
else:
form = TestForm(request.GET)
t = Template('Viewing base form. {{ form }}.', name='Form GET Template')
c = Context({'form': form})
return HttpResponse(t.render(c))
def form_view_with_template(request):
"A view that tests a simple form"
if request.method == 'POST':
form = TestForm(request.POST)
if form.is_valid():
message = 'POST data OK'
else:
message = 'POST data has errors'
else:
form = TestForm()
message = 'GET form page'
return render(request, 'form_view.html', {
'form': form,
'message': message,
})
class BaseTestFormSet(BaseFormSet):
def clean(self):
"""No two email addresses are the same."""
if any(self.errors):
# Don't bother validating the formset unless each form is valid
return
emails = []
for form in self.forms:
email = form.cleaned_data['email']
if email in emails:
raise ValidationError(
"Forms in a set must have distinct email addresses."
)
emails.append(email)
TestFormSet = formset_factory(TestForm, BaseTestFormSet)
def formset_view(request):
"A view that tests a simple formset"
if request.method == 'POST':
formset = TestFormSet(request.POST)
if formset.is_valid():
t = Template('Valid POST data.', name='Valid POST Template')
c = Context()
else:
t = Template('Invalid POST data. {{ my_formset.errors }}',
name='Invalid POST Template')
c = Context({'my_formset': formset})
else:
formset = TestForm(request.GET)
t = Template('Viewing base formset. {{ my_formset }}.',
name='Formset GET Template')
c = Context({'my_formset': formset})
return HttpResponse(t.render(c))
@login_required
def login_protected_view(request):
"A simple view that is login protected."
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
@login_required(redirect_field_name='redirect_to')
def login_protected_view_changed_redirect(request):
"A simple view that is login protected with a custom redirect field set"
t = Template('This is a login protected test. Username is {{ user.username }}.', name='Login Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
def _permission_protected_view(request):
"A simple view that is permission protected."
t = Template('This is a permission protected test. '
'Username is {{ user.username }}. '
'Permissions are {{ user.get_all_permissions }}.',
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
permission_protected_view = permission_required('permission_not_granted')(_permission_protected_view)
permission_protected_view_exception = (
permission_required('permission_not_granted', raise_exception=True)(_permission_protected_view)
)
class _ViewManager:
@method_decorator(login_required)
def login_protected_view(self, request):
t = Template('This is a login protected test using a method. '
'Username is {{ user.username }}.',
name='Login Method Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
@method_decorator(permission_required('permission_not_granted'))
def permission_protected_view(self, request):
t = Template('This is a permission protected test using a method. '
'Username is {{ user.username }}. '
'Permissions are {{ user.get_all_permissions }}.',
name='Permissions Template')
c = Context({'user': request.user})
return HttpResponse(t.render(c))
_view_manager = _ViewManager()
login_protected_method_view = _view_manager.login_protected_view
permission_protected_method_view = _view_manager.permission_protected_view
def session_view(request):
"A view that modifies the session"
request.session['tobacconist'] = 'hovercraft'
t = Template('This is a view that modifies the session.',
name='Session Modifying View Template')
c = Context()
return HttpResponse(t.render(c))
def broken_view(request):
"""A view which just raises an exception, simulating a broken view."""
raise KeyError("Oops! Looks like you wrote some bad code.")
def mail_sending_view(request):
mail.EmailMessage(
"Test message",
"This is a test email",
"[email protected]",
['[email protected]', '[email protected]']).send()
return HttpResponse("Mail sent")
def mass_mail_sending_view(request):
m1 = mail.EmailMessage(
'First Test message',
'This is the first test email',
'[email protected]',
['[email protected]', '[email protected]'])
m2 = mail.EmailMessage(
'Second Test message',
'This is the second test email',
'[email protected]',
['[email protected]', '[email protected]'])
c = mail.get_connection()
c.send_messages([m1, m2])
return HttpResponse("Mail sent")
def nesting_exception_view(request):
"""
A view that uses a nested client to call another view and then raises an
exception.
"""
client = Client()
client.get('/get_view/')
raise Exception('exception message')
def django_project_redirect(request):
return HttpResponseRedirect('https://www.djangoproject.com/')
def no_trailing_slash_external_redirect(request):
"""
RFC 2616 3.2.2: A bare domain without any abs_path element should be
treated as having the trailing `/`.
Use https://testserver, rather than an external domain, in order to allow
use of follow=True, triggering Client._handle_redirects().
"""
return HttpResponseRedirect('https://testserver')
def index_view(request):
"""Target for no_trailing_slash_external_redirect with follow=True."""
return HttpResponse('Hello world')
def upload_view(request):
"""Prints keys of request.FILES to the response."""
return HttpResponse(', '.join(request.FILES))
class TwoArgException(Exception):
def __init__(self, one, two):
pass
def two_arg_exception(request):
raise TwoArgException('one', 'two')
|
10af424550a68bdf00d5fb2f69b1f3b92b2abeecef7de9aae31db192faf931bb | """Tests related to django.db.backends that haven't been organized."""
import datetime
import threading
import unittest
import warnings
from unittest import mock
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper
from django.db.models.sql.constants import CURSOR
from django.test import (
TestCase, TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
Article, Object, ObjectReference, Person, Post, RawData, Reporter,
ReporterProxy, SchoolClass, SQLKeywordsModel, Square,
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ,
)
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
        Test the custom ``django_date_trunc`` method, in particular against
fields which clash with strings passed to it (e.g. 'year') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
years = SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
        Test the custom ``django_date_extract`` method, in particular against fields
which clash with strings passed to it (e.g. 'day') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
classes = SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query_without_previous_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
with connection.cursor() as cursor:
connection.ops.last_executed_query(cursor, '', ())
def test_debug_sql(self):
list(Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]['sql'].lower()
self.assertIn("select", sql)
self.assertIn(Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""last_executed_query() returns a string."""
data = RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
sql, params = data.query.sql_with_params()
with data.query.get_compiler('default').execute_sql(CURSOR) as cursor:
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, str)
def test_last_executed_query(self):
        # last_executed_query() interpolates all parameters; in most cases it
        # is not equal to QuerySet.query.
for qs in (
Article.objects.filter(pk=1),
Article.objects.filter(pk__in=(1, 2), reporter__pk=3),
Article.objects.filter(
pk=1,
reporter__pk=9,
).exclude(reporter__pk__in=[2, 1]),
):
sql, params = qs.query.sql_with_params()
with qs.query.get_compiler(DEFAULT_DB_ALIAS).execute_sql(CURSOR) as cursor:
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
str(qs.query),
)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_last_executed_query_dict(self):
square_opts = Square._meta
sql = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (
connection.introspection.identifier_converter(square_opts.db_table),
connection.ops.quote_name(square_opts.get_field('root').column),
connection.ops.quote_name(square_opts.get_field('square').column),
)
with connection.cursor() as cursor:
params = {'root': 2, 'square': 4}
cursor.execute(sql, params)
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
sql % params,
)
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
with connection.cursor() as cursor:
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
with self.assertRaises(Exception):
cursor.executemany(query, [(1, 2, 3)])
with self.assertRaises(Exception):
cursor.executemany(query, [(1,)])
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
    check that it does. Refs #8901.
"""
available_apps = ['backends']
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""
An m2m save of a model with a long name and a long m2m field name
doesn't error (#8901).
"""
obj = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""
Sequence resetting as part of a flush with model with long name and
long pk name doesn't error (#8901).
"""
        # A full flush is expensive for the full test suite, so we dig into the
        # internals to generate the likely offending SQL and run it manually.
# Some convenience aliases
VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
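        # reset_sequences=True makes sql_flush() include the sequence-reset
        # statements, which is where an over-long sequence name would surface.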
sql_list = connection.ops.sql_flush(no_style(), tables, reset_sequences=True)
connection.ops.execute_sql_flush(sql_list)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
Post.objects.create(id=10, name='1st post', text='hello world')
# Reset the sequences for the database
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [Post])
with connection.cursor() as cursor:
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = Post.objects.create(name='New post', text='goodbye world')
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
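        # Close and reopen the connection so connection_created fires and the
        # receiver records it.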
connection.close()
with connection.cursor():
pass
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
with connection.cursor():
pass
self.assertEqual(data, {})
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
def test_paramless_no_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], '%s')
def test_parameter_escaping(self):
with connection.cursor() as cursor:
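            # With parameters present, a literal percent sign must be doubled
            # ('%%') so the driver doesn't mistake it for a placeholder.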
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
def create_squares(self, args, paramstyle, multiple):
opts = Square._meta
tbl = connection.introspection.identifier_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
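        # 'format' uses positional %s placeholders with sequence parameters,
        # e.g. (3, 9); 'pyformat' uses named %(name)s placeholders with dict
        # parameters, e.g. {'root': 3, 'square': 9}.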
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
with connection.cursor() as cursor:
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = ((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 5)
args = ((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = ({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 5)
args = ({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as Unicode objects.
qn = connection.ops.quote_name
Person(first_name="John", last_name="Doe").save()
Person(first_name="Jane", last_name="Doe").save()
Person(first_name="Mary", last_name="Agnelline").save()
Person(first_name="Peter", last_name="Parker").save()
Person(first_name="Clark", last_name="Kent").save()
opts2 = Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
with connection.cursor() as cursor:
cursor.execute(
'SELECT %s, %s FROM %s ORDER BY %s' % (
qn(f3.column),
qn(f4.column),
connection.introspection.identifier_converter(opts2.db_table),
qn(f3.column),
)
)
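            # Rows are ordered by first name, so fetchone(), fetchmany(2), and
            # fetchall() consume Clark, then Jane/John, then Mary/Peter.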
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "françois"
try:
with connection.cursor():
pass
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail('Unexpected error raised with Unicode password: %s' % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, 'ops'))
self.assertTrue(hasattr(connection.ops, 'connection'))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Creating an existing table returns a DatabaseError """
query = 'CREATE TABLE %s (id INTEGER);' % Article._meta.db_table
with connection.cursor() as cursor:
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
        Cursors can be used as context managers.
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
# Both InterfaceError and ProgrammingError seem to be used when
# accessing closed cursor (psycopg2 has InterfaceError, rest seem
# to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(connection.vendor == 'postgresql',
"Psycopg2 specific cursor.closed attribute needed")
def test_cursor_contextmanager_closing(self):
# There isn't a generic way to test that cursors are closed, but
# psycopg2 offers us a way to check that by closed attribute.
# So, run only on psycopg2 for that reason.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_is_usable_after_database_disconnects(self):
"""
is_usable() doesn't crash when the database disconnects (#21553).
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
sql = 'SELECT 1' + connection.features.bare_select_suffix
with connection.cursor() as cursor:
reset_queries()
cursor.execute(sql)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
self.assertEqual(connection.queries[0]['sql'], sql)
reset_queries()
self.assertEqual(0, len(connection.queries))
sql = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square'),
))
with connection.cursor() as cursor:
cursor.executemany(sql, [(1, 1), (2, 4)])
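        # executemany() is logged as a single entry whose SQL is prefixed with
        # the number of parameter sets, e.g. '2 times: ...'.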
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
self.assertEqual(connection.queries[0]['sql'], '2 times: %s' % sql)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
The backend doesn't store an unlimited number of queries (#12581).
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connection = connection.copy()
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
msg = "Limit for query logging exceeded, only the last 3 queries will be returned."
with self.assertWarnsMessage(UserWarning, msg):
self.assertEqual(3, len(new_connection.queries))
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
@mock.patch('django.db.backends.utils.logger')
@override_settings(DEBUG=True)
def test_queries_logger(self, mocked_logger):
sql = 'SELECT 1' + connection.features.bare_select_suffix
with connection.cursor() as cursor:
cursor.execute(sql)
params, kwargs = mocked_logger.debug.call_args
self.assertIn('; alias=%s', params[0])
self.assertEqual(params[2], sql)
self.assertIsNone(params[3])
self.assertEqual(params[4], connection.alias)
self.assertEqual(
list(kwargs['extra']),
['duration', 'sql', 'params', 'alias'],
)
self.assertEqual(tuple(kwargs['extra'].values()), params[1:])
def test_timezone_none_use_tz_false(self):
connection.ensure_connection()
with self.settings(TIME_ZONE=None, USE_TZ=False):
connection.init_connection_state()
# These tests aren't conditional because it would require differentiating
# between MySQL+InnoDB and MySQL+MYISAM (something we currently can't do).
class FkConstraintsTests(TransactionTestCase):
available_apps = ['backends']
def setUp(self):
# Create a Reporter.
self.r = Reporter.objects.create(first_name='John', last_name='Smith')
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
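        # reporter_id=30 doesn't match any Reporter, so backends that enforce
        # FK constraints raise IntegrityError on save(); others skip the test.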
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced for proxy models (refs #17519).
a2 = Article(
headline='This is another test', reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30,
)
with self.assertRaises(IntegrityError):
a2.save()
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a1 = Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced for proxy models (refs #17519).
# Create another article
r_proxy = ReporterProxy.objects.get(pk=self.r.pk)
Article.objects.create(
headline='Another article',
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r, reporter_proxy=r_proxy,
)
# Retrieve the second article from the DB
a2 = Article.objects.get(headline='Another article')
a2.reporter_proxy_id = 30
with self.assertRaises(IntegrityError):
a2.save()
def test_disable_constraint_checks_manually(self):
"""
        When constraint checks are disabled, it should be possible to write
        bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
        When constraint checks are disabled (using the context manager), it
        should be possible to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
with self.assertRaises(IntegrityError):
connection.check_constraints(table_names=[Article._meta.db_table])
transaction.set_rollback(True)
def test_check_constraints_sql_keywords(self):
with transaction.atomic():
obj = SQLKeywordsModel.objects.create(reporter=self.r)
obj.refresh_from_db()
obj.reporter_id = 30
with connection.constraint_checks_disabled():
obj.save()
with self.assertRaises(IntegrityError):
connection.check_constraints(table_names=['order'])
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ['backends']
def test_default_connection_thread_local(self):
"""
The default connection (i.e. django.db.connection) is different for
each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
with connection.cursor():
pass
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.inc_thread_sharing()
with connection.cursor():
pass
connections_dict[id(connection)] = connection
try:
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
            # Each created connection (one in the main thread and one in each
            # of the two runner threads) got a different inner connection.
self.assertEqual(len({conn.connection for conn in connections_dict.values()}), 3)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection and conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_connections_thread_local(self):
"""
The connections are different for each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.inc_thread_sharing()
connections_dict[id(conn)] = conn
try:
num_new_threads = 2
for x in range(num_new_threads):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEqual(
len(connections_dict),
len(connections.all()) * (num_new_threads + 1),
)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection and conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_pass_connection_between_threads(self):
"""
A connection can be passed from one thread to the other (#17258).
"""
Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching thread sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
connections['default'].close()
# After calling inc_thread_sharing() on the connection.
connections['default'].inc_thread_sharing()
try:
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
finally:
connections['default'].dec_thread_sharing()
def test_closing_non_shared_connections(self):
"""
A connection that is not explicitly shareable cannot be closed by
another thread (#17258).
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections['default'].inc_thread_sharing()
try:
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
finally:
connections['default'].dec_thread_sharing()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
def test_thread_sharing_count(self):
self.assertIs(connection.allow_thread_sharing, False)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, False)
msg = 'Cannot decrement the thread sharing count below zero.'
with self.assertRaisesMessage(RuntimeError, msg):
connection.dec_thread_sharing()
class MySQLPKZeroTests(TestCase):
"""
Zero as id for AutoField should raise exception in MySQL, because MySQL
does not allow zero for autoincrement primary key if the
NO_AUTO_VALUE_ON_ZERO SQL mode is not enabled.
"""
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = Object.objects.create()
ref = ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(Object.objects.filter(id=12345).exists())
ref = ObjectReference.objects.create(obj_id=12345)
ref_new = ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = Object.objects.create()
obj.related_objects.create()
self.assertEqual(Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = Object._meta.get_field("related_objects").remote_field.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
|
b380b580cc36ca2febd3beb78b5e520b0825f3f3062ba41f1ab160fc0888f7b5 | import datetime
import itertools
import unittest
from copy import copy
from unittest import mock
from django.core.exceptions import FieldError
from django.core.management.color import no_style
from django.db import (
DatabaseError, DataError, IntegrityError, OperationalError, connection,
)
from django.db.models import (
CASCADE, PROTECT, AutoField, BigAutoField, BigIntegerField, BinaryField,
BooleanField, CharField, CheckConstraint, DateField, DateTimeField,
DecimalField, F, FloatField, ForeignKey, ForeignObject, Index,
IntegerField, JSONField, ManyToManyField, Model, OneToOneField, OrderBy,
PositiveIntegerField, Q, SlugField, SmallAutoField, SmallIntegerField,
TextField, TimeField, UniqueConstraint, UUIDField, Value,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper
from django.db.models.indexes import IndexExpression
from django.db.transaction import TransactionManagementError, atomic
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import (
CaptureQueriesContext, isolate_apps, register_lookup,
)
from django.utils import timezone
from .fields import (
CustomManyToManyField, InheritedManyToManyField, MediumBlobField,
)
from .models import (
Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName,
AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName,
AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak,
BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK,
Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename,
Thing, UniqueTest, new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak,
BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note,
Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test dependent model classes that will be
# automatically removed from the app cache on test tear down.
self.local_models = []
# isolated_local_models contains models that are in test methods
# decorated with @isolate_apps.
self.isolated_local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if 'schema' in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models['schema'][through._meta.model_name]
del new_apps.all_models['schema'][model._meta.model_name]
if self.isolated_local_models:
with connection.schema_editor() as editor:
for model in self.isolated_local_models:
editor.delete_model(model)
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
if connection.features.ignores_table_name_case:
table_names = [table_name.lower() for table_name in table_names]
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if connection.features.ignores_table_name_case:
tbl = tbl.lower()
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
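        """
        Return a mapping of column name to (introspected field type,
        description row) for the given model's table.
        """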
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
        # SQLite also doesn't raise an error for a missing table, so detect
        # that case via an empty description.
if not columns:
raise DatabaseError("Table does not exist (empty pragma)")
return columns
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table).values()
if c['index'] and len(c['columns']) == 1
]
def get_uniques(self, table):
with connection.cursor() as cursor:
return [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table).values()
if c['unique'] and len(c['columns']) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details['columns'] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(self, schema_editor, model, field, field_name, expected_default,
cast_function=None):
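        """
        Add `field` to `model` via `schema_editor`, then check that an
        existing row received `expected_default` in the new column, optionally
        casting the database value with `cast_function` first.
        """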
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
database_default = cursor.fetchall()[0][0]
if cast_function and type(database_default) != type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
        Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {'fks': 0, 'uniques': 0, 'indexes': 0}
for c in constraints.values():
if c['columns'] == [column]:
if c['foreign_key'] == fk_to:
counts['fks'] += 1
if c['unique']:
counts['uniques'] += 1
elif c['index']:
counts['indexes'] += 1
return counts
def get_column_collation(self, table, column):
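        """Return the collation of `table`.`column` using a new cursor."""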
with connection.cursor() as cursor:
return next(
f.collation
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def assertIndexOrder(self, table, index, order):
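        """Assert that `index` exists on `table` with the given column ordering."""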
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]['orders']
self.assertTrue(all(val == expected for val, expected in zip(index_orders, order)))
def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for details in constraints.values():
if details['columns'] == [column] and details['foreign_key']:
constraint_fk = details['foreign_key']
break
self.assertEqual(constraint_fk, (expected_fk_table, field))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
with connection.schema_editor() as editor:
# Create the table
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
editor.delete_model(Author)
# No deferred SQL should be left over.
self.assertEqual(editor.deferred_sql, [])
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, 'author_id', 'schema_tag')
@skipUnlessDBFeature('can_create_inline_fk')
def test_inline_fk(self):
# Create some tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.create_model(Note)
self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book')
# Add a foreign key from one to the other.
with connection.schema_editor() as editor:
new_field = ForeignKey(Book, CASCADE)
new_field.set_attributes_from_name('book')
editor.add_field(Note, new_field)
self.assertForeignKeyExists(Note, 'book_id', 'schema_book')
# Creating a FK field with a constraint uses a single statement without
# a deferred ALTER TABLE.
self.assertFalse([
sql for sql in (str(statement) for statement in editor.deferred_sql)
if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql
])
@skipUnlessDBFeature('can_create_inline_fk')
def test_add_inline_fk_update_data(self):
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key and update data in the same transaction.
new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True)
new_field.set_attributes_from_name('new_parent_fk')
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk])
assertIndex = (
self.assertIn
if connection.features.indexes_foreign_keys
else self.assertNotIn
)
assertIndex('new_parent_fk_id', self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature(
'can_create_inline_fk',
'allows_multiple_constraints_on_same_fields',
)
@isolate_apps('schema')
def test_add_inline_fk_index_update_data(self):
class Node(Model):
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key, update data, and an index in the same
# transaction.
new_field = ForeignKey(Node, CASCADE, related_name='new_fk', null=True)
new_field.set_attributes_from_name('new_parent_fk')
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
Node._meta.add_field(new_field)
editor.execute('UPDATE schema_node SET new_parent_fk_id = %s;', [parent.pk])
editor.add_index(Node, Index(fields=['new_parent_fk'], name='new_parent_inline_fk_idx'))
self.assertIn('new_parent_fk_id', self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature('supports_foreign_keys')
def test_char_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorCharFieldWithIndex)
# Change CharField to FK
old_field = AuthorCharFieldWithIndex._meta.get_field('char_field')
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name('char_field')
with connection.schema_editor() as editor:
editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True)
self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
@skipUnlessDBFeature('supports_index_on_text_field')
def test_text_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorTextFieldWithIndex)
# Change TextField to FK
old_field = AuthorTextFieldWithIndex._meta.get_field('text_field')
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name('text_field')
with connection.schema_editor() as editor:
editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True)
self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author')
@isolate_apps('schema')
def test_char_field_pk_to_auto_field(self):
class Foo(Model):
id = CharField(max_length=255, primary_key=True)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field('id')
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name('id')
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = 'schema'
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag')
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
@isolate_apps('schema')
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = 'schema'
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
old_field = Author._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name('id')
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag')
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
'column': editor.quote_name(new_field.name),
}
self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries))
columns = self.column_classes(Author)
self.assertEqual(columns['age'][0], connection.features.introspected_field_types['IntegerField'])
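        # Description index 6 is null_ok, so the new column allows NULLs.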
self.assertTrue(columns['age'][1][6])
def test_add_field_remove_field(self):
"""
Adding a field and removing it removes all deferred sql referring to it.
"""
with connection.schema_editor() as editor:
# Create a table with a unique constraint on the slug field.
editor.create_model(Tag)
# Remove the slug column.
editor.remove_field(Tag, Tag._meta.get_field('slug'))
self.assertEqual(editor.deferred_sql, [])
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(columns['surname'][0], connection.features.introspected_field_types['CharField'])
self.assertEqual(columns['surname'][1][6],
connection.features.interprets_empty_strings_as_nulls)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # BooleanFields are stored as TINYINT(1) on MySQL.
field_type = columns['awesome'][0]
self.assertEqual(field_type, connection.features.introspected_field_types['BooleanField'])
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
            # A field that stores the number of items in its value rather than
            # the value itself.
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add the field with a default it needs to cast (to string in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns['thing']
self.assertEqual(field_type, connection.features.introspected_field_types['IntegerField'])
# Make sure the values were transformed correctly
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# MySQL annoyingly uses the same backend, so it'll come back as one of
# these two types.
self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))
@unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b'123')
new_field.set_attributes_from_name('bits')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns['bits'][0], "TextField")
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertTrue(columns['name'][1][6])
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
def test_alter_auto_field_to_integer_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to IntegerField
old_field = Author._meta.get_field('id')
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Now that ID is an IntegerField, the database raises an error if it
# isn't provided.
if not connection.features.supports_unspecified_pk:
with self.assertRaises(DatabaseError):
Author.objects.create()
def test_alter_auto_field_to_char_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to CharField
old_field = Author._meta.get_field('id')
new_field = CharField(primary_key=True, max_length=50)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@isolate_apps('schema')
def test_alter_auto_field_quoted_db_column(self):
class Foo(Model):
id = AutoField(primary_key=True, db_column='"quoted_id"')
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.db_column = '"quoted_id"'
new_field.set_attributes_from_name('id')
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_not_unique_field_to_primary_key(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change UUIDField to primary key.
old_field = Author._meta.get_field('uuid')
new_field = UUIDField(primary_key=True)
new_field.set_attributes_from_name('uuid')
new_field.model = Author
with connection.schema_editor() as editor:
editor.remove_field(Author, Author._meta.get_field('id'))
editor.alter_field(Author, old_field, new_field, strict=True)
@isolate_apps('schema')
def test_alter_primary_key_quoted_db_table(self):
class Foo(Model):
class Meta:
app_label = 'schema'
db_table = '"foo"'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.set_attributes_from_name('id')
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_text_field(self):
# Regression for "BLOB/TEXT column 'info' can't have a default value")
# on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_text_field_to_not_null_with_default_value(self):
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field('address')
new_field = TextField(blank=True, default='', null=False)
new_field.set_attributes_from_name('address')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl')
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('parent')
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name('parent')
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05')
old_field = Note._meta.get_field('info')
new_field = DateField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05 3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = TimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='aaa')
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='bbb')
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns['height'][1][6])
# Create some test data
Author.objects.create(name='Not null author', height=12)
Author.objects.create(name='Null author')
# Verify null value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertIsNone(Author.objects.get(name='Null author').height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertFalse(columns['height'][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertEqual(Author.objects.get(name='Null author').height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field('name')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
def test_alter_char_field_decrease_length(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name='x' * 255)
# Change max_length of CharField.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=254)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
msg = 'value too long for type character varying(254)'
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
def test_alter_field_with_custom_db_type(self):
from django.contrib.postgres.fields import ArrayField
class Foo(Model):
field = ArrayField(CharField(max_length=255))
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field('field')
new_field = ArrayField(CharField(max_length=16))
new_field.set_attributes_from_name('field')
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@isolate_apps('schema')
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
def test_alter_array_field_decrease_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16))
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=['x' * 16])
old_field = ArrayModel._meta.get_field('field')
new_field = ArrayField(CharField(max_length=15))
new_field.set_attributes_from_name('field')
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = 'value too long for type character varying(15)'
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
@isolate_apps('schema')
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
def test_alter_array_field_decrease_nested_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(ArrayField(CharField(max_length=16)))
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=[['x' * 16]])
old_field = ArrayModel._meta.get_field('field')
new_field = ArrayField(ArrayField(CharField(max_length=15)))
new_field.set_attributes_from_name('field')
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = 'value too long for type character varying(15)'
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip the ALTER statement on databases where
interprets_empty_strings_as_nulls is set when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field('info')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('supports_combined_alters')
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns['height'][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns['height'][1][6])
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_to_fk(self):
"""
#24447 - Tests adding an FK constraint to an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for details in constraints.values():
if details['foreign_key']:
self.fail('Found an unexpected FK constraint to %s' % details['columns'])
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
# Ensure the field is not unique anymore
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
Book.objects.all().delete()
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], connection.features.introspected_field_types['IntegerField'])
# Ensure the field is unique now
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
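# get_constraints_count() (helper defined earlier in this module) tallies
# {'fks': ..., 'uniques': ..., 'indexes': ...} for the given column and its
# related (table, column) pair.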
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
)
old_field = Book._meta.get_field('author')
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for OneToOneField.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
)
old_field = Book._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(
counts,
{'fks': expected_fks, 'uniques': 0, 'indexes': expected_indexes},
)
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for ForeignKey.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
@skipUnlessDBFeature('ignores_table_name_case')
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
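# (on backends that ignore table name case the upper-cased name refers to the
# same table, so this rename must simply not raise)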
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# This will fail if DROP DEFAULT is inadvertently executed on this
# field which drops the id sequence, at least on PostgreSQL.
Author.objects.create(name='Foo')
Author.objects.create(name='Bar')
def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self):
"""
Converting an implicit PK to BigAutoField(primary_key=True) should keep
a sequence owner on PostgreSQL.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name='Foo', pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author])
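# On PostgreSQL sequence_reset_sql() emits roughly
#   SELECT setval(pg_get_serial_sequence('"schema_author"', 'id'), ...)
# which only works while the sequence is still owned by the column.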
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
# Fail on PostgreSQL if sequence is missing an owner.
self.assertIsNotNone(Author.objects.create(name='Bar'))
def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self):
"""
Converting an implicit PK to SmallAutoField(primary_key=True) should
keep a sequence owner on PostgreSQL.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field('id')
new_field = SmallAutoField(primary_key=True)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name='Foo', pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author])
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
# Fail on PostgreSQL if sequence is missing an owner.
self.assertIsNotNone(Author.objects.create(name='Bar'))
def test_alter_int_pk_to_autofield_pk(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field('i')
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# A model representing the updated model.
class IntegerPKToAutoField(Model):
i = AutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
def test_alter_int_pk_to_bigautofield_pk(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
BigAutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field('i')
new_field = BigAutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# A model representing the updated model.
class IntegerPKToBigAutoField(Model):
i = BigAutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToBigAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
@isolate_apps('schema')
def test_alter_smallint_pk_to_smallautofield_pk(self):
"""
Should be able to alter a SmallIntegerField(primary_key=True) to
SmallAutoField(primary_key=True).
"""
class SmallIntegerPK(Model):
i = SmallIntegerField(primary_key=True)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(SmallIntegerPK)
self.isolated_local_models = [SmallIntegerPK]
old_field = SmallIntegerPK._meta.get_field('i')
new_field = SmallAutoField(primary_key=True)
new_field.model = SmallIntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)
def test_alter_int_pk_to_int_unique(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
IntegerField(unique=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# Delete the old PK
old_field = IntegerPK._meta.get_field('i')
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# The primary key constraint is gone. Result depends on database:
# 'id' for SQLite, None for others (must not be 'i').
self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None))
# Set up a model class as it currently stands. The original IntegerPK
# class is now out of date and some backends make use of the whole
# model class when modifying a field (such as sqlite3 when remaking a
# table) so an outdated model class leads to incorrect results.
class Transitional(Model):
i = IntegerField(unique=True)
j = IntegerField(unique=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = 'INTEGERPK'
# model requires a new PK
old_field = Transitional._meta.get_field('j')
new_field = IntegerField(primary_key=True)
new_field.model = Transitional
new_field.set_attributes_from_name('j')
with connection.schema_editor() as editor:
editor.alter_field(Transitional, old_field, new_field, strict=True)
# Create a model class representing the updated model.
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = 'INTEGERPK'
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns['display_name'][0], connection.features.introspected_field_types['CharField'])
self.assertNotIn("name", columns)
@isolate_apps('schema')
def test_rename_referenced_field(self):
class Author(Model):
name = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE, to_field='name')
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
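# schema_editor(atomic=...) mirrors whether the backend can rename a
# referenced column inside a transaction (supports_atomic_references_rename).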
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_field(Author, Author._meta.get_field('name'), new_field)
# Ensure the foreign key reference was updated.
self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns['detail_info'][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
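# (the auto-created through model is reachable via the field's
# remote_field.through and gets one FK column per side, e.g. 'tagm2mtest_id')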
columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField'])
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(columns['book_id'][0], connection.features.introspected_field_types['IntegerField'])
self.assertEqual(columns['tag_id'][0], connection.features.introspected_field_types['IntegerField'])
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
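# contribute_to_class() wires the field onto the model and builds the
# auto-created through model, but no DDL runs until add_field() below.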
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], connection.features.introspected_field_types['IntegerField'])
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
'tagm2mtest_id',
'schema_tagm2mtest',
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
# Adjust the model state to match the repointed field so teardown can clean up.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest')
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@isolate_apps('schema')
def test_m2m_rename_field_in_target_model(self):
class LocalTagM2MTest(Model):
title = CharField(max_length=255)
class Meta:
app_label = 'schema'
class LocalM2M(Model):
tags = ManyToManyField(LocalTagM2MTest)
class Meta:
app_label = 'schema'
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(LocalM2M)
editor.create_model(LocalTagM2MTest)
self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
# Ensure the m2m table is there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
# Alter a field in LocalTagM2MTest.
old_field = LocalTagM2MTest._meta.get_field('title')
new_field = CharField(max_length=254)
new_field.contribute_to_class(LocalTagM2MTest, 'title1')
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
# Ensure the m2m table is still there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
@skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
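# (Author.height is a PositiveIntegerField, which adds a "height" >= 0 check
# on backends with column check constraint support)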
constraints = self.get_constraints(Author._meta.db_table)
if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for details in constraints.values():
if details['columns'] == ["height"] and details['check']:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
self.fail("No check constraint for height found")
@skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
def test_remove_field_check_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the custom check constraint
constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check')
custom_constraint_name = constraint.name
Author._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field check
old_field = Author._meta.get_field('height')
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field check
new_field2 = Author._meta.get_field('height')
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the check constraint
with connection.schema_editor() as editor:
Author._meta.constraints = []
editor.remove_constraint(Author, constraint)
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_name_quoting(self):
old_table_name = TagUniqueRename._meta.db_table
try:
with connection.schema_editor() as editor:
editor.create_model(TagUniqueRename)
editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table')
TagUniqueRename._meta.db_table = 'unique-table'
# This fails if the unique index name isn't quoted.
editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),))
finally:
TagUniqueRename._meta.db_table = old_table_name
@isolate_apps('schema')
@skipUnlessDBFeature('supports_foreign_keys')
def test_unique_no_unnecessary_fk_drops(self):
"""
AlterField should be selective about dropping foreign key constraints
when modifying a field with a unique constraint. Otherwise it
incorrectly drops and recreates the Book.author foreign key even though
that key doesn't restrict the field being changed (#29193).
"""
class Author(Model):
name = CharField(max_length=254, unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.model = Author
new_field.set_attributes_from_name('name')
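# The schema editor logs each statement it executes to the
# 'django.db.backends.schema' logger at DEBUG, so the record count below
# approximates the number of DDL statements run.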
with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
with connection.schema_editor() as editor:
editor.alter_field(Author, Author._meta.get_field('name'), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
@isolate_apps('schema')
def test_unique_and_reverse_m2m(self):
"""
AlterField can modify a unique field when there's a reverse M2M
relation on the model.
"""
class Tag(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
tags = ManyToManyField(Tag, related_name='books')
class Meta:
app_label = 'schema'
self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through]
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Book)
new_field = SlugField(max_length=75, unique=True)
new_field.model = Tag
new_field.set_attributes_from_name('slug')
with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
with connection.schema_editor() as editor:
editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
# Ensure that the field is still unique.
Tag.objects.create(title='foo', slug='foo')
with self.assertRaises(IntegrityError):
Tag.objects.create(title='bar', slug='foo')
@skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
def test_remove_field_unique_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueName)
# Add the custom unique constraint
constraint = UniqueConstraint(fields=['name'], name='author_name_uniq')
custom_constraint_name = constraint.name
AuthorWithUniqueName._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueName, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field uniqueness
old_field = AuthorWithUniqueName._meta.get_field('name')
new_field = CharField(max_length=255)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field uniqueness
new_field2 = AuthorWithUniqueName._meta.get_field('name')
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueName._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueName, constraint)
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
editor.add_field(BookWithoutAuthor, new_field)
# Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
@skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
def test_remove_unique_together_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueNameAndBirthday)
# Add the custom unique constraint
constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq')
custom_constraint_name = constraint.name
AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Remove unique together
unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together
with connection.schema_editor() as editor:
editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, [])
constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add unique together
with connection.schema_editor() as editor:
editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together)
constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueNameAndBirthday._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint)
def test_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(fields=['name'], name='name_uq')
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
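# create_sql() returns a Statement; references_table()/references_column()
# report which schema objects the generated SQL touches.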
table = Author._meta.db_table
self.assertIs(sql.references_table(table), True)
self.assertIs(sql.references_column(table, 'name'), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Upper('name').desc(), name='func_upper_uq')
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ['DESC'])
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
# SQL contains a database function.
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_composite_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Upper('title'),
Lower('slug'),
name='func_upper_lower_unq',
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
# SQL contains database functions.
self.assertIs(sql.references_column(table, 'title'), True)
self.assertIs(sql.references_column(table, 'slug'), True)
sql = str(sql)
self.assertIn('UPPER(%s)' % editor.quote_name('title'), sql)
self.assertIn('LOWER(%s)' % editor.quote_name('slug'), sql)
self.assertLess(sql.index('UPPER'), sql.index('LOWER'))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_unique_constraint_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
F('height').desc(),
'uuid',
Lower('name').asc(),
name='func_f_lower_field_unq',
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC', 'ASC'])
constraints = self.get_constraints(table)
self.assertIs(constraints[constraint.name]['unique'], True)
self.assertEqual(len(constraints[constraint.name]['columns']), 3)
self.assertEqual(constraints[constraint.name]['columns'][1], 'uuid')
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, 'height'), True)
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIs(sql.references_column(table, 'uuid'), True)
self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes', 'supports_partial_indexes')
def test_func_unique_constraint_partial(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper('name'),
name='func_upper_cond_weight_uq',
condition=Q(weight__isnull=False),
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
self.assertIn(
'WHERE %s IS NOT NULL' % editor.quote_name('weight'),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes', 'supports_covering_indexes')
def test_func_unique_constraint_covering(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper('name'),
name='func_upper_covering_uq',
include=['weight', 'height'],
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
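# The expression member has no column name, so introspection reports it as
# None, followed by the INCLUDE columns.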
self.assertEqual(
constraints[constraint.name]['columns'],
[None, 'weight', 'height'],
)
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIs(sql.references_column(table, 'weight'), True)
self.assertIs(sql.references_column(table, 'height'), True)
self.assertIn('UPPER(%s)' % editor.quote_name('name'), str(sql))
self.assertIn(
'INCLUDE (%s, %s)' % (
editor.quote_name('weight'),
editor.quote_name('height'),
),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
constraint = UniqueConstraint(
F('name__lower'),
F('weight__abs'),
name='func_lower_abs_lookup_uq',
)
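# With Lower/Abs registered as lookups, F('name__lower') and F('weight__abs')
# resolve to those transforms when the constraint SQL is built.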
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
# SQL contains columns.
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIs(sql.references_column(table, 'weight'), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_collate(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest(
'This backend does not support case-insensitive collations.'
)
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Collate(F('title'), collation=collation).desc(),
Collate('slug', collation=collation),
name='func_collate_uq',
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]['unique'], True)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ['DESC', 'ASC'])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, 'title'), True)
self.assertIs(sql.references_column(table, 'slug'), True)
self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipIfDBFeature('supports_expression_indexes')
def test_func_unique_constraint_unsupported(self):
# A UniqueConstraint containing expressions is ignored on databases that
# don't support indexes on expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(F('name'), name='func_name_uq')
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_constraint(Author, constraint))
self.assertIsNone(editor.remove_constraint(Author, constraint))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_nonexistent_field(self):
constraint = UniqueConstraint(Lower('nonexistent'), name='func_nonexistent_uq')
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Random(), name='func_random_uq')
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_constraint(Author, constraint)
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure there's no index on the slug/title columns first
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
False,
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
True,
)
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
False,
)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure there's no index_together to begin with
self.assertEqual(Book._meta.index_together, ())
# Add the index_together constraint
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [['author', 'title']], [])
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c['columns'] == ["slug", "title"]
),
True,
)
@skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
def test_remove_index_together_does_not_remove_meta_indexes(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedNameAndBirthday)
# Add the custom index
index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx')
custom_index_name = index.name
AuthorWithIndexedNameAndBirthday._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedNameAndBirthday, index)
# Ensure the indexes exist
constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Remove index together
index_together = AuthorWithIndexedNameAndBirthday._meta.index_together
with connection.schema_editor() as editor:
editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, [])
constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add index together
with connection.schema_editor() as editor:
editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together)
constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name for name, details in constraints.items()
if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the index
with connection.schema_editor() as editor:
AuthorWithIndexedNameAndBirthday._meta.indexes = []
editor.remove_index(AuthorWithIndexedNameAndBirthday, index)
@isolate_apps('schema')
def test_db_table(self):
"""
Tests renaming of the table
"""
class Author(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
# Create the table and one referring it.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
# Alter the table
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
# Ensure the foreign key reference was updated
self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
# Alter the table again
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], connection.features.introspected_field_types['CharField'])
def test_add_remove_index(self):
"""
Tests index addition and removal
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
        # Ensure the table is there and has no index on the name column
        self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
# Add the index
index = Index(fields=['name'], name='author_title_idx')
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn('name', self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
# Ensure the table has its index
self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=['-name'], name='author_name_idx')
author_index_name = index.name
with connection.schema_editor() as editor:
db_index_name = editor._create_index_name(
table_name=AuthorWithIndexedName._meta.db_table,
column_names=('name',),
)
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field('name')
new_field = CharField(max_length=255)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
        Indexes can be defined with per-column ordering (ASC/DESC).
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
        # The table doesn't have an index on the name column yet
        self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
index_name = 'author_name_idx'
# Add the index
index = Index(fields=['name', '-weight'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Add a unique column, verify that creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
def test_text_field_with_db_index(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorTextFieldWithIndex)
# The text_field index is present if the database supports it.
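        # (Not every backend can index a large TEXT column directly, hence
        # the supports_index_on_text_field feature flag checked here.)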
assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn
assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table))
def _index_expressions_wrappers(self):
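        # Helper naming the wrapper expression classes (typically OrderBy
        # and, on some backends, Collate) that may only appear at the top of
        # an indexed expression; the tests below embed this list in the
        # expected error messages.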
index_expression = IndexExpression()
index_expression.set_wrapper_classes(connection)
return ', '.join([
wrapper_cls.__qualname__ for wrapper_cls in index_expression.wrapper_classes
])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_multiple_wrapper_references(self):
index = Index(OrderBy(F('name').desc(), descending=True), name='name')
msg = (
"Multiple references to %s can't be used in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_invalid_topmost_expressions(self):
index = Index(Upper(F('name').desc()), name='name')
msg = (
'%s must be topmost expressions in an indexed expression.'
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower('name').desc(), name='func_lower_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ['DESC'])
# SQL contains a database function.
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIn('LOWER(%s)' % editor.quote_name('name'), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_f(self):
with connection.schema_editor() as editor:
editor.create_model(Tag)
index = Index('slug', F('title').desc(), name='func_f_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Tag, index)
sql = index.create_sql(Tag, editor)
table = Tag._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Tag._meta.db_table, index.name, ['ASC', 'DESC'])
# SQL contains columns.
self.assertIs(sql.references_column(table, 'slug'), True)
self.assertIs(sql.references_column(table, 'title'), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Tag, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
index = Index(
F('name__lower'),
F('weight__abs'),
name='func_lower_abs_lookup_idx',
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns.
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIs(sql.references_column(table, 'weight'), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_composite_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower('name'), Upper('name'), name='func_lower_upper_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains database functions.
self.assertIs(sql.references_column(table, 'name'), True)
sql = str(sql)
self.assertIn('LOWER(%s)' % editor.quote_name('name'), sql)
self.assertIn('UPPER(%s)' % editor.quote_name('name'), sql)
self.assertLess(sql.index('LOWER'), sql.index('UPPER'))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_composite_func_index_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
index = Index(
F('author').desc(),
Lower('title').asc(),
'pub_date',
name='func_f_lower_field_idx',
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Book, index)
sql = index.create_sql(Book, editor)
table = Book._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ['DESC', 'ASC', 'ASC'])
self.assertEqual(len(constraints[index.name]['columns']), 3)
self.assertEqual(constraints[index.name]['columns'][2], 'pub_date')
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, 'author_id'), True)
self.assertIs(sql.references_column(table, 'title'), True)
self.assertIs(sql.references_column(table, 'pub_date'), True)
self.assertIn('LOWER(%s)' % editor.quote_name('title'), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
@isolate_apps('schema')
def test_func_index_f_decimalfield(self):
class Node(Model):
value = DecimalField(max_digits=5, decimal_places=2)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Node)
index = Index(F('value'), name='func_f_decimalfield_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Node, index)
sql = index.create_sql(Node, editor)
table = Node._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, 'value'), True)
# SQL doesn't contain casting.
self.assertNotIn('CAST', str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Node, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_cast(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Cast('weight', FloatField()), name='func_cast_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, 'weight'), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_collate(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest(
'This backend does not support case-insensitive collations.'
)
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
index = Index(
Collate(F('title'), collation=collation).desc(),
Collate('slug', collation=collation),
name='func_collate_idx',
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(BookWithSlug, index)
sql = index.create_sql(BookWithSlug, editor)
table = Book._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ['DESC', 'ASC'])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, 'title'), True)
self.assertIs(sql.references_column(table, 'slug'), True)
self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
@skipIfDBFeature('collate_as_index_expression')
def test_func_index_collate_f_ordered(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest(
'This backend does not support case-insensitive collations.'
)
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(
Collate(F('name').desc(), collation=collation),
name='func_collate_f_desc_idx',
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ['DESC'])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, 'name'), True)
self.assertIn('COLLATE %s' % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_calc(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F('height') / (F('weight') + Value(5)), name='func_calc_idx')
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns and expressions.
self.assertIs(sql.references_column(table, 'height'), True)
self.assertIs(sql.references_column(table, 'weight'), True)
sql = str(sql)
self.assertIs(
sql.index(editor.quote_name('height')) <
sql.index('/') <
sql.index(editor.quote_name('weight')) <
sql.index('+') <
sql.index('5'),
True,
)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field')
@isolate_apps('schema')
def test_func_index_json_key_transform(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index('field__some_key', name='func_json_key_idx')
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, 'field'), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature('supports_expression_indexes', 'supports_json_field')
@isolate_apps('schema')
def test_func_index_json_key_transform_cast(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index(
Cast(KeyTextTransform('some_key', 'field'), IntegerField()),
name='func_json_key_cast_idx',
)
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, 'field'), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipIfDBFeature('supports_expression_indexes')
def test_func_index_unsupported(self):
# Index is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F('name'), name='random_idx')
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_index(Author, index))
self.assertIsNone(editor.remove_index(Author, index))
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_nonexistent_field(self):
index = Index(Lower('nonexistent'), name='func_nonexistent_idx')
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_index(Author, index)
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_index_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Random(), name='func_random_idx')
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_index(Author, index)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id')
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
'id',
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug')
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature('can_rollback_ddl')
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''})
@skipUnlessDBFeature('supports_foreign_keys', 'indexes_foreign_keys')
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
        Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature('supports_foreign_keys')
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
        Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Add a second FK, this would fail due to long ref name before the fix
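        # The generated constraint/index name is expected to be truncated and
        # hashed so that it fits within connection.ops.max_name_length().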
new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something")
new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk")
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
@isolate_apps('schema')
@skipUnlessDBFeature('supports_foreign_keys')
def test_add_foreign_key_quoted_db_table(self):
class Author(Model):
class Meta:
db_table = '"table_author_double_quoted"'
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
if connection.vendor == 'mysql':
self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"')
else:
self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted')
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id'])
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
Tries creating a model's table, and then deleting it when it has a
SQL reserved name.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail("Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, field_class=IntegerField, **kwargs):
kwargs['db_column'] = "CamelCase"
field = field_class(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = 'CamelCaseIndex'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
"condition": "",
"include": "",
}
)
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
constraint_name = 'CamelCaseUniqConstraint'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field], constraint_name))
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
if editor.sql_create_fk:
constraint_name = 'CamelCaseFKConstraint'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_fk % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add new CharField to ensure default will be used from effective_default
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '')
def test_add_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default='surname default')
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], 'surname default')
# And that the default is no longer set in the database.
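        # Django is expected to set a database DEFAULT only long enough to
        # backfill existing rows when the column is added, then drop it
        # again, since defaults are otherwise handled in Python by the ORM.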
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_add_field_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable CharField with a default.
new_field = CharField(max_length=15, blank=True, null=True, default='surname')
new_field.set_attributes_from_name('surname')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name='Anonymous1')
with connection.cursor() as cursor:
cursor.execute('SELECT surname FROM schema_author;')
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
'schema_author',
)
if f.name == 'surname'
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ['NULL', None])
def test_add_textfield_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable TextField with a default.
new_field = TextField(blank=True, null=True, default='text')
new_field.set_attributes_from_name('description')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name='Anonymous1')
with connection.cursor() as cursor:
cursor.execute('SELECT description FROM schema_author;')
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
'schema_author',
)
if f.name == 'description'
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ['NULL', None])
def test_alter_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field('height')
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_doesnt_perform_queries(self):
"""
        No queries are performed if a field's default changes and the field
        isn't changing from null to non-null.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
old_field = AuthorWithDefaultHeight._meta.get_field('height')
new_default = old_field.default * 2
new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_fk_attributes_noop(self):
"""
No queries are performed when changing field attributes that don't
affect the schema.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_field = Book._meta.get_field('author')
new_field = ForeignKey(
Author,
blank=True,
editable=False,
error_messages={'invalid': 'error message'},
help_text='help text',
limit_choices_to={'limit': 'choice'},
on_delete=PROTECT,
related_name='related_name',
related_query_name='related_query_name',
validators=[lambda x: x],
verbose_name='verbose name',
)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, new_field, old_field, strict=True)
def test_add_textfield_unhashable_default(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes; one for like operator.
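        # (On PostgreSQL the extra "_like" index uses varchar_pattern_ops so
        # LIKE/startswith lookups can use an index under non-C collations.)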
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'],
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes; one for like operator.
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq']
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field('info')
new_field = TextField(db_index=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, 'info'),
['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to add db_index=True
old_field = Tag._meta.get_field('slug')
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field('weight')
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name('weight')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('node_id')
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name('id')
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table)
@mock.patch('django.db.backends.base.schema.datetime')
@mock.patch('django.db.backends.base.schema.timezone')
def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz):
"""
effective_default() should be used for DateField, DateTimeField, and
TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check auto_now/auto_now_add attributes are not defined
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name('dob_auto_now')
self.check_added_field_default(
editor, Author, dob_auto_now, 'dob_auto_now', now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name('dob_auto_now_add')
self.check_added_field_default(
editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name('dtob_auto_now')
self.check_added_field_default(
editor, Author, dtob_auto_now, 'dtob_auto_now', now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add')
self.check_added_field_default(
editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name('tob_auto_now')
self.check_added_field_default(
editor, Author, tob_auto_now, 'tob_auto_now', now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name('tob_auto_now_add')
self.check_added_field_default(
editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(),
cast_function=lambda x: x.time(),
)
def test_namespaced_db_table_create_index_name(self):
"""
Table names are stripped of their namespace/schema before being used to
generate index names.
"""
with connection.schema_editor() as editor:
max_name_length = connection.ops.max_name_length() or 200
namespace = 'n' * max_name_length
table_name = 't' * max_name_length
namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
self.assertEqual(
editor._create_index_name(table_name, []),
editor._create_index_name(namespaced_table_name, []),
)
@unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax')
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name='Test Name')
student = Student.objects.create(name='Some man')
doc.students.add(student)
@isolate_apps('schema')
@unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific db_table syntax.')
def test_namespaced_db_table_foreign_key_reference(self):
with connection.cursor() as cursor:
cursor.execute('CREATE SCHEMA django_schema_tests')
def delete_schema():
with connection.cursor() as cursor:
cursor.execute('DROP SCHEMA django_schema_tests CASCADE')
self.addCleanup(delete_schema)
class Author(Model):
class Meta:
app_label = 'schema'
class Book(Model):
class Meta:
app_label = 'schema'
db_table = '"django_schema_tests"."schema_book"'
author = ForeignKey(Author, CASCADE)
author.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.add_field(Book, author)
def test_rename_table_renames_deferred_sql_references(self):
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author')
editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book')
try:
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_table('schema_author'), False)
self.assertIs(statement.references_table('schema_book'), False)
finally:
editor.alter_db_table(Author, 'schema_renamed_author', 'schema_author')
editor.alter_db_table(Author, 'schema_renamed_book', 'schema_book')
def test_rename_column_renames_deferred_sql_references(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_title = Book._meta.get_field('title')
new_title = CharField(max_length=100, db_index=True)
new_title.set_attributes_from_name('renamed_title')
editor.alter_field(Book, old_title, new_title)
old_author = Book._meta.get_field('author')
new_author = ForeignKey(Author, CASCADE)
new_author.set_attributes_from_name('renamed_author')
editor.alter_field(Book, old_author, new_author)
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_column('book', 'title'), False)
self.assertIs(statement.references_column('book', 'author_id'), False)
@isolate_apps('schema')
def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
"""
        Foreign keys without a database-level constraint don't prevent the
        field they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Foo, Foo._meta.get_field('field'), new_field)
@isolate_apps('schema')
def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
"""
        Foreign keys without a database-level constraint don't prevent the
        table they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table')
Foo._meta.db_table = 'renamed_table'
@isolate_apps('schema')
@skipUnlessDBFeature('supports_collation_on_charfield')
def test_db_collation_charfield(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest('Language collations are not supported.')
class Foo(Model):
field = CharField(max_length=255, db_collation=collation)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, 'field'),
collation,
)
@isolate_apps('schema')
@skipUnlessDBFeature('supports_collation_on_textfield')
def test_db_collation_textfield(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest('Language collations are not supported.')
class Foo(Model):
field = TextField(db_collation=collation)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, 'field'),
collation,
)
@skipUnlessDBFeature('supports_collation_on_charfield')
def test_add_field_db_collation(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest('Language collations are not supported.')
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name('alias')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns['alias'][0],
connection.features.introspected_field_types['CharField'],
)
self.assertEqual(columns['alias'][1][8], collation)
@skipUnlessDBFeature('supports_collation_on_charfield')
def test_alter_field_db_collation(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest('Language collations are not supported.')
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name('name')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, 'name'),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertIsNone(self.get_column_collation(Author._meta.db_table, 'name'))
@skipUnlessDBFeature('supports_collation_on_charfield')
def test_alter_field_type_and_db_collation(self):
collation = connection.features.test_collations.get('non_default')
if not collation:
self.skipTest('Language collations are not supported.')
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field('info')
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name('info')
new_field.model = Note
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(
columns['info'][0],
connection.features.introspected_field_types['CharField'],
)
self.assertEqual(columns['info'][1][8], collation)
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns['info'][0], 'TextField')
self.assertIsNone(columns['info'][1][8])
@skipUnlessDBFeature(
'supports_collation_on_charfield',
'supports_non_deterministic_collations',
)
def test_ci_cs_db_collation(self):
cs_collation = connection.features.test_collations.get('cs')
ci_collation = connection.features.test_collations.get('ci')
try:
if connection.vendor == 'mysql':
cs_collation = 'latin1_general_cs'
elif connection.vendor == 'postgresql':
cs_collation = 'en-x-icu'
with connection.cursor() as cursor:
cursor.execute(
"CREATE COLLATION IF NOT EXISTS case_insensitive "
"(provider = icu, locale = 'und-u-ks-level2', "
"deterministic = false)"
)
ci_collation = 'case_insensitive'
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Case-insensitive collation.
old_field = Author._meta.get_field('name')
new_field_ci = CharField(max_length=255, db_collation=ci_collation)
new_field_ci.set_attributes_from_name('name')
new_field_ci.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field_ci, strict=True)
Author.objects.create(name='ANDREW')
self.assertIs(Author.objects.filter(name='Andrew').exists(), True)
# Case-sensitive collation.
new_field_cs = CharField(max_length=255, db_collation=cs_collation)
new_field_cs.set_attributes_from_name('name')
new_field_cs.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
self.assertIs(Author.objects.filter(name='Andrew').exists(), False)
finally:
if connection.vendor == 'postgresql':
with connection.cursor() as cursor:
cursor.execute('DROP COLLATION IF EXISTS case_insensitive')
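# Illustrative sketch, not part of the original test module: the collation
# tests above all follow the same pattern -- build a replacement field with
# db_collation set and hand it to the schema editor. A minimal, hedged
# version of that pattern (model and field names are placeholders):
from django.db import connection
from django.db.models import CharField


def alter_name_collation(model, collation):
    """Swap the collation of a model's 'name' column via the schema editor."""
    old_field = model._meta.get_field('name')
    new_field = CharField(max_length=255, db_collation=collation)
    new_field.set_attributes_from_name('name')
    new_field.model = model
    with connection.schema_editor() as editor:
        editor.alter_field(model, old_field, new_field, strict=True)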
|
e2b27a7472233ca71b9311269ac73303d16687c7b233f32516bcd9812487f4da | from django.apps.registry import Apps
from django.db import models
# Because we want to test creation and deletion of these as separate things,
# these models are all inserted into a separate Apps so the main test
# runner doesn't migrate them.
new_apps = Apps()
class Author(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True)
weight = models.IntegerField(null=True, blank=True)
uuid = models.UUIDField(null=True)
class Meta:
apps = new_apps
class AuthorCharFieldWithIndex(models.Model):
char_field = models.CharField(max_length=31, db_index=True)
class Meta:
apps = new_apps
class AuthorTextFieldWithIndex(models.Model):
text_field = models.TextField(db_index=True)
class Meta:
apps = new_apps
class AuthorWithDefaultHeight(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True, default=42)
class Meta:
apps = new_apps
class AuthorWithEvenLongerName(models.Model):
name = models.CharField(max_length=255)
height = models.PositiveIntegerField(null=True, blank=True)
class Meta:
apps = new_apps
class AuthorWithIndexedName(models.Model):
name = models.CharField(max_length=255, db_index=True)
class Meta:
apps = new_apps
class AuthorWithUniqueName(models.Model):
name = models.CharField(max_length=255, unique=True)
class Meta:
apps = new_apps
class AuthorWithIndexedNameAndBirthday(models.Model):
name = models.CharField(max_length=255)
birthday = models.DateField()
class Meta:
apps = new_apps
index_together = [['name', 'birthday']]
class AuthorWithUniqueNameAndBirthday(models.Model):
name = models.CharField(max_length=255)
birthday = models.DateField()
class Meta:
apps = new_apps
unique_together = [['name', 'birthday']]
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
# tags = models.ManyToManyField("Tag", related_name="books")
class Meta:
apps = new_apps
class BookWeak(models.Model):
author = models.ForeignKey(Author, models.CASCADE, db_constraint=False)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
class BookWithLongName(models.Model):
author_foreign_key_with_really_long_field_name = models.ForeignKey(
AuthorWithEvenLongerName,
models.CASCADE,
)
class Meta:
apps = new_apps
class BookWithO2O(models.Model):
author = models.OneToOneField(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
db_table = "schema_book"
class BookWithSlug(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
slug = models.CharField(max_length=20, unique=True)
class Meta:
apps = new_apps
db_table = "schema_book"
class BookWithoutAuthor(models.Model):
title = models.CharField(max_length=100, db_index=True)
pub_date = models.DateTimeField()
class Meta:
apps = new_apps
db_table = "schema_book"
class BookForeignObj(models.Model):
title = models.CharField(max_length=100, db_index=True)
author_id = models.IntegerField()
class Meta:
apps = new_apps
class IntegerPK(models.Model):
i = models.IntegerField(primary_key=True)
j = models.IntegerField(unique=True)
class Meta:
apps = new_apps
db_table = "INTEGERPK" # uppercase to ensure proper quoting
class Note(models.Model):
info = models.TextField()
address = models.TextField(null=True)
class Meta:
apps = new_apps
class NoteRename(models.Model):
detail_info = models.TextField()
class Meta:
apps = new_apps
db_table = "schema_note"
class Tag(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
class TagIndexed(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
index_together = [["slug", "title"]]
class TagM2MTest(models.Model):
title = models.CharField(max_length=255)
slug = models.SlugField(unique=True)
class Meta:
apps = new_apps
class TagUniqueRename(models.Model):
title = models.CharField(max_length=255)
slug2 = models.SlugField(unique=True)
class Meta:
apps = new_apps
db_table = "schema_tag"
# Based on tests/reserved_names/models.py
class Thing(models.Model):
when = models.CharField(max_length=1, primary_key=True)
class Meta:
apps = new_apps
db_table = 'drop'
def __str__(self):
return self.when
class UniqueTest(models.Model):
year = models.IntegerField()
slug = models.SlugField(unique=False)
class Meta:
apps = new_apps
unique_together = ["year", "slug"]
class Node(models.Model):
node_id = models.AutoField(primary_key=True)
parent = models.ForeignKey('self', models.CASCADE, null=True, blank=True)
class Meta:
apps = new_apps
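# Illustrative sketch, not part of the original module: because the models
# above live in the isolated `new_apps` registry, the schema tests create and
# drop their tables explicitly with the schema editor instead of relying on
# migrations. Roughly:
from django.db import connection


def create_then_drop(model):
    """Create a table for one of the isolated models, then drop it again."""
    with connection.schema_editor() as editor:
        editor.create_model(model)
    # ... a test would exercise the table here ...
    with connection.schema_editor() as editor:
        editor.delete_model(model)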
|
2b3903c97b89580e839de813ceafc61443d6838b35811daec61ee0d4c72b595a | from functools import partial
from django.db import models
from django.db.models.fields.related import (
RECURSIVE_RELATIONSHIP_CONSTANT, ManyToManyDescriptor, RelatedField,
create_many_to_many_intermediary_model,
)
class CustomManyToManyField(RelatedField):
"""
    Ticket #24104 -- provide a custom many-to-many field that is not a
    subclass of ManyToManyField.
"""
many_to_many = True
def __init__(self, to, db_constraint=True, swappable=True, related_name=None, related_query_name=None,
limit_choices_to=None, symmetrical=None, through=None, through_fields=None, db_table=None, **kwargs):
try:
to._meta
except AttributeError:
to = str(to)
kwargs['rel'] = models.ManyToManyRel(
self, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
symmetrical=symmetrical if symmetrical is not None else (to == RECURSIVE_RELATIONSHIP_CONSTANT),
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.swappable = swappable
self.db_table = db_table
if kwargs['rel'].through is not None and self.db_table is not None:
raise ValueError(
'Cannot specify a db_table if an intermediary model is used.'
)
super().__init__(
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
**kwargs,
)
def contribute_to_class(self, cls, name, **kwargs):
if self.remote_field.symmetrical and (
self.remote_field.model == "self" or self.remote_field.model == cls._meta.object_name):
self.remote_field.related_name = "%s_rel_+" % name
super().contribute_to_class(cls, name, **kwargs)
if not self.remote_field.through and not cls._meta.abstract and not cls._meta.swapped:
self.remote_field.through = create_many_to_many_intermediary_model(self, cls)
setattr(cls, self.name, ManyToManyDescriptor(self.remote_field))
self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta)
def get_internal_type(self):
return 'ManyToManyField'
    # Copy these methods from ManyToManyField because they don't call super() internally.
contribute_to_related_class = models.ManyToManyField.__dict__['contribute_to_related_class']
_get_m2m_attr = models.ManyToManyField.__dict__['_get_m2m_attr']
_get_m2m_reverse_attr = models.ManyToManyField.__dict__['_get_m2m_reverse_attr']
_get_m2m_db_table = models.ManyToManyField.__dict__['_get_m2m_db_table']
class InheritedManyToManyField(models.ManyToManyField):
pass
class MediumBlobField(models.BinaryField):
"""
A MySQL BinaryField that uses a different blob size.
"""
def db_type(self, connection):
return 'MEDIUMBLOB'
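# Illustrative sketch, not part of the original module: attaching the field
# above to a throwaway model. The isolated Apps() registry and the explicit
# app_label are assumptions made here so nothing leaks into migrations.
from django.apps.registry import Apps

_sketch_apps = Apps()


class AttachmentSketch(models.Model):
    payload = MediumBlobField(null=True)  # emitted as MEDIUMBLOB on MySQL

    class Meta:
        apps = _sketch_apps
        app_label = 'schema'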
|
c0a710c26e04e3a18a495d718702da91ebede58d37fb220c786355e85ca3bffd | from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='secret')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInline(TestDataMixin, TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.holder = Holder.objects.create(dummy=13)
Inner.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.client.force_login(self.superuser)
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(self.holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author-book relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_inline_column_css_class(self):
"""
Field names are included in the context to output a field-specific
CSS class name in the column headers.
"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
text_field, call_me_field = list(response.context['inline_admin_formset'].fields())
# Editable field.
self.assertEqual(text_field['name'], 'text')
self.assertContains(response, '<th class="column-text required">')
# Read-only field.
self.assertEqual(call_me_field['name'], 'call_me')
self.assertContains(response, '<th class="column-call_me">')
def test_custom_form_tabular_inline_label(self):
"""
A model form with a form field specified (TitleForm.title1) should have
its label rendered in the tabular inline.
"""
response = self.client.get(reverse('admin:admin_inlines_titlecollection_add'))
self.assertContains(response, '<th class="column-title1 required">Title1</th>', html=True)
def test_custom_form_tabular_inline_extra_field_label(self):
response = self.client.get(reverse('admin:admin_inlines_outfititem_add'))
_, extra_field = list(response.context['inline_admin_formset'].fields())
self.assertEqual(extra_field['label'], 'Extra field')
def test_non_editable_custom_form_tabular_inline_extra_field_label(self):
response = self.client.get(reverse('admin:admin_inlines_chapter_add'))
_, extra_field = list(response.context['inline_admin_formset'].fields())
self.assertEqual(extra_field['label'], 'Extra field')
def test_custom_form_tabular_inline_overridden_label(self):
"""
SomeChildModelForm.__init__() overrides the label of a form field.
That label is displayed in the TabularInline.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
field = list(response.context['inline_admin_formset'].fields())[0]
self.assertEqual(field['label'], 'new label')
self.assertContains(response, '<th class="column-name required">New label</th>', html=True)
def test_tabular_non_field_errors(self):
"""
non_field_errors are displayed correctly, including the correct value
for colspan.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(
response,
'<tr class="row-form-errors"><td colspan="4"><ul class="errorlist nonfield">'
'<li>The two titles must be the same</li></ul></td></tr>'
)
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
# Identically named callable isn't present in the parent ModelAdmin,
# rendering of the add view shouldn't explode
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
# View should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="chapter_set-group"'
)
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
# Add parent object view should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="question_set-group"'
)
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
The inlines' model field help texts are displayed when using both the
stacked and tabular layouts.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<div class="help">Awesome stacked help text is awesome.</div>', 4)
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Awesome tabular help text is awesome.)" '
'title="Awesome tabular help text is awesome.">',
1
)
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text for ReadOnlyInline)" '
'title="Help text for ReadOnlyInline">',
1
)
def test_tabular_model_form_meta_readonly_field(self):
"""
Tabular inlines use ModelForm.Meta.help_texts and labels for read-only
fields.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text from ModelForm.Meta)" '
'title="Help text from ModelForm.Meta">'
)
self.assertContains(response, 'Label from ModelForm.Meta')
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertInHTML(
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1">',
response.rendered_content,
)
def test_non_related_name_inline(self):
"""
Multiple inlines with related_name='+' have correct form prefixes.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response, '<input type="hidden" name="-1-0-id" id="id_-1-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-1-0-name" type="text" class="vTextField" name="-1-0-name" maxlength="100">',
html=True
)
self.assertContains(response, '<input type="hidden" name="-2-0-id" id="id_-2-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-2-0-name" type="text" class="vTextField" name="-2-0-name" maxlength="100">',
html=True
)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
The "View on Site" link is correct for locales that use thousand
separators.
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
The "View on Site" link is correct for models with a custom primary key
field.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
An object can be created with inlines when it inherits another class.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = (
'<input id="id_binarytree_set-MAX_NUM_FORMS" '
'name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d">'
)
# The total number of forms will remain the same in either case
total_forms_hidden = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="2">'
)
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertInHTML(max_forms_input % 3, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertInHTML(max_forms_input % 2, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
def test_min_num(self):
"""
min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2">'
)
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="5">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms, response.rendered_content)
self.assertInHTML(total_forms, response.rendered_content)
def test_custom_min_num(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d">'
)
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms % 2, response.rendered_content)
self.assertInHTML(total_forms % 5, response.rendered_content)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertInHTML(min_forms % 5, response.rendered_content)
self.assertInHTML(total_forms % 8, response.rendered_content)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbook_set-0-rand_pk" '
'name="nonautopkbook_set-0-rand_pk" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" '
'name="nonautopkbook_set-2-0-rand_pk" type="hidden">',
html=True
)
def test_inline_nonauto_noneditable_inherited_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-0-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-0-nonautopkbook_ptr" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-2-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-2-nonautopkbook_ptr" type="hidden">',
html=True
)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" '
'name="editablepkbook_set-0-manual_pk" type="number">',
html=True, count=1
)
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" '
'name="editablepkbook_set-2-0-manual_pk" type="number">',
html=True, count=1
)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_noneditable_inline_has_field_inputs(self):
"""Inlines without change permission shows field inputs on add form."""
response = self.client.get(reverse('admin:admin_inlines_novelreadonlychapter_add'))
self.assertContains(
response,
'<input type="text" name="chapter_set-0-name" '
'class="vTextField" maxlength="40" id="id_chapter_set-0-name">',
html=True
)
def test_inlines_plural_heading_foreign_key(self):
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<h2>Inner4 stackeds</h2>', html=True)
self.assertContains(response, '<h2>Inner4 tabulars</h2>', html=True)
def test_inlines_singular_heading_one_to_one(self):
response = self.client.get(reverse('admin:admin_inlines_person_add'))
self.assertContains(response, '<h2>Author</h2>', html=True) # Tabular.
self.assertContains(response, '<h2>Fashionista</h2>', html=True) # Stacked.
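# Illustrative sketch, not part of the original test module: the options
# exercised by TestInline are declared on InlineModelAdmin subclasses in this
# app's admin.py, roughly along these lines (names here are placeholders).
class InnerInlineSketch(TabularInline):
    model = Inner
    can_delete = False       # surfaced to the formset (see test_can_delete)
    show_change_link = True  # renders the "inlinechangelink" anchor for registered models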
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineMedia(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertEqual(
response.context['inline_admin_formsets'][0].media._js,
[
'admin/js/vendor/jquery/jquery.min.js',
'my_awesome_inline_scripts.js',
'custom_number.js',
'admin/js/jquery.init.js',
'admin/js/inlines.js',
]
)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
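# Illustrative sketch, not part of the original test module: the asset lists
# asserted above come from Media declarations on the inline (and, for the
# "all" case, on the parent ModelAdmin too). The file names simply mirror the
# assertions.
class InlineWithMediaSketch(TabularInline):
    model = Inner3

    class Media:
        js = ['my_awesome_inline_scripts.js', 'custom_number.js']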
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
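# Illustrative sketch, not part of the original test module: the message
# asserted above exists because FootNote protects its chapter
# (on_delete=PROTECT); outside the admin the same constraint surfaces as
# ProtectedError.
from django.db.models import ProtectedError


def try_delete_chapter(chapter):
    """Hedged helper, unused by the tests: return False if deletion is protected."""
    try:
        chapter.delete()
    except ProtectedError:
        return False
    return True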
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
@classmethod
def setUpTestData(cls):
cls.user = User(username='admin', is_staff=True, is_active=True)
cls.user.set_password('secret')
cls.user.save()
cls.author_ct = ContentType.objects.get_for_model(Author)
cls.holder_ct = ContentType.objects.get_for_model(Holder2)
cls.book_ct = ContentType.objects.get_for_model(Book)
cls.inner_ct = ContentType.objects.get_for_model(Inner2)
        # The user always has permission to add and change Authors and
        # Holders, the main (parent) models of the inlines. Permissions on
        # the inlines vary per test.
permission = Permission.objects.get(codename='add_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
cls.book = author.books.create(name='The inline Book')
cls.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=cls.book)
cls.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
cls.holder = Holder2.objects.create(dummy=13)
cls.inner2 = Inner2.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(self.holder.id,))
self.client.force_login(self.user)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_view_only_perm(self):
permission = Permission.objects.get(codename='view_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# View-only inlines. (It could be nicer to hide the empty, non-editable
# inlines on the add page.)
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(
response,
'<input type="hidden" name="Author_books-TOTAL_FORMS" value="0" '
'id="id_Author_books-TOTAL_FORMS">',
html=True,
)
self.assertNotContains(response, 'Add another Author-Book Relationship')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS">', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_view_only_perm(self):
permission = Permission.objects.get(codename='view_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# View-only inlines.
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(
response,
'<input type="hidden" name="Author_books-TOTAL_FORMS" value="1" '
'id="id_Author_books-TOTAL_FORMS">',
html=True,
)
# The field in the inline is read-only.
self.assertContains(response, '<p>%s</p>' % self.book)
self.assertNotContains(
response,
'<input type="checkbox" name="Author_books-0-DELETE" id="id_Author_books-0-DELETE">',
html=True,
)
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, True)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author-book relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS">', html=True)
self.assertContains(
response,
'<input type="hidden" id="id_Author_books-0-id" value="%i" '
'name="Author_books-0-id">' % self.author_book_auto_m2m_intermediate_id,
html=True
)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="3" '
'name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertNotContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
# Just the one form for existing instances
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
        # MAX_NUM_FORMS is 0, so new forms can't be added
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" value="0" name="inner2_set-MAX_NUM_FORMS">',
html=True
)
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
        # One form for the existing instance, plus three extra for new ones
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
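# Illustrative sketch, not part of the original test module: the defaults
# checked above consult the inline model's add/change/delete/view
# permissions. An individual inline can tighten this further by overriding
# the permission hooks, e.g. a strictly read-only inline:
class ReadOnlyInner2InlineSketch(TabularInline):
    model = Inner2

    def has_add_permission(self, request, obj=None):
        return False

    def has_change_permission(self, request, obj=None):
        return False

    def has_delete_permission(self, request, obj=None):
        return False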
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestReadOnlyChangeViewInlinePermissions(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user('testing', password='password', is_staff=True)
cls.user.user_permissions.add(
Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll))
)
cls.user.user_permissions.add(
*Permission.objects.filter(
codename__endswith="question", content_type=ContentType.objects.get_for_model(Question)
).values_list('pk', flat=True)
)
cls.poll = Poll.objects.create(name="Survey")
cls.add_url = reverse('admin:admin_inlines_poll_add')
cls.change_url = reverse('admin:admin_inlines_poll_change', args=(cls.poll.id,))
def setUp(self):
self.client.force_login(self.user)
def test_add_url_not_allowed(self):
response = self.client.get(self.add_url)
self.assertEqual(response.status_code, 403)
response = self.client.post(self.add_url, {})
self.assertEqual(response.status_code, 403)
def test_post_to_change_url_not_allowed(self):
response = self.client.post(self.change_url, {})
self.assertEqual(response.status_code, 403)
def test_get_to_change_url_is_allowed(self):
response = self.client.get(self.change_url)
self.assertEqual(response.status_code, 200)
def test_main_model_is_rendered_as_read_only(self):
response = self.client.get(self.change_url)
self.assertContains(
response,
'<div class="readonly">%s</div>' % self.poll.name,
html=True
)
input = '<input type="text" name="name" value="%s" class="vTextField" maxlength="40" required id="id_name">'
self.assertNotContains(
response,
input % self.poll.name,
html=True
)
def test_inlines_are_rendered_as_read_only(self):
question = Question.objects.create(text="How will this be rendered?", poll=self.poll)
response = self.client.get(self.change_url)
self.assertContains(
response,
'<td class="field-text"><p>%s</p></td>' % question.text,
html=True
)
self.assertNotContains(response, 'id="id_question_set-0-text"')
self.assertNotContains(response, 'id="id_related_objs-0-DELETE"')
def test_submit_line_shows_only_close_button(self):
response = self.client.get(self.change_url)
self.assertContains(
response,
'<a href="/admin/admin_inlines/poll/" class="closelink">Close</a>',
html=True
)
delete_link = '<p class="deletelink-box"><a href="/admin/admin_inlines/poll/%s/delete/" class="deletelink">Delete</a></p>' # noqa
self.assertNotContains(
response,
delete_link % self.poll.id,
html=True
)
self.assertNotContains(response, '<input type="submit" value="Save and add another" name="_addanother">')
self.assertNotContains(response, '<input type="submit" value="Save and continue editing" name="_continue">')
def test_inline_delete_buttons_are_not_shown(self):
Question.objects.create(text="How will this be rendered?", poll=self.poll)
response = self.client.get(self.change_url)
self.assertNotContains(
response,
'<input type="checkbox" name="question_set-0-DELETE" id="id_question_set-0-DELETE">',
html=True
)
def test_extra_inlines_are_not_shown(self):
response = self.client.get(self.change_url)
self.assertNotContains(response, 'id="id_question_set-0-text"')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumTestCase.available_apps
def setUp(self):
User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def test_add_stackeds(self):
"""
The "Add another XXX" link correctly adds items to the stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector('%s .inline-deletelink' % inline_id):
delete_link.click()
with self.disable_implicit_wait():
self.assertEqual(rows_length(), 0)
def test_delete_invalid_stacked_inlines(self):
from selenium.common.exceptions import NoSuchElementException
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('#id_inner4stacked_set-4-dummy')), 1)
# Enter some data and click 'Save'.
self.selenium.find_element_by_name('dummy').send_keys('1')
self.selenium.find_element_by_name('inner4stacked_set-0-dummy').send_keys('100')
self.selenium.find_element_by_name('inner4stacked_set-1-dummy').send_keys('101')
self.selenium.find_element_by_name('inner4stacked_set-2-dummy').send_keys('222')
self.selenium.find_element_by_name('inner4stacked_set-3-dummy').send_keys('103')
self.selenium.find_element_by_name('inner4stacked_set-4-dummy').send_keys('222')
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(rows_length(), 5, msg="sanity check")
errorlist = self.selenium.find_element_by_css_selector(
'%s .dynamic-inner4stacked_set .errorlist li' % inline_id
)
self.assertEqual('Please correct the duplicate values below.', errorlist.text)
delete_link = self.selenium.find_element_by_css_selector('#inner4stacked_set-4 .inline-deletelink')
delete_link.click()
self.assertEqual(rows_length(), 4)
with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_css_selector('%s .dynamic-inner4stacked_set .errorlist li' % inline_id)
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database.
self.assertEqual(Inner4Stacked.objects.all().count(), 4)
def test_delete_invalid_tabular_inlines(self):
from selenium.common.exceptions import NoSuchElementException
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4tabular_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4tabular_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 tabular')
add_button.click()
add_button.click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('#id_inner4tabular_set-4-dummy')), 1)
# Enter some data and click 'Save'.
self.selenium.find_element_by_name('dummy').send_keys('1')
self.selenium.find_element_by_name('inner4tabular_set-0-dummy').send_keys('100')
self.selenium.find_element_by_name('inner4tabular_set-1-dummy').send_keys('101')
self.selenium.find_element_by_name('inner4tabular_set-2-dummy').send_keys('222')
self.selenium.find_element_by_name('inner4tabular_set-3-dummy').send_keys('103')
self.selenium.find_element_by_name('inner4tabular_set-4-dummy').send_keys('222')
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(rows_length(), 5, msg="sanity check")
# Non-field errorlist is in its own <tr> just before
# tr#inner4tabular_set-3:
errorlist = self.selenium.find_element_by_css_selector(
'%s #inner4tabular_set-3 + .row-form-errors .errorlist li' % inline_id
)
self.assertEqual('Please correct the duplicate values below.', errorlist.text)
delete_link = self.selenium.find_element_by_css_selector('#inner4tabular_set-4 .inline-deletelink')
delete_link.click()
self.assertEqual(rows_length(), 4)
with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_css_selector('%s .dynamic-inner4tabular_set .errorlist li' % inline_id)
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database.
self.assertEqual(Inner4Tabular.objects.all().count(), 4)
def test_add_inlines(self):
"""
The "Add another XXX" link correctly adds items to the inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# There's only one inline to start with and it has the correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# The inline has been added, it has the right id, and it contains the
# correct fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_add_inline_link_absent_for_view_only_parent_model(self):
from selenium.common.exceptions import NoSuchElementException
user = User.objects.create_user('testing', password='password', is_staff=True)
user.user_permissions.add(
Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll))
)
user.user_permissions.add(
*Permission.objects.filter(
codename__endswith="question", content_type=ContentType.objects.get_for_model(Question)
).values_list('pk', flat=True)
)
self.admin_login(username='testing', password='password')
poll = Poll.objects.create(name="Survey")
change_url = reverse('admin:admin_inlines_poll_change', args=(poll.id,))
self.selenium.get(self.live_server_url + change_url)
with self.disable_implicit_wait():
with self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_link_text('Add another Question')
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# The rows are gone and the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_collapsed_inlines(self):
# Collapsed inlines have SHOW/HIDE links.
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_author_add'))
        # One field is in a stacked inline, the other in a tabular one.
test_fields = ['#id_nonautopkbook_set-0-title', '#id_nonautopkbook_set-2-0-title']
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 3)
for show_index, field_name in enumerate(test_fields, 0):
self.wait_until_invisible(field_name)
show_links[show_index].click()
self.wait_until_visible(field_name)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields, 0):
self.wait_until_visible(field_name)
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
def test_added_stacked_inline_with_collapsed_fields(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_teacher_add'))
self.selenium.find_element_by_link_text('Add another Child').click()
test_fields = ['#id_child_set-0-name', '#id_child_set-1-name']
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 2)
for show_index, field_name in enumerate(test_fields, 0):
self.wait_until_invisible(field_name)
show_links[show_index].click()
self.wait_until_visible(field_name)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields, 0):
self.wait_until_visible(field_name)
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
def assertBorder(self, element, border):
width, style, color = border.split(' ')
border_properties = [
'border-bottom-%s',
'border-left-%s',
'border-right-%s',
'border-top-%s',
]
for prop in border_properties:
prop = prop % 'width'
self.assertEqual(element.value_of_css_property(prop), width)
for prop in border_properties:
prop = prop % 'style'
self.assertEqual(element.value_of_css_property(prop), style)
# Convert hex color to rgb.
self.assertRegex(color, '#[0-9a-f]{6}')
r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16)
# The value may be expressed as either rgb() or rgba() depending on the
# browser.
colors = [
'rgb(%d, %d, %d)' % (r, g, b),
'rgba(%d, %d, %d, 1)' % (r, g, b),
]
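        # For example, '#ba2121' converts to r=186, g=33, b=33, so both
        # 'rgb(186, 33, 33)' and 'rgba(186, 33, 33, 1)' are acceptable.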
for prop in border_properties:
prop = prop % 'color'
self.assertIn(element.value_of_css_property(prop), colors)
def test_inline_formset_error_input_border(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add'))
self.wait_until_visible('#id_dummy')
self.selenium.find_element_by_id('id_dummy').send_keys(1)
fields = ['id_inner5stacked_set-0-dummy', 'id_inner5tabular_set-0-dummy']
show_links = self.selenium.find_elements_by_link_text('SHOW')
for show_index, field_name in enumerate(fields):
show_links[show_index].click()
self.wait_until_visible('#' + field_name)
self.selenium.find_element_by_id(field_name).send_keys(1)
        # Before saving, all inputs have the default border.
for inline in ('stacked', 'tabular'):
for field_name in ('name', 'select', 'text'):
element_id = 'id_inner5%s_set-0-%s' % (inline, field_name)
self.assertBorder(
self.selenium.find_element_by_id(element_id),
'1px solid #cccccc',
)
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        # Test the red border around inputs via CSS selectors.
stacked_selectors = ['.errors input', '.errors select', '.errors textarea']
for selector in stacked_selectors:
self.assertBorder(
self.selenium.find_element_by_css_selector(selector),
'1px solid #ba2121',
)
tabular_selectors = [
'td ul.errorlist + input', 'td ul.errorlist + select', 'td ul.errorlist + textarea'
]
for selector in tabular_selectors:
self.assertBorder(
self.selenium.find_element_by_css_selector(selector),
'1px solid #ba2121',
)
def test_inline_formset_error(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add'))
stacked_inline_formset_selector = 'div#inner5stacked_set-group fieldset.module.collapse'
tabular_inline_formset_selector = 'div#inner5tabular_set-group fieldset.module.collapse'
# Inlines without errors, both inlines collapsed
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector + '.collapsed')), 1
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector + '.collapsed')), 1
)
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 2)
# Inlines with errors, both inlines expanded
test_fields = ['#id_inner5stacked_set-0-dummy', '#id_inner5tabular_set-0-dummy']
for show_index, field_name in enumerate(test_fields):
show_links[show_index].click()
self.wait_until_visible(field_name)
self.selenium.find_element_by_id(field_name[1:]).send_keys(1)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields):
hide_link = hide_links[hide_index]
self.selenium.execute_script('window.scrollTo(0, %s);' % hide_link.location['y'])
hide_link.click()
self.wait_until_invisible(field_name)
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector + '.collapsed')), 0
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector + '.collapsed')), 0
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector)), 1
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector)), 1
)
def test_inlines_verbose_name(self):
"""
The item added by the "Add another XXX" link must use the correct
verbose_name in the inline form.
"""
self.admin_login(username='super', password='secret')
# Hide sidebar.
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_course_add'))
toggle_button = self.selenium.find_element_by_css_selector('#toggle-nav-sidebar')
toggle_button.click()
# Each combination of horizontal/vertical filter with stacked/tabular
# inlines.
tests = [
'admin:admin_inlines_course_add',
'admin:admin_inlines_courseproxy_add',
'admin:admin_inlines_courseproxy1_add',
'admin:admin_inlines_courseproxy2_add',
]
css_selector = '.dynamic-class_set#class_set-%s h2'
for url_name in tests:
with self.subTest(url=url_name):
self.selenium.get(self.live_server_url + reverse(url_name))
# First inline shows the verbose_name.
available, chosen = self.selenium.find_elements_by_css_selector(css_selector % 0)
self.assertEqual(available.text, 'AVAILABLE ATTENDANT')
self.assertEqual(chosen.text, 'CHOSEN ATTENDANT')
# Added inline should also have the correct verbose_name.
self.selenium.find_element_by_link_text('Add another Class').click()
available, chosen = self.selenium.find_elements_by_css_selector(css_selector % 1)
self.assertEqual(available.text, 'AVAILABLE ATTENDANT')
self.assertEqual(chosen.text, 'CHOSEN ATTENDANT')
# Third inline should also have the correct verbose_name.
self.selenium.find_element_by_link_text('Add another Class').click()
available, chosen = self.selenium.find_elements_by_css_selector(css_selector % 2)
self.assertEqual(available.text, 'AVAILABLE ATTENDANT')
self.assertEqual(chosen.text, 'CHOSEN ATTENDANT')
|
be13d696f4fa892b9b94e5959bcf376bdc94263a8745bec14a40c654b540b1dd | from unittest import mock
from django.core import checks
from django.core.checks import Error, Warning
from django.db import models
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import (
isolate_apps, modify_settings, override_settings, override_system_checks,
)
class EmptyRouter:
pass
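# EmptyRouter only needs to exist for these tests. A real router would
# implement the optional routing methods, e.g. (a minimal sketch with made-up
# names, not part of the checks framework):
#
#   class TenantRouter:
#       def db_for_read(self, model, **hints):
#           return 'other' if model._meta.app_label == 'basic' else None
#
#       def allow_migrate(self, db, app_label, model_name=None, **hints):
#           return db == 'other' if app_label == 'basic' else None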
@isolate_apps('check_framework', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
class DuplicateDBTableTests(SimpleTestCase):
def test_collision_in_same_app(self):
class Model1(models.Model):
class Meta:
db_table = 'test_table'
class Model2(models.Model):
class Meta:
db_table = 'test_table'
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"db_table 'test_table' is used by multiple models: "
"check_framework.Model1, check_framework.Model2.",
obj='test_table',
id='models.E028',
)
])
@override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter'])
def test_collision_in_same_app_database_routers_installed(self):
class Model1(models.Model):
class Meta:
db_table = 'test_table'
class Model2(models.Model):
class Meta:
db_table = 'test_table'
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Warning(
"db_table 'test_table' is used by multiple models: "
"check_framework.Model1, check_framework.Model2.",
hint=(
'You have configured settings.DATABASE_ROUTERS. Verify '
'that check_framework.Model1, check_framework.Model2 are '
'correctly routed to separate databases.'
),
obj='test_table',
id='models.W035',
)
])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_collision_across_apps(self, apps):
class Model1(models.Model):
class Meta:
app_label = 'basic'
db_table = 'test_table'
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
db_table = 'test_table'
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [
Error(
"db_table 'test_table' is used by multiple models: "
"basic.Model1, check_framework.Model2.",
obj='test_table',
id='models.E028',
)
])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@override_settings(DATABASE_ROUTERS=['check_framework.test_model_checks.EmptyRouter'])
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_collision_across_apps_database_routers_installed(self, apps):
class Model1(models.Model):
class Meta:
app_label = 'basic'
db_table = 'test_table'
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
db_table = 'test_table'
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [
Warning(
"db_table 'test_table' is used by multiple models: "
"basic.Model1, check_framework.Model2.",
hint=(
'You have configured settings.DATABASE_ROUTERS. Verify '
'that basic.Model1, check_framework.Model2 are correctly '
'routed to separate databases.'
),
obj='test_table',
id='models.W035',
)
])
def test_no_collision_for_unmanaged_models(self):
class Unmanaged(models.Model):
class Meta:
db_table = 'test_table'
managed = False
class Managed(models.Model):
class Meta:
db_table = 'test_table'
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
def test_no_collision_for_proxy_models(self):
class Model(models.Model):
class Meta:
db_table = 'test_table'
class ProxyModel(Model):
class Meta:
proxy = True
self.assertEqual(Model._meta.db_table, ProxyModel._meta.db_table)
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
@isolate_apps('check_framework', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
class IndexNameTests(SimpleTestCase):
def test_collision_in_same_model(self):
index = models.Index(fields=['id'], name='foo')
class Model(models.Model):
class Meta:
indexes = [index, index]
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"index name 'foo' is not unique for model check_framework.Model.",
id='models.E029',
),
])
def test_collision_in_different_models(self):
index = models.Index(fields=['id'], name='foo')
class Model1(models.Model):
class Meta:
indexes = [index]
class Model2(models.Model):
class Meta:
indexes = [index]
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"index name 'foo' is not unique among models: "
"check_framework.Model1, check_framework.Model2.",
id='models.E030',
),
])
def test_collision_abstract_model(self):
class AbstractModel(models.Model):
class Meta:
indexes = [models.Index(fields=['id'], name='foo')]
abstract = True
class Model1(AbstractModel):
pass
class Model2(AbstractModel):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"index name 'foo' is not unique among models: "
"check_framework.Model1, check_framework.Model2.",
id='models.E030',
),
])
def test_no_collision_abstract_model_interpolation(self):
class AbstractModel(models.Model):
name = models.CharField(max_length=20)
class Meta:
indexes = [models.Index(fields=['name'], name='%(app_label)s_%(class)s_foo')]
abstract = True
class Model1(AbstractModel):
pass
class Model2(AbstractModel):
pass
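        # The %(app_label)s/%(class)s placeholders resolve per concrete model
        # (roughly 'check_framework_model1_foo' and
        # 'check_framework_model2_foo'), so no collision is reported.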
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_collision_across_apps(self, apps):
index = models.Index(fields=['id'], name='foo')
class Model1(models.Model):
class Meta:
app_label = 'basic'
indexes = [index]
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
indexes = [index]
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [
Error(
"index name 'foo' is not unique among models: basic.Model1, "
"check_framework.Model2.",
id='models.E030',
),
])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_no_collision_across_apps_interpolation(self, apps):
index = models.Index(fields=['id'], name='%(app_label)s_%(class)s_foo')
class Model1(models.Model):
class Meta:
app_label = 'basic'
                indexes = [index]
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
                indexes = [index]
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])
@isolate_apps('check_framework', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
@skipUnlessDBFeature('supports_table_check_constraints')
class ConstraintNameTests(TestCase):
def test_collision_in_same_model(self):
class Model(models.Model):
class Meta:
constraints = [
models.CheckConstraint(check=models.Q(id__gt=0), name='foo'),
models.CheckConstraint(check=models.Q(id__lt=100), name='foo'),
]
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"constraint name 'foo' is not unique for model "
"check_framework.Model.",
id='models.E031',
),
])
def test_collision_in_different_models(self):
constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo')
class Model1(models.Model):
class Meta:
constraints = [constraint]
class Model2(models.Model):
class Meta:
constraints = [constraint]
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"constraint name 'foo' is not unique among models: "
"check_framework.Model1, check_framework.Model2.",
id='models.E032',
),
])
def test_collision_abstract_model(self):
class AbstractModel(models.Model):
class Meta:
constraints = [models.CheckConstraint(check=models.Q(id__gt=0), name='foo')]
abstract = True
class Model1(AbstractModel):
pass
class Model2(AbstractModel):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Error(
"constraint name 'foo' is not unique among models: "
"check_framework.Model1, check_framework.Model2.",
id='models.E032',
),
])
def test_no_collision_abstract_model_interpolation(self):
class AbstractModel(models.Model):
class Meta:
constraints = [
models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo'),
]
abstract = True
class Model1(AbstractModel):
pass
class Model2(AbstractModel):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_collision_across_apps(self, apps):
constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='foo')
class Model1(models.Model):
class Meta:
app_label = 'basic'
constraints = [constraint]
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
constraints = [constraint]
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [
Error(
"constraint name 'foo' is not unique among models: "
"basic.Model1, check_framework.Model2.",
id='models.E032',
),
])
@modify_settings(INSTALLED_APPS={'append': 'basic'})
@isolate_apps('basic', 'check_framework', kwarg_name='apps')
def test_no_collision_across_apps_interpolation(self, apps):
constraint = models.CheckConstraint(check=models.Q(id__gt=0), name='%(app_label)s_%(class)s_foo')
class Model1(models.Model):
class Meta:
app_label = 'basic'
constraints = [constraint]
class Model2(models.Model):
class Meta:
app_label = 'check_framework'
constraints = [constraint]
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])
def mocked_is_overridden(self, setting):
    # Force treating DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' as a
    # non-overridden setting.
return (
setting != 'DEFAULT_AUTO_FIELD' or
self.DEFAULT_AUTO_FIELD != 'django.db.models.AutoField'
)
@mock.patch('django.conf.UserSettingsHolder.is_overridden', mocked_is_overridden)
@override_settings(DEFAULT_AUTO_FIELD='django.db.models.AutoField')
@isolate_apps('check_framework.apps.CheckDefaultPKConfig', attr_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
class ModelDefaultAutoFieldTests(SimpleTestCase):
msg = (
"Auto-created primary key used when not defining a primary key type, "
"by default 'django.db.models.AutoField'."
)
hint = (
"Configure the DEFAULT_AUTO_FIELD setting or the "
"CheckDefaultPKConfig.default_auto_field attribute to point to a "
"subclass of AutoField, e.g. 'django.db.models.BigAutoField'."
)
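    # Outside of these tests, the warning is silenced with either of the two
    # options named in the hint, e.g. (illustrative only, names are made up):
    #
    #   # settings.py
    #   DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
    #
    #   # myapp/apps.py
    #   from django.apps import AppConfig
    #
    #   class MyAppConfig(AppConfig):
    #       default_auto_field = 'django.db.models.BigAutoField'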
def test_auto_created_pk(self):
class Model(models.Model):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Warning(self.msg, hint=self.hint, obj=Model, id='models.W042'),
])
def test_explicit_inherited_pk(self):
class Parent(models.Model):
id = models.AutoField(primary_key=True)
class Child(Parent):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
def test_skipped_on_model_with_invalid_app_label(self):
class Model(models.Model):
class Meta:
app_label = 'invalid_app_label'
self.assertEqual(Model.check(), [])
def test_skipped_on_abstract_model(self):
class Abstract(models.Model):
class Meta:
abstract = True
# Call .check() because abstract models are not registered.
self.assertEqual(Abstract.check(), [])
def test_explicit_inherited_parent_link(self):
class Parent(models.Model):
id = models.AutoField(primary_key=True)
class Child(Parent):
parent_ptr = models.OneToOneField(Parent, models.CASCADE, parent_link=True)
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
def test_auto_created_inherited_pk(self):
class Parent(models.Model):
pass
class Child(Parent):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Warning(self.msg, hint=self.hint, obj=Parent, id='models.W042'),
])
def test_auto_created_inherited_parent_link(self):
class Parent(models.Model):
pass
class Child(Parent):
parent_ptr = models.OneToOneField(Parent, models.CASCADE, parent_link=True)
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Warning(self.msg, hint=self.hint, obj=Parent, id='models.W042'),
])
def test_auto_created_pk_inherited_abstract_parent(self):
class Parent(models.Model):
class Meta:
abstract = True
class Child(Parent):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [
Warning(self.msg, hint=self.hint, obj=Child, id='models.W042'),
])
@override_settings(DEFAULT_AUTO_FIELD='django.db.models.BigAutoField')
def test_default_auto_field_setting(self):
class Model(models.Model):
pass
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
def test_explicit_pk(self):
class Model(models.Model):
id = models.BigAutoField(primary_key=True)
self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
@isolate_apps('check_framework.apps.CheckPKConfig', kwarg_name='apps')
def test_app_default_auto_field(self, apps):
class ModelWithPkViaAppConfig(models.Model):
class Meta:
app_label = 'check_framework.apps.CheckPKConfig'
self.assertEqual(checks.run_checks(app_configs=apps.get_app_configs()), [])
|
f423957ad83f5099794068a1c9748488ce46fefd3ee0385c86d5a58f5767e0b9 | import pathlib
from django.core.checks import Warning
from django.core.checks.caches import (
E001, check_cache_location_not_exposed, check_default_cache_is_configured,
check_file_based_cache_is_absolute,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckCacheSettingsAppDirsTest(SimpleTestCase):
VALID_CACHES_CONFIGURATION = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
INVALID_CACHES_CONFIGURATION = {
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
@override_settings(CACHES=VALID_CACHES_CONFIGURATION)
def test_default_cache_included(self):
"""
Don't error if 'default' is present in CACHES setting.
"""
self.assertEqual(check_default_cache_is_configured(None), [])
@override_settings(CACHES=INVALID_CACHES_CONFIGURATION)
def test_default_cache_not_included(self):
"""
Error if 'default' not present in CACHES setting.
"""
self.assertEqual(check_default_cache_is_configured(None), [E001])
class CheckCacheLocationTest(SimpleTestCase):
warning_message = (
"Your 'default' cache configuration might expose your cache or lead "
"to corruption of your data because its LOCATION %s %s."
)
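    # For example, warning_message % ('is inside', 'MEDIA_ROOT') renders as
    # "Your 'default' cache configuration might expose your cache or lead to
    # corruption of your data because its LOCATION is inside MEDIA_ROOT."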
@staticmethod
def get_settings(setting, cache_path, setting_path):
return {
'CACHES': {
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': cache_path,
},
},
setting: [setting_path] if setting == 'STATICFILES_DIRS' else setting_path,
}
def test_cache_path_matches_media_static_setting(self):
root = pathlib.Path.cwd()
for setting in ('MEDIA_ROOT', 'STATIC_ROOT', 'STATICFILES_DIRS'):
settings = self.get_settings(setting, root, root)
with self.subTest(setting=setting), self.settings(**settings):
msg = self.warning_message % ('matches', setting)
self.assertEqual(check_cache_location_not_exposed(None), [
Warning(msg, id='caches.W002'),
])
def test_cache_path_inside_media_static_setting(self):
root = pathlib.Path.cwd()
for setting in ('MEDIA_ROOT', 'STATIC_ROOT', 'STATICFILES_DIRS'):
settings = self.get_settings(setting, root / 'cache', root)
with self.subTest(setting=setting), self.settings(**settings):
msg = self.warning_message % ('is inside', setting)
self.assertEqual(check_cache_location_not_exposed(None), [
Warning(msg, id='caches.W002'),
])
def test_cache_path_contains_media_static_setting(self):
root = pathlib.Path.cwd()
for setting in ('MEDIA_ROOT', 'STATIC_ROOT', 'STATICFILES_DIRS'):
settings = self.get_settings(setting, root, root / 'other')
with self.subTest(setting=setting), self.settings(**settings):
msg = self.warning_message % ('contains', setting)
self.assertEqual(check_cache_location_not_exposed(None), [
Warning(msg, id='caches.W002'),
])
def test_cache_path_not_conflict(self):
root = pathlib.Path.cwd()
for setting in ('MEDIA_ROOT', 'STATIC_ROOT', 'STATICFILES_DIRS'):
settings = self.get_settings(setting, root / 'cache', root / 'other')
with self.subTest(setting=setting), self.settings(**settings):
self.assertEqual(check_cache_location_not_exposed(None), [])
def test_staticfiles_dirs_prefix(self):
root = pathlib.Path.cwd()
tests = [
(root, root, 'matches'),
(root / 'cache', root, 'is inside'),
(root, root / 'other', 'contains'),
]
for cache_path, setting_path, msg in tests:
settings = self.get_settings(
'STATICFILES_DIRS',
cache_path,
('prefix', setting_path),
)
with self.subTest(path=setting_path), self.settings(**settings):
msg = self.warning_message % (msg, 'STATICFILES_DIRS')
self.assertEqual(check_cache_location_not_exposed(None), [
Warning(msg, id='caches.W002'),
])
def test_staticfiles_dirs_prefix_not_conflict(self):
root = pathlib.Path.cwd()
settings = self.get_settings(
'STATICFILES_DIRS',
root / 'cache',
('prefix', root / 'other'),
)
with self.settings(**settings):
self.assertEqual(check_cache_location_not_exposed(None), [])
class CheckCacheAbsolutePath(SimpleTestCase):
def test_absolute_path(self):
with self.settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': pathlib.Path.cwd() / 'cache',
},
}):
self.assertEqual(check_file_based_cache_is_absolute(None), [])
def test_relative_path(self):
with self.settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': 'cache',
},
}):
self.assertEqual(check_file_based_cache_is_absolute(None), [
Warning(
"Your 'default' cache LOCATION path is relative. Use an "
"absolute path instead.",
id='caches.W003',
),
])
|
62df66ef46af991867a8f0195043d0505c7f5f7373eddf7b6f2eefa97784fa4f | import sys
from io import StringIO
from django.apps import apps
from django.core import checks
from django.core.checks import Error, Warning
from django.core.checks.messages import CheckMessage
from django.core.checks.registry import CheckRegistry
from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import models
from django.test import SimpleTestCase
from django.test.utils import (
isolate_apps, override_settings, override_system_checks,
)
from .models import SimpleModel, my_check
class DummyObj:
def __repr__(self):
return "obj"
class SystemCheckFrameworkTests(SimpleTestCase):
def test_register_and_run_checks(self):
def f(**kwargs):
calls[0] += 1
return [1, 2, 3]
def f2(**kwargs):
return [4]
def f3(**kwargs):
return [5]
calls = [0]
# test register as decorator
registry = CheckRegistry()
registry.register()(f)
registry.register("tag1", "tag2")(f2)
registry.register("tag2", deploy=True)(f3)
# test register as function
registry2 = CheckRegistry()
registry2.register(f)
registry2.register(f2, "tag1", "tag2")
registry2.register(f3, "tag2", deploy=True)
# check results
errors = registry.run_checks()
errors2 = registry2.run_checks()
self.assertEqual(errors, errors2)
self.assertEqual(sorted(errors), [1, 2, 3, 4])
self.assertEqual(calls[0], 2)
errors = registry.run_checks(tags=["tag1"])
errors2 = registry2.run_checks(tags=["tag1"])
self.assertEqual(errors, errors2)
self.assertEqual(sorted(errors), [4])
errors = registry.run_checks(tags=["tag1", "tag2"], include_deployment_checks=True)
errors2 = registry2.run_checks(tags=["tag1", "tag2"], include_deployment_checks=True)
self.assertEqual(errors, errors2)
self.assertEqual(sorted(errors), [4, 5])
def test_register_no_kwargs_error(self):
registry = CheckRegistry()
msg = 'Check functions must accept keyword arguments (**kwargs).'
with self.assertRaisesMessage(TypeError, msg):
@registry.register
def no_kwargs(app_configs, databases):
pass
def test_register_run_checks_non_iterable(self):
registry = CheckRegistry()
@registry.register
def return_non_iterable(**kwargs):
return Error('Message')
msg = (
'The function %r did not return a list. All functions registered '
'with the checks registry must return a list.' % return_non_iterable
)
with self.assertRaisesMessage(TypeError, msg):
registry.run_checks()
class MessageTests(SimpleTestCase):
def test_printing(self):
e = Error("Message", hint="Hint", obj=DummyObj())
expected = "obj: Message\n\tHINT: Hint"
self.assertEqual(str(e), expected)
def test_printing_no_hint(self):
e = Error("Message", obj=DummyObj())
expected = "obj: Message"
self.assertEqual(str(e), expected)
def test_printing_no_object(self):
e = Error("Message", hint="Hint")
expected = "?: Message\n\tHINT: Hint"
self.assertEqual(str(e), expected)
def test_printing_with_given_id(self):
e = Error("Message", hint="Hint", obj=DummyObj(), id="ID")
expected = "obj: (ID) Message\n\tHINT: Hint"
self.assertEqual(str(e), expected)
def test_printing_field_error(self):
field = SimpleModel._meta.get_field('field')
e = Error("Error", obj=field)
expected = "check_framework.SimpleModel.field: Error"
self.assertEqual(str(e), expected)
def test_printing_model_error(self):
e = Error("Error", obj=SimpleModel)
expected = "check_framework.SimpleModel: Error"
self.assertEqual(str(e), expected)
def test_printing_manager_error(self):
manager = SimpleModel.manager
e = Error("Error", obj=manager)
expected = "check_framework.SimpleModel.manager: Error"
self.assertEqual(str(e), expected)
def test_equal_to_self(self):
e = Error("Error", obj=SimpleModel)
self.assertEqual(e, e)
def test_equal_to_same_constructed_check(self):
e1 = Error("Error", obj=SimpleModel)
e2 = Error("Error", obj=SimpleModel)
self.assertEqual(e1, e2)
def test_not_equal_to_different_constructed_check(self):
e1 = Error("Error", obj=SimpleModel)
e2 = Error("Error2", obj=SimpleModel)
self.assertNotEqual(e1, e2)
def test_not_equal_to_non_check(self):
e = Error("Error", obj=DummyObj())
self.assertNotEqual(e, 'a string')
def test_invalid_level(self):
msg = 'The first argument should be level.'
with self.assertRaisesMessage(TypeError, msg):
CheckMessage('ERROR', 'Message')
def simple_system_check(**kwargs):
simple_system_check.kwargs = kwargs
return []
def tagged_system_check(**kwargs):
tagged_system_check.kwargs = kwargs
return [checks.Warning('System Check')]
tagged_system_check.tags = ['simpletag']
def deployment_system_check(**kwargs):
deployment_system_check.kwargs = kwargs
return [checks.Warning('Deployment Check')]
deployment_system_check.tags = ['deploymenttag']
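# Outside of @override_system_checks(), checks like the ones above are normally
# wired up with the register() decorator, e.g. (an illustrative sketch, not
# used by these tests):
#
#   from django.core.checks import Tags, register
#
#   @register(Tags.compatibility, deploy=True)
#   def example_deployment_check(app_configs, **kwargs):
#       return []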
class CheckCommandTests(SimpleTestCase):
def setUp(self):
simple_system_check.kwargs = None
tagged_system_check.kwargs = None
self.old_stdout, self.old_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
def tearDown(self):
sys.stdout, sys.stderr = self.old_stdout, self.old_stderr
@override_system_checks([simple_system_check, tagged_system_check])
def test_simple_call(self):
call_command('check')
self.assertEqual(simple_system_check.kwargs, {'app_configs': None, 'databases': None})
self.assertEqual(tagged_system_check.kwargs, {'app_configs': None, 'databases': None})
@override_system_checks([simple_system_check, tagged_system_check])
def test_given_app(self):
call_command('check', 'auth', 'admin')
auth_config = apps.get_app_config('auth')
admin_config = apps.get_app_config('admin')
self.assertEqual(simple_system_check.kwargs, {'app_configs': [auth_config, admin_config], 'databases': None})
self.assertEqual(tagged_system_check.kwargs, {'app_configs': [auth_config, admin_config], 'databases': None})
@override_system_checks([simple_system_check, tagged_system_check])
def test_given_tag(self):
call_command('check', tags=['simpletag'])
self.assertIsNone(simple_system_check.kwargs)
self.assertEqual(tagged_system_check.kwargs, {'app_configs': None, 'databases': None})
@override_system_checks([simple_system_check, tagged_system_check])
def test_invalid_tag(self):
msg = 'There is no system check with the "missingtag" tag.'
with self.assertRaisesMessage(CommandError, msg):
call_command('check', tags=['missingtag'])
@override_system_checks([simple_system_check])
def test_list_tags_empty(self):
call_command('check', list_tags=True)
self.assertEqual('\n', sys.stdout.getvalue())
@override_system_checks([tagged_system_check])
def test_list_tags(self):
call_command('check', list_tags=True)
self.assertEqual('simpletag\n', sys.stdout.getvalue())
@override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check])
def test_list_deployment_check_omitted(self):
call_command('check', list_tags=True)
self.assertEqual('simpletag\n', sys.stdout.getvalue())
@override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check])
def test_list_deployment_check_included(self):
call_command('check', deploy=True, list_tags=True)
self.assertEqual('deploymenttag\nsimpletag\n', sys.stdout.getvalue())
@override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check])
def test_tags_deployment_check_omitted(self):
msg = 'There is no system check with the "deploymenttag" tag.'
with self.assertRaisesMessage(CommandError, msg):
call_command('check', tags=['deploymenttag'])
@override_system_checks([tagged_system_check], deployment_checks=[deployment_system_check])
def test_tags_deployment_check_included(self):
call_command('check', deploy=True, tags=['deploymenttag'])
self.assertIn('Deployment Check', sys.stderr.getvalue())
@override_system_checks([tagged_system_check])
def test_fail_level(self):
with self.assertRaises(CommandError):
call_command('check', fail_level='WARNING')
def custom_error_system_check(app_configs, **kwargs):
return [Error('Error', id='myerrorcheck.E001')]
def custom_warning_system_check(app_configs, **kwargs):
return [Warning('Warning', id='mywarningcheck.E001')]
class SilencingCheckTests(SimpleTestCase):
def setUp(self):
self.old_stdout, self.old_stderr = sys.stdout, sys.stderr
self.stdout, self.stderr = StringIO(), StringIO()
sys.stdout, sys.stderr = self.stdout, self.stderr
def tearDown(self):
sys.stdout, sys.stderr = self.old_stdout, self.old_stderr
@override_settings(SILENCED_SYSTEM_CHECKS=['myerrorcheck.E001'])
@override_system_checks([custom_error_system_check])
def test_silenced_error(self):
out = StringIO()
err = StringIO()
call_command('check', stdout=out, stderr=err)
self.assertEqual(out.getvalue(), 'System check identified no issues (1 silenced).\n')
self.assertEqual(err.getvalue(), '')
@override_settings(SILENCED_SYSTEM_CHECKS=['mywarningcheck.E001'])
@override_system_checks([custom_warning_system_check])
def test_silenced_warning(self):
out = StringIO()
err = StringIO()
call_command('check', stdout=out, stderr=err)
self.assertEqual(out.getvalue(), 'System check identified no issues (1 silenced).\n')
self.assertEqual(err.getvalue(), '')
class CheckFrameworkReservedNamesTests(SimpleTestCase):
@isolate_apps('check_framework', kwarg_name='apps')
@override_system_checks([checks.model_checks.check_all_models])
def test_model_check_method_not_shadowed(self, apps):
class ModelWithAttributeCalledCheck(models.Model):
check = 42
class ModelWithFieldCalledCheck(models.Model):
check = models.IntegerField()
class ModelWithRelatedManagerCalledCheck(models.Model):
pass
class ModelWithDescriptorCalledCheck(models.Model):
check = models.ForeignKey(ModelWithRelatedManagerCalledCheck, models.CASCADE)
article = models.ForeignKey(
ModelWithRelatedManagerCalledCheck,
models.CASCADE,
related_name='check',
)
errors = checks.run_checks(app_configs=apps.get_app_configs())
expected = [
Error(
"The 'ModelWithAttributeCalledCheck.check()' class method is "
"currently overridden by 42.",
obj=ModelWithAttributeCalledCheck,
id='models.E020'
),
Error(
"The 'ModelWithFieldCalledCheck.check()' class method is "
"currently overridden by %r." % ModelWithFieldCalledCheck.check,
obj=ModelWithFieldCalledCheck,
id='models.E020'
),
Error(
"The 'ModelWithRelatedManagerCalledCheck.check()' class method is "
"currently overridden by %r." % ModelWithRelatedManagerCalledCheck.check,
obj=ModelWithRelatedManagerCalledCheck,
id='models.E020'
),
Error(
"The 'ModelWithDescriptorCalledCheck.check()' class method is "
"currently overridden by %r." % ModelWithDescriptorCalledCheck.check,
obj=ModelWithDescriptorCalledCheck,
id='models.E020'
),
]
self.assertEqual(errors, expected)
class ChecksRunDuringTests(SimpleTestCase):
def test_registered_check_did_run(self):
self.assertTrue(my_check.did_run)
|
ca1d3e3c43fd39a415f8b5e2a875ecc98a2d135e7ba2ca872df870b51f609f7d | from django.conf import settings
from django.core.checks.messages import Error, Warning
from django.core.checks.urls import (
E006, check_url_config, check_url_namespaces_unique, check_url_settings,
get_warning_for_invalid_pattern,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckUrlConfigTests(SimpleTestCase):
@override_settings(ROOT_URLCONF='check_framework.urls.no_warnings')
def test_no_warnings(self):
result = check_url_config(None)
self.assertEqual(result, [])
@override_settings(ROOT_URLCONF='check_framework.urls.no_warnings_i18n')
def test_no_warnings_i18n(self):
self.assertEqual(check_url_config(None), [])
@override_settings(ROOT_URLCONF='check_framework.urls.warning_in_include')
def test_check_resolver_recursive(self):
# The resolver is checked recursively (examining URL patterns in include()).
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, 'urls.W001')
@override_settings(ROOT_URLCONF='check_framework.urls.include_with_dollar')
def test_include_with_dollar(self):
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, 'urls.W001')
self.assertEqual(warning.msg, (
"Your URL pattern '^include-with-dollar$' uses include with a "
"route ending with a '$'. Remove the dollar from the route to "
"avoid problems including URLs."
))
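        # That is, the flagged URLconf contains something along the lines of
        # re_path(r'^include-with-dollar$', include(...)); dropping the
        # trailing '$' from the route silences urls.W001.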
@override_settings(ROOT_URLCONF='check_framework.urls.contains_tuple')
def test_contains_tuple_not_url_instance(self):
result = check_url_config(None)
warning = result[0]
self.assertEqual(warning.id, 'urls.E004')
self.assertRegex(warning.msg, (
r"^Your URL pattern \('\^tuple/\$', <function <lambda> at 0x(\w+)>\) is "
r"invalid. Ensure that urlpatterns is a list of path\(\) and/or re_path\(\) "
r"instances\.$"
))
@override_settings(ROOT_URLCONF='check_framework.urls.include_contains_tuple')
def test_contains_included_tuple(self):
result = check_url_config(None)
warning = result[0]
self.assertEqual(warning.id, 'urls.E004')
self.assertRegex(warning.msg, (
r"^Your URL pattern \('\^tuple/\$', <function <lambda> at 0x(\w+)>\) is "
r"invalid. Ensure that urlpatterns is a list of path\(\) and/or re_path\(\) "
r"instances\.$"
))
@override_settings(ROOT_URLCONF='check_framework.urls.beginning_with_slash')
def test_beginning_with_slash(self):
msg = (
"Your URL pattern '%s' has a route beginning with a '/'. Remove "
"this slash as it is unnecessary. If this pattern is targeted in "
"an include(), ensure the include() pattern has a trailing '/'."
)
warning1, warning2 = check_url_config(None)
self.assertEqual(warning1.id, 'urls.W002')
self.assertEqual(warning1.msg, msg % '/path-starting-with-slash/')
self.assertEqual(warning2.id, 'urls.W002')
self.assertEqual(warning2.msg, msg % '/url-starting-with-slash/$')
@override_settings(
ROOT_URLCONF='check_framework.urls.beginning_with_slash',
APPEND_SLASH=False,
)
def test_beginning_with_slash_append_slash(self):
# It can be useful to start a URL pattern with a slash when
# APPEND_SLASH=False (#27238).
result = check_url_config(None)
self.assertEqual(result, [])
@override_settings(ROOT_URLCONF='check_framework.urls.name_with_colon')
def test_name_with_colon(self):
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, 'urls.W003')
expected_msg = "Your URL pattern '^$' [name='name_with:colon'] has a name including a ':'."
self.assertIn(expected_msg, warning.msg)
@override_settings(ROOT_URLCONF=None)
def test_no_root_urlconf_in_settings(self):
delattr(settings, 'ROOT_URLCONF')
result = check_url_config(None)
self.assertEqual(result, [])
def test_get_warning_for_invalid_pattern_string(self):
warning = get_warning_for_invalid_pattern('')[0]
self.assertEqual(
warning.hint,
"Try removing the string ''. The list of urlpatterns should "
"not have a prefix string as the first element.",
)
def test_get_warning_for_invalid_pattern_tuple(self):
warning = get_warning_for_invalid_pattern((r'^$', lambda x: x))[0]
self.assertEqual(warning.hint, "Try using path() instead of a tuple.")
def test_get_warning_for_invalid_pattern_other(self):
warning = get_warning_for_invalid_pattern(object())[0]
self.assertIsNone(warning.hint)
@override_settings(ROOT_URLCONF='check_framework.urls.non_unique_namespaces')
def test_check_non_unique_namespaces(self):
result = check_url_namespaces_unique(None)
self.assertEqual(len(result), 2)
non_unique_namespaces = ['app-ns1', 'app-1']
warning_messages = [
"URL namespace '{}' isn't unique. You may not be able to reverse "
"all URLs in this namespace".format(namespace)
for namespace in non_unique_namespaces
]
for warning in result:
self.assertIsInstance(warning, Warning)
self.assertEqual('urls.W005', warning.id)
self.assertIn(warning.msg, warning_messages)
@override_settings(ROOT_URLCONF='check_framework.urls.unique_namespaces')
def test_check_unique_namespaces(self):
result = check_url_namespaces_unique(None)
self.assertEqual(result, [])
@override_settings(ROOT_URLCONF='check_framework.urls.cbv_as_view')
def test_check_view_not_class(self):
self.assertEqual(check_url_config(None), [
Error(
"Your URL pattern 'missing_as_view' has an invalid view, pass "
"EmptyCBV.as_view() instead of EmptyCBV.",
id='urls.E009',
),
])
class UpdatedToPathTests(SimpleTestCase):
@override_settings(ROOT_URLCONF='check_framework.urls.path_compatibility.contains_re_named_group')
def test_contains_re_named_group(self):
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, '2_0.W001')
expected_msg = "Your URL pattern '(?P<named_group>\\d+)' has a route"
self.assertIn(expected_msg, warning.msg)
@override_settings(ROOT_URLCONF='check_framework.urls.path_compatibility.beginning_with_caret')
def test_beginning_with_caret(self):
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, '2_0.W001')
expected_msg = "Your URL pattern '^beginning-with-caret' has a route"
self.assertIn(expected_msg, warning.msg)
@override_settings(ROOT_URLCONF='check_framework.urls.path_compatibility.ending_with_dollar')
def test_ending_with_dollar(self):
result = check_url_config(None)
self.assertEqual(len(result), 1)
warning = result[0]
self.assertEqual(warning.id, '2_0.W001')
expected_msg = "Your URL pattern 'ending-with-dollar$' has a route"
self.assertIn(expected_msg, warning.msg)
class CheckCustomErrorHandlersTests(SimpleTestCase):
@override_settings(
ROOT_URLCONF='check_framework.urls.bad_function_based_error_handlers',
)
def test_bad_function_based_handlers(self):
result = check_url_config(None)
self.assertEqual(len(result), 4)
for code, num_params, error in zip([400, 403, 404, 500], [2, 2, 2, 1], result):
with self.subTest('handler{}'.format(code)):
self.assertEqual(error, Error(
"The custom handler{} view 'check_framework.urls."
"bad_function_based_error_handlers.bad_handler' "
"does not take the correct number of arguments (request{})."
.format(code, ', exception' if num_params == 2 else ''),
id='urls.E007',
))
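    # For reference, handlers with the expected signatures look like:
    #
    #   def handler400(request, exception): ...
    #   def handler403(request, exception): ...
    #   def handler404(request, exception): ...
    #   def handler500(request): ...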
@override_settings(
ROOT_URLCONF='check_framework.urls.bad_class_based_error_handlers',
)
def test_bad_class_based_handlers(self):
result = check_url_config(None)
self.assertEqual(len(result), 4)
for code, num_params, error in zip([400, 403, 404, 500], [2, 2, 2, 1], result):
with self.subTest('handler%s' % code):
self.assertEqual(error, Error(
"The custom handler%s view 'check_framework.urls."
"bad_class_based_error_handlers.HandlerView.as_view."
"<locals>.view' does not take the correct number of "
"arguments (request%s)." % (
code,
', exception' if num_params == 2 else '',
),
id='urls.E007',
))
@override_settings(ROOT_URLCONF='check_framework.urls.bad_error_handlers_invalid_path')
def test_bad_handlers_invalid_path(self):
result = check_url_config(None)
paths = [
'django.views.bad_handler',
'django.invalid_module.bad_handler',
'invalid_module.bad_handler',
'django',
]
hints = [
"Could not import '{}'. View does not exist in module django.views.",
"Could not import '{}'. Parent module django.invalid_module does not exist.",
"No module named 'invalid_module'",
"Could not import '{}'. The path must be fully qualified.",
]
for code, path, hint, error in zip([400, 403, 404, 500], paths, hints, result):
with self.subTest('handler{}'.format(code)):
self.assertEqual(error, Error(
"The custom handler{} view '{}' could not be imported.".format(code, path),
hint=hint.format(path),
id='urls.E008',
))
@override_settings(
ROOT_URLCONF='check_framework.urls.good_function_based_error_handlers',
)
def test_good_function_based_handlers(self):
result = check_url_config(None)
self.assertEqual(result, [])
@override_settings(
ROOT_URLCONF='check_framework.urls.good_class_based_error_handlers',
)
def test_good_class_based_handlers(self):
result = check_url_config(None)
self.assertEqual(result, [])
class CheckURLSettingsTests(SimpleTestCase):
@override_settings(STATIC_URL='a/', MEDIA_URL='b/')
def test_slash_no_errors(self):
self.assertEqual(check_url_settings(None), [])
@override_settings(STATIC_URL='', MEDIA_URL='')
def test_empty_string_no_errors(self):
self.assertEqual(check_url_settings(None), [])
@override_settings(STATIC_URL='noslash')
def test_static_url_no_slash(self):
self.assertEqual(check_url_settings(None), [E006('STATIC_URL')])
@override_settings(STATIC_URL='slashes//')
def test_static_url_double_slash_allowed(self):
# The check allows for a double slash, presuming the user knows what
# they are doing.
self.assertEqual(check_url_settings(None), [])
@override_settings(MEDIA_URL='noslash')
def test_media_url_no_slash(self):
self.assertEqual(check_url_settings(None), [E006('MEDIA_URL')])
|
c2d51edb4627a4ade7d0cfe0555ec0428f7a5fa34aa04fa25811e6f07dbd9770 | from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth.models import User
from django.template.response import TemplateResponse
from django.test import TestCase, override_settings
from django.urls import reverse
from .models import Action, Car, Person
@override_settings(ROOT_URLCONF='admin_custom_urls.urls',)
class AdminCustomUrlsTest(TestCase):
"""
Remember that:
* The Action model has a CharField PK.
    * The ModelAdmin for Action customizes the add_view URL; it's
      '<app name>/<model name>/!add/'.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
Action.objects.create(name='delete', description='Remove things.')
Action.objects.create(name='rename', description='Gives things other names.')
Action.objects.create(name='add', description='Add things.')
Action.objects.create(name='path/to/file/', description="An action with '/' in its name.")
Action.objects.create(
name='path/to/html/document.html',
            description='An action with a name similar to an HTML doc path.'
)
Action.objects.create(
name='javascript:alert(\'Hello world\');">Click here</a>',
            description='An action with a name suspected of being an XSS attempt'
)
def setUp(self):
self.client.force_login(self.superuser)
def test_basic_add_GET(self):
"""
Ensure GET on the add_view works.
"""
add_url = reverse('admin_custom_urls:admin_custom_urls_action_add')
self.assertTrue(add_url.endswith('/!add/'))
response = self.client.get(add_url)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
"""
Ensure GET on the add_view plus specifying a field value in the query
string works.
"""
response = self.client.get(reverse('admin_custom_urls:admin_custom_urls_action_add'), {'name': 'My Action'})
self.assertContains(response, 'value="My Action"')
def test_basic_add_POST(self):
"""
Ensure POST on add_view works.
"""
post_data = {
IS_POPUP_VAR: '1',
"name": 'Action added through a popup',
"description": "Description of added action",
}
response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_action_add'), post_data)
self.assertContains(response, 'Action added through a popup')
def test_admin_URLs_no_clash(self):
        # Should get the change_view for the model instance with PK 'add', not
        # show the add_view.
url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label, args=(quote('add'),))
response = self.client.get(url)
self.assertContains(response, 'Change action')
# Should correctly get the change_view for the model instance with the
# funny-looking PK (the one with a 'path/to/html/document.html' value)
url = reverse(
'admin_custom_urls:%s_action_change' % Action._meta.app_label,
args=(quote("path/to/html/document.html"),)
)
response = self.client.get(url)
self.assertContains(response, 'Change action')
self.assertContains(response, 'value="path/to/html/document.html"')
def test_post_save_add_redirect(self):
"""
ModelAdmin.response_post_save_add() controls the redirection after
the 'Save' button has been pressed when adding a new object.
"""
post_data = {'name': 'John Doe'}
self.assertEqual(Person.objects.count(), 0)
response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_person_add'), post_data)
persons = Person.objects.all()
self.assertEqual(len(persons), 1)
redirect_url = reverse('admin_custom_urls:admin_custom_urls_person_history', args=[persons[0].pk])
self.assertRedirects(response, redirect_url)
def test_post_save_change_redirect(self):
"""
ModelAdmin.response_post_save_change() controls the redirection after
the 'Save' button has been pressed when editing an existing object.
"""
Person.objects.create(name='John Doe')
self.assertEqual(Person.objects.count(), 1)
person = Person.objects.all()[0]
post_url = reverse('admin_custom_urls:admin_custom_urls_person_change', args=[person.pk])
response = self.client.post(post_url, {'name': 'Jack Doe'})
self.assertRedirects(response, reverse('admin_custom_urls:admin_custom_urls_person_delete', args=[person.pk]))
def test_post_url_continue(self):
"""
The ModelAdmin.response_add()'s parameter `post_url_continue` controls
the redirection after an object has been created.
"""
post_data = {'name': 'SuperFast', '_continue': '1'}
self.assertEqual(Car.objects.count(), 0)
response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_car_add'), post_data)
cars = Car.objects.all()
self.assertEqual(len(cars), 1)
self.assertRedirects(response, reverse('admin_custom_urls:admin_custom_urls_car_history', args=[cars[0].pk]))
|
71f77b0d054ddd329dafa3054741be5fc665060402df22b7a8bb577850df0fbc | """
A series of tests to establish that the command-line management tools work as
advertised, especially with regard to the handling of the
DJANGO_SETTINGS_MODULE and default settings.py files.
"""
import os
import re
import shutil
import socket
import subprocess
import sys
import tempfile
import unittest
from io import StringIO
from unittest import mock
from django import conf, get_version
from django.conf import settings
from django.core.management import (
BaseCommand, CommandError, call_command, color, execute_from_command_line,
)
from django.core.management.commands.loaddata import Command as LoaddataCommand
from django.core.management.commands.runserver import (
Command as RunserverCommand,
)
from django.core.management.commands.testserver import (
Command as TestserverCommand,
)
from django.db import ConnectionHandler, connection
from django.db.migrations.recorder import MigrationRecorder
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
from django.test.utils import captured_stderr, captured_stdout
custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates')
SYSTEM_CHECK_MSG = 'System check identified no issues'
class AdminScriptTestCase(SimpleTestCase):
def setUp(self):
tmpdir = tempfile.TemporaryDirectory()
self.addCleanup(tmpdir.cleanup)
# os.path.realpath() is required for temporary directories on macOS,
# where `/var` is a symlink to `/private/var`.
self.test_dir = os.path.realpath(os.path.join(tmpdir.name, 'test_project'))
os.mkdir(self.test_dir)
def write_settings(self, filename, apps=None, is_dir=False, sdict=None, extra=None):
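        """
        Write a settings file (or package, when is_dir=True) into
        self.test_dir: a banner comment, any `extra` code, settings copied from
        the running test settings, the INSTALLED_APPS list, and finally any
        `sdict` entries.
        """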
if is_dir:
settings_dir = os.path.join(self.test_dir, filename)
os.mkdir(settings_dir)
settings_file_path = os.path.join(settings_dir, '__init__.py')
else:
settings_file_path = os.path.join(self.test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
if extra:
settings_file.write("%s\n" % extra)
exports = [
'DATABASES',
'DEFAULT_AUTO_FIELD',
'ROOT_URLCONF',
'SECRET_KEY',
'USE_TZ',
]
for s in exports:
if hasattr(settings, s):
o = getattr(settings, s)
if not isinstance(o, (dict, tuple, list)):
o = "'%s'" % o
settings_file.write("%s = %s\n" % (s, o))
if apps is None:
apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts']
settings_file.write("INSTALLED_APPS = %s\n" % apps)
if sdict:
for k, v in sdict.items():
settings_file.write("%s = %s\n" % (k, v))
def _ext_backend_paths(self):
"""
Returns the paths for any external backend packages.
"""
paths = []
for backend in settings.DATABASES.values():
package = backend['ENGINE'].split('.')[0]
if package != 'django':
backend_pkg = __import__(package)
backend_dir = os.path.dirname(backend_pkg.__file__)
paths.append(os.path.dirname(backend_dir))
return paths
def run_test(self, args, settings_file=None, apps=None):
base_dir = os.path.dirname(self.test_dir)
# The base dir for Django's tests is one level up.
tests_dir = os.path.dirname(os.path.dirname(__file__))
# The base dir for Django is one level above the test dir. We don't use
# `import django` to figure that out, so we don't pick up a Django
# from site-packages or similar.
django_dir = os.path.dirname(tests_dir)
ext_backend_base_dirs = self._ext_backend_paths()
# Define a temporary environment for the subprocess
test_environ = os.environ.copy()
# Set the test environment
if settings_file:
test_environ['DJANGO_SETTINGS_MODULE'] = settings_file
elif 'DJANGO_SETTINGS_MODULE' in test_environ:
del test_environ['DJANGO_SETTINGS_MODULE']
python_path = [base_dir, django_dir, tests_dir]
python_path.extend(ext_backend_base_dirs)
test_environ['PYTHONPATH'] = os.pathsep.join(python_path)
test_environ['PYTHONWARNINGS'] = ''
p = subprocess.run(
[sys.executable, *args],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=self.test_dir,
env=test_environ, universal_newlines=True,
)
return p.stdout, p.stderr
def run_django_admin(self, args, settings_file=None):
return self.run_test(['-m', 'django', *args], settings_file)
def run_manage(self, args, settings_file=None, manage_py=None):
template_manage_py = (
os.path.join(os.path.dirname(__file__), manage_py)
if manage_py else
os.path.join(os.path.dirname(conf.__file__), 'project_template', 'manage.py-tpl')
)
test_manage_py = os.path.join(self.test_dir, 'manage.py')
shutil.copyfile(template_manage_py, test_manage_py)
with open(test_manage_py) as fp:
manage_py_contents = fp.read()
manage_py_contents = manage_py_contents.replace(
"{{ project_name }}", "test_project")
with open(test_manage_py, 'w') as fp:
fp.write(manage_py_contents)
return self.run_test(['./manage.py', *args], settings_file)
def assertNoOutput(self, stream):
"Utility assertion: assert that the given stream is empty"
self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream)
def assertOutput(self, stream, msg, regex=False):
"Utility assertion: assert that the given message exists in the output"
if regex:
self.assertIsNotNone(
re.search(msg, stream),
"'%s' does not match actual output text '%s'" % (msg, stream)
)
else:
self.assertIn(msg, stream, "'%s' does not match actual output text '%s'" % (msg, stream))
def assertNotInOutput(self, stream, msg):
"Utility assertion: assert that the given message doesn't exist in the output"
self.assertNotIn(msg, stream, "'%s' matches actual output text '%s'" % (msg, stream))
##########################################################################
# DJANGO ADMIN TESTS
# This first series of test classes checks the environment processing
# of the django-admin.
##########################################################################
class DjangoAdminNoSettings(AdminScriptTestCase):
"A series of tests for django-admin when there is no settings.py file."
def test_builtin_command(self):
"no settings: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_commands_with_invalid_settings(self):
"""
Commands that don't require settings succeed if the settings file
doesn't exist.
"""
args = ['startproject']
out, err = self.run_django_admin(args, settings_file='bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "You must provide a project name", regex=True)
class DjangoAdminDefaultSettings(AdminScriptTestCase):
"""
A series of tests for django-admin when using a settings.py file that
contains the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_builtin_command(self):
"default: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"default: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"default: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"default: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"default: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"default: django-admin can't execute user commands if it isn't provided settings"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"default: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"default: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase):
"""
A series of tests for django-admin when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes',
'admin_scripts', 'admin_scripts.complex_app'])
def test_builtin_command(self):
"fulldefault: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"fulldefault: django-admin builtin commands succeed if a settings file is provided"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"fulldefault: django-admin builtin commands succeed if the environment contains settings"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"fulldefault: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"fulldefault: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"fulldefault: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMinimalSettings(AdminScriptTestCase):
"""
A series of tests for django-admin when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def test_builtin_command(self):
"minimal: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"minimal: django-admin builtin commands fail if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_environment(self):
"minimal: django-admin builtin commands fail if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_bad_settings(self):
"minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"minimal: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: django-admin can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: django-admin can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class DjangoAdminAlternateSettings(AdminScriptTestCase):
"""
A series of tests for django-admin when using a settings file with a name
other than 'settings.py'.
"""
def setUp(self):
super().setUp()
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMultipleSettings(AdminScriptTestCase):
"""
A series of tests for django-admin when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminSettingsDirectory(AdminScriptTestCase):
"""
A series of tests for django-admin when the settings file is in a
directory. (see #9751).
"""
def setUp(self):
super().setUp()
self.write_settings('settings', is_dir=True)
def test_setup_environ(self):
"directory: startapp creates the correct directory"
args = ['startapp', 'settings_test']
app_path = os.path.join(self.test_dir, 'settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
with open(os.path.join(app_path, 'apps.py')) as f:
content = f.read()
self.assertIn("class SettingsTestConfig(AppConfig)", content)
self.assertIn("name = 'settings_test'", content)
def test_setup_environ_custom_template(self):
"directory: startapp creates the correct directory with a custom template"
template_path = os.path.join(custom_templates_dir, 'app_template')
args = ['startapp', '--template', template_path, 'custom_settings_test']
app_path = os.path.join(self.test_dir, 'custom_settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py')))
def test_startapp_unicode_name(self):
"""startapp creates the correct directory with Unicode characters."""
args = ['startapp', 'こんにちは']
app_path = os.path.join(self.test_dir, 'こんにちは')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
with open(os.path.join(app_path, 'apps.py'), encoding='utf8') as f:
content = f.read()
self.assertIn("class こんにちはConfig(AppConfig)", content)
self.assertIn("name = 'こんにちは'", content)
def test_builtin_command(self):
"directory: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"directory: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"directory: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"directory: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_builtin_with_settings(self):
"directory: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"directory: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
##########################################################################
# MANAGE.PY TESTS
# This next series of test classes checks the environment processing
# of the generated manage.py script
##########################################################################
class ManageManuallyConfiguredSettings(AdminScriptTestCase):
"""Customized manage.py calling settings.configure()."""
def test_non_existent_command_output(self):
out, err = self.run_manage(['invalid_command'], manage_py='configured_settings_manage.py')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'invalid_command'")
self.assertNotInOutput(err, 'No Django settings specified')
class ManageNoSettings(AdminScriptTestCase):
"A series of tests for manage.py when there is no settings.py file."
def test_builtin_command(self):
"no settings: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_builtin_with_bad_settings(self):
"no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
class ManageDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_builtin_command(self):
"default: manage.py builtin commands succeed when default settings are appropriate"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_settings(self):
"default: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"default: manage.py builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"default: manage.py builtin commands succeed if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"default: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"default: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_settings(self):
"default: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"default: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageFullPathDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'])
def test_builtin_command(self):
"fulldefault: manage.py builtin commands succeed when default settings are appropriate"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_settings(self):
"fulldefault: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"fulldefault: manage.py builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"fulldefault: manage.py builtin commands succeed if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"fulldefault: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_settings(self):
"fulldefault: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"fulldefault: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageMinimalSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def test_builtin_command(self):
"minimal: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_settings(self):
"minimal: manage.py builtin commands fail if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_environment(self):
"minimal: manage.py builtin commands fail if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_bad_settings(self):
"minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"minimal: manage.py can't execute user commands without appropriate settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: manage.py can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: manage.py can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class ManageAlternateSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings file
with a name other than 'settings.py'.
"""
def setUp(self):
super().setUp()
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: manage.py builtin commands fail with an error when no default settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_builtin_with_settings(self):
"alternate: manage.py builtin commands work with settings provided as argument"
args = ['check', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertOutput(out, SYSTEM_CHECK_MSG)
self.assertNoOutput(err)
def test_builtin_with_environment(self):
"alternate: manage.py builtin commands work if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
self.assertOutput(out, SYSTEM_CHECK_MSG)
self.assertNoOutput(err)
def test_builtin_with_bad_settings(self):
"alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: manage.py can't execute user commands without settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_custom_command_with_settings(self):
"alternate: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', "
"'alternate_settings'), ('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
def test_custom_command_with_environment(self):
"alternate: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', None), "
"('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
def test_custom_command_output_color(self):
"alternate: manage.py output syntax color can be deactivated with the `--no-color` option"
args = ['noargs_command', '--no-color', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', True), ('pythonpath', None), ('settings', "
"'alternate_settings'), ('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
class ManageMultipleSettings(AdminScriptTestCase):
"""A series of tests for manage.py when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"multiple: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_settings(self):
"multiple: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"multiple: manage.py can execute builtin commands if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"multiple: manage.py can't execute user commands using default settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"multiple: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"multiple: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageSettingsWithSettingsErrors(AdminScriptTestCase):
"""
Tests for manage.py when using the default settings.py file containing
runtime errors.
"""
def write_settings_with_import_error(self, filename):
settings_file_path = os.path.join(self.test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
settings_file.write('# The next line will cause an import error:\nimport foo42bar\n')
def test_import_error(self):
"""
import error: manage.py builtin commands show useful diagnostic info
when a settings module with an import error is provided (#14130).
"""
self.write_settings_with_import_error('settings.py')
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named")
self.assertOutput(err, "foo42bar")
def test_attribute_error(self):
"""
manage.py builtin commands don't swallow the AttributeError caused by
bad settings (#18845).
"""
self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "AttributeError: 'list' object has no attribute 'crash'")
def test_key_error(self):
self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "KeyError: 'blah'")
def test_help(self):
"""
The 'help' output notes that only Django core commands are listed when
the project settings can't be loaded.
"""
self.write_settings(
'settings.py',
extra='from django.core.exceptions import ImproperlyConfigured\n'
'raise ImproperlyConfigured()',
)
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, 'only Django core commands are listed')
self.assertNoOutput(err)
class ManageCheck(AdminScriptTestCase):
def test_nonexistent_app(self):
"""check reports an error on a nonexistent app in INSTALLED_APPS."""
self.write_settings(
'settings.py',
apps=['admin_scriptz.broken_app'],
sdict={'USE_I18N': False},
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'ModuleNotFoundError')
self.assertOutput(err, 'No module named')
self.assertOutput(err, 'admin_scriptz')
def test_broken_app(self):
""" manage.py check reports an ImportError if an app's models.py
raises one on import """
self.write_settings('settings.py', apps=['admin_scripts.broken_app'])
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'ImportError')
def test_complex_app(self):
""" manage.py check does not raise an ImportError validating a
complex app with nested calls to load_app """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.complex_app',
'admin_scripts.simple_app',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
],
sdict={
'DEBUG': True,
'MIDDLEWARE': [
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
],
'TEMPLATES': [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
],
}
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
def test_app_with_import(self):
""" manage.py check does not raise errors when an app imports a base
class that itself has an abstract base. """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_with_import',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sites',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
def test_output_format(self):
""" All errors/warnings should be sorted by level and by message. """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_raising_messages',
'django.contrib.auth',
'django.contrib.contenttypes',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
expected_err = (
"SystemCheckError: System check identified some issues:\n"
"\n"
"ERRORS:\n"
"?: An error\n"
"\tHINT: Error hint\n"
"\n"
"WARNINGS:\n"
"a: Second warning\n"
"obj: First warning\n"
"\tHINT: Hint\n"
"\n"
"System check identified 3 issues (0 silenced).\n"
)
self.assertEqual(err, expected_err)
self.assertNoOutput(out)
def test_warning_does_not_halt(self):
"""
When there are only warnings or less serious messages, Django shouldn't
prevent the user from launching their project, so the `check` command
shouldn't raise a `CommandError` exception. This test also checks the
output format.
"""
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_raising_warning',
'django.contrib.auth',
'django.contrib.contenttypes',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
expected_err = (
"System check identified some issues:\n" # No "CommandError: " part
"\n"
"WARNINGS:\n"
"?: A warning\n"
"\n"
"System check identified 1 issue (0 silenced).\n"
)
self.assertEqual(err, expected_err)
self.assertNoOutput(out)
class ManageRunserver(SimpleTestCase):
def setUp(self):
def monkey_run(*args, **options):
return
self.output = StringIO()
self.cmd = RunserverCommand(stdout=self.output)
self.cmd.run = monkey_run
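# Replace run() with a no-op so call_command() only parses the
# address/port options without actually starting a server.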
def assertServerSettings(self, addr, port, ipv6=False, raw_ipv6=False):
self.assertEqual(self.cmd.addr, addr)
self.assertEqual(self.cmd.port, port)
self.assertEqual(self.cmd.use_ipv6, ipv6)
self.assertEqual(self.cmd._raw_ipv6, raw_ipv6)
def test_runserver_addrport(self):
call_command(self.cmd)
self.assertServerSettings('127.0.0.1', '8000')
call_command(self.cmd, addrport="1.2.3.4:8000")
self.assertServerSettings('1.2.3.4', '8000')
call_command(self.cmd, addrport="7000")
self.assertServerSettings('127.0.0.1', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_addrport_ipv6(self):
call_command(self.cmd, addrport="", use_ipv6=True)
self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True)
call_command(self.cmd, addrport="7000", use_ipv6=True)
self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True)
call_command(self.cmd, addrport="[2001:0db8:1234:5678::9]:7000")
self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True)
def test_runner_hostname(self):
call_command(self.cmd, addrport="localhost:8000")
self.assertServerSettings('localhost', '8000')
call_command(self.cmd, addrport="test.domain.local:7000")
self.assertServerSettings('test.domain.local', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_hostname_ipv6(self):
call_command(self.cmd, addrport="test.domain.local:7000", use_ipv6=True)
self.assertServerSettings('test.domain.local', '7000', ipv6=True)
def test_runner_custom_defaults(self):
self.cmd.default_addr = '0.0.0.0'
self.cmd.default_port = '5000'
call_command(self.cmd)
self.assertServerSettings('0.0.0.0', '5000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_custom_defaults_ipv6(self):
self.cmd.default_addr_ipv6 = '::'
call_command(self.cmd, use_ipv6=True)
self.assertServerSettings('::', '8000', ipv6=True, raw_ipv6=True)
def test_runner_ambiguous(self):
# Only 4 characters, all of which could be in an ipv6 address
call_command(self.cmd, addrport="beef:7654")
self.assertServerSettings('beef', '7654')
# Uses only characters that could be in an ipv6 address
call_command(self.cmd, addrport="deadbeef:7654")
self.assertServerSettings('deadbeef', '7654')
def test_no_database(self):
"""
Ensure runserver.check_migrations doesn't choke on empty DATABASES.
"""
tested_connections = ConnectionHandler({})
with mock.patch('django.core.management.base.connections', new=tested_connections):
self.cmd.check_migrations()
def test_readonly_database(self):
"""
runserver.check_migrations() doesn't choke when a database is read-only.
"""
with mock.patch.object(MigrationRecorder, 'has_table', return_value=False):
self.cmd.check_migrations()
# The output contains "You have ... unapplied migration(s)".
self.assertIn('unapplied migration(s)', self.output.getvalue())
@mock.patch('django.core.management.commands.runserver.run')
@mock.patch('django.core.management.base.BaseCommand.check_migrations')
@mock.patch('django.core.management.base.BaseCommand.check')
def test_skip_checks(self, mocked_check, *mocked_objects):
call_command(
'runserver',
use_reloader=False,
skip_checks=True,
stdout=self.output,
)
self.assertNotIn('Performing system checks...', self.output.getvalue())
mocked_check.assert_not_called()
self.output.truncate(0)
call_command(
'runserver',
use_reloader=False,
skip_checks=False,
stdout=self.output,
)
self.assertIn('Performing system checks...', self.output.getvalue())
mocked_check.assert_called()
class ManageRunserverMigrationWarning(TestCase):
def setUp(self):
self.stdout = StringIO()
self.runserver_command = RunserverCommand(stdout=self.stdout)
@override_settings(INSTALLED_APPS=["admin_scripts.app_waiting_migration"])
def test_migration_warning_one_app(self):
self.runserver_command.check_migrations()
output = self.stdout.getvalue()
self.assertIn('You have 1 unapplied migration(s)', output)
self.assertIn('apply the migrations for app(s): app_waiting_migration.', output)
@override_settings(
INSTALLED_APPS=[
"admin_scripts.app_waiting_migration",
"admin_scripts.another_app_waiting_migration",
],
)
def test_migration_warning_multiple_apps(self):
self.runserver_command.check_migrations()
output = self.stdout.getvalue()
self.assertIn('You have 2 unapplied migration(s)', output)
self.assertIn(
'apply the migrations for app(s): another_app_waiting_migration, '
'app_waiting_migration.', output
)
class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase):
def setUp(self):
super().setUp()
self.write_settings('settings.py', sdict={
'ALLOWED_HOSTS': [],
'DEBUG': False,
})
def test_empty_allowed_hosts_error(self):
out, err = self.run_manage(['runserver'])
self.assertNoOutput(out)
self.assertOutput(err, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.')
class ManageTestserver(SimpleTestCase):
@mock.patch.object(TestserverCommand, 'handle', return_value='')
def test_testserver_handle_params(self, mock_handle):
out = StringIO()
call_command('testserver', 'blah.json', stdout=out)
mock_handle.assert_called_with(
'blah.json',
stdout=out, settings=None, pythonpath=None, verbosity=1,
traceback=False, addrport='', no_color=False, use_ipv6=False,
skip_checks=True, interactive=True, force_color=False,
)
@mock.patch('django.db.connection.creation.create_test_db', return_value='test_db')
@mock.patch.object(LoaddataCommand, 'handle', return_value='')
@mock.patch.object(RunserverCommand, 'handle', return_value='')
def test_params_to_runserver(self, mock_runserver_handle, mock_loaddata_handle, mock_create_test_db):
call_command('testserver', 'blah.json')
mock_runserver_handle.assert_called_with(
addrport='',
force_color=False,
insecure_serving=False,
no_color=False,
pythonpath=None,
settings=None,
shutdown_message=(
"\nServer stopped.\nNote that the test database, 'test_db', "
"has not been deleted. You can explore it on your own."
),
skip_checks=True,
traceback=False,
use_ipv6=False,
use_reloader=False,
use_static_handler=True,
use_threading=connection.features.test_db_allows_multiple_connections,
verbosity=1,
)
##########################################################################
# COMMAND PROCESSING TESTS
# user-space commands are correctly handled - in particular, arguments to
# the commands are correctly parsed and processed.
##########################################################################
class ColorCommand(BaseCommand):
requires_system_checks = []
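# Write the same styled message to stdout and stderr so the tests below
# can check color handling on both streams.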
def handle(self, *args, **options):
self.stdout.write('Hello, world!', self.style.ERROR)
self.stderr.write('Hello, world!', self.style.ERROR)
class CommandTypes(AdminScriptTestCase):
"Tests for the various types of base command types that can be defined."
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_version(self):
"version is handled as a special case"
args = ['version']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, get_version())
def test_version_alternative(self):
"--version is equivalent to version"
args1, args2 = ['version'], ['--version']
# It's possible one outputs on stderr and the other on stdout, hence the set
self.assertEqual(set(self.run_manage(args1)), set(self.run_manage(args2)))
def test_help(self):
"help is handled as a special case"
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.")
self.assertOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
def test_help_commands(self):
"help --commands shows the list of all available commands"
args = ['help', '--commands']
out, err = self.run_manage(args)
self.assertNotInOutput(out, 'usage:')
self.assertNotInOutput(out, 'Options:')
self.assertNotInOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
self.assertNotInOutput(out, '\n\n')
def test_help_alternative(self):
"--help is equivalent to help"
args1, args2 = ['help'], ['--help']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_help_short_alternative(self):
"-h is handled as a short form of --help"
args1, args2 = ['--help'], ['-h']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_specific_help(self):
"--help can be used on a specific command"
args = ['check', '--help']
out, err = self.run_manage(args)
self.assertNoOutput(err)
# Command-specific options like --tag appear before options common to
# all commands like --version.
tag_location = out.find('--tag')
version_location = out.find('--version')
self.assertNotEqual(tag_location, -1)
self.assertNotEqual(version_location, -1)
self.assertLess(tag_location, version_location)
self.assertOutput(out, "Checks the entire Django project for potential problems.")
def test_color_style(self):
style = color.no_style()
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('nocolor')
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('dark')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
# Default palette has color.
style = color.make_style('')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
def test_command_color(self):
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err)
call_command(command)
if color.supports_color():
self.assertIn('Hello, world!\n', out.getvalue())
self.assertIn('Hello, world!\n', err.getvalue())
self.assertNotEqual(out.getvalue(), 'Hello, world!\n')
self.assertNotEqual(err.getvalue(), 'Hello, world!\n')
else:
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_command_no_color(self):
"--no-color prevent colorization of the output"
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err, no_color=True)
call_command(command)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err)
call_command(command, no_color=True)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_force_color_execute(self):
out = StringIO()
err = StringIO()
with mock.patch.object(sys.stdout, 'isatty', lambda: False):
command = ColorCommand(stdout=out, stderr=err)
call_command(command, force_color=True)
self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
def test_force_color_command_init(self):
out = StringIO()
err = StringIO()
with mock.patch.object(sys.stdout, 'isatty', lambda: False):
command = ColorCommand(stdout=out, stderr=err, force_color=True)
call_command(command)
self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
def test_no_color_force_color_mutually_exclusive_execute(self):
msg = "The --no-color and --force-color options can't be used together."
with self.assertRaisesMessage(CommandError, msg):
call_command(BaseCommand(), no_color=True, force_color=True)
def test_no_color_force_color_mutually_exclusive_command_init(self):
msg = "'no_color' and 'force_color' can't be used together."
with self.assertRaisesMessage(CommandError, msg):
call_command(BaseCommand(no_color=True, force_color=True))
def test_custom_stdout(self):
class Command(BaseCommand):
requires_system_checks = []
def handle(self, *args, **options):
self.stdout.write("Hello, World!")
out = StringIO()
command = Command(stdout=out)
call_command(command)
self.assertEqual(out.getvalue(), "Hello, World!\n")
out.truncate(0)
new_out = StringIO()
call_command(command, stdout=new_out)
self.assertEqual(out.getvalue(), "")
self.assertEqual(new_out.getvalue(), "Hello, World!\n")
def test_custom_stderr(self):
class Command(BaseCommand):
requires_system_checks = []
def handle(self, *args, **options):
self.stderr.write("Hello, World!")
err = StringIO()
command = Command(stderr=err)
call_command(command)
self.assertEqual(err.getvalue(), "Hello, World!\n")
err.truncate(0)
new_err = StringIO()
call_command(command, stderr=new_err)
self.assertEqual(err.getvalue(), "")
self.assertEqual(new_err.getvalue(), "Hello, World!\n")
def test_base_command(self):
"User BaseCommands can execute when a label is provided"
args = ['base_command', 'testlabel']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels)
def test_base_command_no_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command']
expected_labels = "()"
self._test_base_command(args, expected_labels)
def test_base_command_multiple_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command', 'testlabel', 'anotherlabel']
expected_labels = "('testlabel', 'anotherlabel')"
self._test_base_command(args, expected_labels)
def test_base_command_with_option(self):
"User BaseCommands can execute with options when a label is provided"
args = ['base_command', 'testlabel', '--option_a=x']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'")
def test_base_command_with_options(self):
"User BaseCommands can execute with multiple options when a label is provided"
args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'", option_b="'y'")
def test_base_command_with_wrong_option(self):
"User BaseCommands outputs command usage when wrong option is specified"
args = ['base_command', '--invalid']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "usage: manage.py base_command")
self.assertOutput(err, "error: unrecognized arguments: --invalid")
def _test_base_command(self, args, labels, option_a="'1'", option_b="'2'"):
out, err = self.run_manage(args)
expected_out = (
"EXECUTE:BaseCommand labels=%s, "
"options=[('force_color', False), ('no_color', False), "
"('option_a', %s), ('option_b', %s), ('option_c', '3'), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]") % (labels, option_a, option_b)
self.assertNoOutput(err)
self.assertOutput(out, expected_out)
def test_base_run_from_argv(self):
"""
run_from_argv() properly terminates even with a custom execute() method
(#19665). Also test proper traceback display.
"""
err = StringIO()
command = BaseCommand(stderr=err)
def raise_command_error(*args, **kwargs):
raise CommandError("Custom error")
command.execute = lambda args: args # This will trigger TypeError
# If the Exception is not CommandError it should always
# raise the original exception.
with self.assertRaises(TypeError):
command.run_from_argv(['', ''])
# If the exception is a CommandError and --traceback is not present,
# the command should raise SystemExit and not print any traceback to
# stderr.
command.execute = raise_command_error
err.truncate(0)
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
err_message = err.getvalue()
self.assertNotIn("Traceback", err_message)
self.assertIn("CommandError", err_message)
# If the Exception is CommandError and --traceback is present
# this command should raise the original CommandError as if it
# were not a CommandError.
err.truncate(0)
with self.assertRaises(CommandError):
command.run_from_argv(['', '', '--traceback'])
def test_run_from_argv_non_ascii_error(self):
"""
Non-ASCII message of CommandError does not raise any
UnicodeDecodeError in run_from_argv.
"""
def raise_command_error(*args, **kwargs):
raise CommandError("Erreur personnalisée")
command = BaseCommand(stderr=StringIO())
command.execute = raise_command_error
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
def test_run_from_argv_closes_connections(self):
"""
A command called from the command line should close connections after
being executed (#21255).
"""
command = BaseCommand()
command.check = lambda: []
command.handle = lambda *args, **kwargs: args
with mock.patch('django.core.management.base.connections') as mock_connections:
command.run_from_argv(['', ''])
# Test connections have been closed
self.assertTrue(mock_connections.close_all.called)
def test_noargs(self):
"NoArg Commands can be executed"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', None), "
"('traceback', False), ('verbosity', 1)]"
)
def test_noargs_with_args(self):
"NoArg Commands raise an error if an argument is provided"
args = ['noargs_command', 'argument']
out, err = self.run_manage(args)
self.assertOutput(err, "error: unrecognized arguments: argument")
def test_app_command(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'auth']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
def test_app_command_no_apps(self):
"User AppCommands raise an error when no app name is provided"
args = ['app_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'error: Enter at least one application label.')
def test_app_command_multiple_apps(self):
"User AppCommands raise an error when multiple app names are provided"
args = ['app_command', 'auth', 'contenttypes']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.contenttypes, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
def test_app_command_invalid_app_label(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_app_command_some_invalid_app_labels(self):
"User AppCommands can execute when some of the provided app names are invalid"
args = ['app_command', 'auth', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_label_command(self):
"User LabelCommands can execute when a label is provided"
args = ['label_command', 'testlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=testlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), ('settings', "
"None), ('traceback', False), ('verbosity', 1)]"
)
def test_label_command_no_label(self):
"User LabelCommands raise an error if no label is provided"
args = ['label_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'Enter at least one label')
def test_label_command_multiple_label(self):
"User LabelCommands are executed multiple times if multiple labels are provided"
args = ['label_command', 'testlabel', 'anotherlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=testlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=anotherlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
class Discovery(SimpleTestCase):
def test_precedence(self):
"""
Apps listed first in INSTALLED_APPS have precedence.
"""
with self.settings(INSTALLED_APPS=['admin_scripts.complex_app',
'admin_scripts.simple_app',
'django.contrib.auth',
'django.contrib.contenttypes']):
out = StringIO()
call_command('duplicate', stdout=out)
self.assertEqual(out.getvalue().strip(), 'complex_app')
with self.settings(INSTALLED_APPS=['admin_scripts.simple_app',
'admin_scripts.complex_app',
'django.contrib.auth',
'django.contrib.contenttypes']):
out = StringIO()
call_command('duplicate', stdout=out)
self.assertEqual(out.getvalue().strip(), 'simple_app')
class ArgumentOrder(AdminScriptTestCase):
"""Tests for 2-stage argument parsing scheme.
django-admin command arguments are parsed in 2 parts; the core arguments
(--settings, --traceback and --pythonpath) are parsed using a basic parser,
    ignoring any unknown options. Once the settings are configured, the full
    argument list is passed to the command's parser, which extracts the
    options of interest to the individual command.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_setting_then_option(self):
""" Options passed after settings are correctly handled. """
args = ['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x']
self._test(args)
def test_setting_then_short_option(self):
""" Short options passed after settings are correctly handled. """
args = ['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x']
self._test(args)
def test_option_then_setting(self):
""" Options passed before settings are correctly handled. """
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings']
self._test(args)
def test_short_option_then_setting(self):
""" Short options passed before settings are correctly handled. """
args = ['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings']
self._test(args)
def test_option_then_setting_then_option(self):
""" Options are correctly handled when they are passed before and after
a setting. """
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y']
self._test(args, option_b="'y'")
def _test(self, args, option_b="'2'"):
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:BaseCommand labels=('testlabel',), options=["
"('force_color', False), ('no_color', False), ('option_a', 'x'), "
"('option_b', %s), ('option_c', '3'), ('pythonpath', None), "
"('settings', 'alternate_settings'), ('traceback', False), "
"('verbosity', 1)]" % option_b
)
class ExecuteFromCommandLine(SimpleTestCase):
def test_program_name_from_argv(self):
"""
Program name is computed from the execute_from_command_line()'s argv
argument, not sys.argv.
"""
args = ['help', 'shell']
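        # sys.argv[0] is patched to None below, so the usage line can only
        # mention django-admin if the program name comes from the argv
        # argument passed to execute_from_command_line().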
with captured_stdout() as out, captured_stderr() as err:
with mock.patch('sys.argv', [None] + args):
execute_from_command_line(['django-admin'] + args)
self.assertIn('usage: django-admin shell', out.getvalue())
self.assertEqual(err.getvalue(), '')
@override_settings(ROOT_URLCONF='admin_scripts.urls')
class StartProject(LiveServerTestCase, AdminScriptTestCase):
available_apps = [
'admin_scripts',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
def test_wrong_args(self):
"Make sure passing the wrong kinds of arguments outputs an error and prints usage"
out, err = self.run_django_admin(['startproject'])
self.assertNoOutput(out)
self.assertOutput(err, "usage:")
self.assertOutput(err, "You must provide a project name.")
def test_simple_project(self):
"Make sure the startproject management command creates a project"
args = ['startproject', 'testproject']
testproject_dir = os.path.join(self.test_dir, 'testproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
        # Running the command a second time fails because the module now exists.
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(
err,
"CommandError: 'testproject' conflicts with the name of an "
"existing Python module and cannot be used as a project name. "
"Please try another name.",
)
def test_invalid_project_name(self):
"Make sure the startproject management command validates a project name"
for bad_name in ('7testproject', '../testproject'):
with self.subTest(project_name=bad_name):
args = ['startproject', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"Error: '%s' is not a valid project name. Please make "
"sure the name is a valid identifier." % bad_name
)
self.assertFalse(os.path.exists(testproject_dir))
def test_importable_project_name(self):
"""
startproject validates that project name doesn't clash with existing
Python modules.
"""
bad_name = 'os'
args = ['startproject', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing "
"Python module and cannot be used as a project name. Please try "
"another name."
)
self.assertFalse(os.path.exists(testproject_dir))
def test_simple_project_different_directory(self):
"Make sure the startproject management command creates a project in a specific directory"
args = ['startproject', 'testproject', 'othertestproject']
testproject_dir = os.path.join(self.test_dir, 'othertestproject')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py')))
        # Running the command a second time warns that conflicting files won't be replaced.
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(
err,
"already exists. Overlaying a project into an existing directory "
"won't replace conflicting files."
)
def test_custom_project_template(self):
"Make sure the startproject management command is able to use a different project template"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_template_dir_with_trailing_slash(self):
"Ticket 17475: Template dir passed has a trailing path separator"
template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep)
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_custom_project_template_from_tarball_by_path(self):
"Make sure the startproject management command is able to use a different project template from a tarball"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject']
testproject_dir = os.path.join(self.test_dir, 'tarballtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_to_alternative_location(self):
"Startproject can use a project template from a tarball and create it in a specified location"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation']
testproject_dir = os.path.join(self.test_dir, 'altlocation')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_by_url(self):
"""
The startproject management command is able to use a different project
template from a tarball via a URL.
"""
template_url = '%s/custom_templates/project_template.tgz' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(self.test_dir, 'urltestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_project_template_tarball_url(self):
"Startproject management command handles project template tar/zip balls from non-canonical urls"
template_url = '%s/custom_templates/project_template.tgz/' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(self.test_dir, 'urltestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_file_without_extension(self):
"Make sure the startproject management command is able to render custom files"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
base_path = os.path.join(testproject_dir, 'additional_dir')
for f in ('Procfile', 'additional_file.py', 'requirements.txt'):
self.assertTrue(os.path.exists(os.path.join(base_path, f)))
with open(os.path.join(base_path, f)) as fh:
self.assertEqual(fh.read().strip(), '# some file for customtestproject test project')
def test_custom_project_template_context_variables(self):
"Make sure template context variables are rendered with proper values"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'another_project', 'project_dir']
testproject_dir = os.path.join(self.test_dir, 'project_dir')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'manage.py')
with open(test_manage_py) as fp:
content = fp.read()
self.assertIn("project_name = 'another_project'", content)
self.assertIn("project_directory = '%s'" % testproject_dir, content)
def test_no_escaping_of_project_variables(self):
"Make sure template context variables are not html escaped"
# We're using a custom command so we need the alternate settings
self.write_settings('alternate_settings.py')
template_path = os.path.join(custom_templates_dir, 'project_template')
args = [
'custom_startproject', '--template', template_path,
'another_project', 'project_dir', '--extra', '<&>',
'--settings=alternate_settings',
]
testproject_dir = os.path.join(self.test_dir, 'project_dir')
os.mkdir(testproject_dir)
out, err = self.run_manage(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py')
with open(test_manage_py) as fp:
content = fp.read()
self.assertIn("<&>", content)
def test_custom_project_destination_missing(self):
"""
Make sure an exception is raised when the provided
destination directory doesn't exist
"""
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2']
testproject_dir = os.path.join(self.test_dir, 'project_dir2')
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir)
self.assertFalse(os.path.exists(testproject_dir))
def test_custom_project_template_with_non_ascii_templates(self):
"""
The startproject management command is able to render templates with
non-ASCII content.
"""
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt')
with open(path, encoding='utf-8') as f:
self.assertEqual(f.read().splitlines(False), [
'Some non-ASCII text for testing ticket #18091:',
'üäö €'])
class StartApp(AdminScriptTestCase):
def test_invalid_name(self):
"""startapp validates that app name is a valid Python identifier."""
for bad_name in ('7testproject', '../testproject'):
with self.subTest(app_name=bad_name):
args = ['startapp', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: '{}' is not a valid app name. Please make "
"sure the name is a valid identifier.".format(bad_name)
)
self.assertFalse(os.path.exists(testproject_dir))
def test_importable_name(self):
"""
startapp validates that app name doesn't clash with existing Python
modules.
"""
bad_name = 'os'
args = ['startapp', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing "
"Python module and cannot be used as an app name. Please try "
"another name."
)
self.assertFalse(os.path.exists(testproject_dir))
def test_invalid_target_name(self):
for bad_target in ('invalid.dir_name', '7invalid_dir_name', '.invalid_dir_name'):
with self.subTest(bad_target):
_, err = self.run_django_admin(['startapp', 'app', bad_target])
self.assertOutput(
err,
"CommandError: '%s' is not a valid app directory. Please "
"make sure the directory is a valid identifier." % bad_target
)
def test_importable_target_name(self):
_, err = self.run_django_admin(['startapp', 'app', 'os'])
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing Python "
"module and cannot be used as an app directory. Please try "
"another directory."
)
def test_trailing_slash_in_target_app_directory_name(self):
app_dir = os.path.join(self.test_dir, 'apps', 'app1')
os.makedirs(app_dir)
_, err = self.run_django_admin(['startapp', 'app', os.path.join('apps', 'app1', '')])
self.assertNoOutput(err)
self.assertIs(os.path.exists(os.path.join(app_dir, 'apps.py')), True)
def test_overlaying_app(self):
# Use a subdirectory so it is outside the PYTHONPATH.
os.makedirs(os.path.join(self.test_dir, 'apps/app1'))
self.run_django_admin(['startapp', 'app1', 'apps/app1'])
out, err = self.run_django_admin(['startapp', 'app2', 'apps/app1'])
self.assertOutput(
err,
"already exists. Overlaying an app into an existing directory "
"won't replace conflicting files."
)
def test_template(self):
out, err = self.run_django_admin(['startapp', 'new_app'])
self.assertNoOutput(err)
app_path = os.path.join(self.test_dir, 'new_app')
self.assertIs(os.path.exists(app_path), True)
with open(os.path.join(app_path, 'apps.py')) as f:
content = f.read()
self.assertIn('class NewAppConfig(AppConfig)', content)
self.assertIn(
"default_auto_field = 'django.db.models.BigAutoField'",
content,
)
self.assertIn("name = 'new_app'", content)
class DiffSettings(AdminScriptTestCase):
"""Tests for diffsettings management command."""
def test_basic(self):
"""Runs without error and emits settings diff."""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "FOO = 'bar' ###")
# Attributes from django.conf.Settings don't appear.
self.assertNotInOutput(out, 'is_overridden = ')
def test_settings_configured(self):
out, err = self.run_manage(['diffsettings'], manage_py='configured_settings_manage.py')
self.assertNoOutput(err)
self.assertOutput(out, 'CUSTOM = 1 ###\nDEBUG = True')
# Attributes from django.conf.UserSettingsHolder don't appear.
self.assertNotInOutput(out, 'default_settings = ')
def test_dynamic_settings_configured(self):
# Custom default settings appear.
out, err = self.run_manage(['diffsettings'], manage_py='configured_dynamic_settings_manage.py')
self.assertNoOutput(err)
self.assertOutput(out, "FOO = 'bar' ###")
def test_all(self):
"""The all option also shows settings with the default value."""
self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'})
args = ['diffsettings', '--settings=settings_to_diff', '--all']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "### STATIC_URL = None")
def test_custom_default(self):
"""
The --default option specifies an alternate settings module for
comparison.
"""
self.write_settings('settings_default.py', sdict={'FOO': '"foo"', 'BAR': '"bar1"'})
self.write_settings('settings_to_diff.py', sdict={'FOO': '"foo"', 'BAR': '"bar2"'})
out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--default=settings_default'])
self.assertNoOutput(err)
self.assertNotInOutput(out, "FOO")
self.assertOutput(out, "BAR = 'bar2'")
def test_unified(self):
"""--output=unified emits settings diff in unified mode."""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff', '--output=unified']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "+ FOO = 'bar'")
self.assertOutput(out, "- SECRET_KEY = ''")
self.assertOutput(out, "+ SECRET_KEY = 'django_tests_secret_key'")
self.assertNotInOutput(out, " APPEND_SLASH = True")
def test_unified_all(self):
"""
--output=unified --all emits settings diff in unified mode and includes
settings with the default value.
"""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff', '--output=unified', '--all']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, " APPEND_SLASH = True")
self.assertOutput(out, "+ FOO = 'bar'")
self.assertOutput(out, "- SECRET_KEY = ''")
class Dumpdata(AdminScriptTestCase):
"""Tests for dumpdata management command."""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_pks_parsing(self):
"""Regression for #20509
Test would raise an exception rather than printing an error message.
"""
args = ['dumpdata', '--pks=1']
out, err = self.run_manage(args)
self.assertOutput(err, "You can only use --pks option with one model")
self.assertNoOutput(out)
class MainModule(AdminScriptTestCase):
"""python -m django works like django-admin."""
def test_program_name_in_help(self):
out, err = self.run_test(['-m', 'django', 'help'])
self.assertOutput(out, "Type 'python -m django help <subcommand>' for help on a specific subcommand.")
class DjangoAdminSuggestions(AdminScriptTestCase):
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_suggestions(self):
args = ['rnserver', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'rnserver'. Did you mean runserver?")
def test_no_suggestions(self):
args = ['abcdef', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertNotInOutput(err, 'Did you mean')
|
7a3e8db97a6a257a20484c6b731b6054e166400304afa94b0696fd8350807394 | from math import ceil
from operator import attrgetter
from django.db import IntegrityError, NotSupportedError, connection
from django.db.models import FileField, Value
from django.db.models.functions import Lower
from django.test import (
TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature,
)
from .models import (
BigAutoFieldModel, Country, NoFields, NullableFields, Pizzeria,
ProxyCountry, ProxyMultiCountry, ProxyMultiProxyCountry, ProxyProxyCountry,
Restaurant, SmallAutoFieldModel, State, TwoFields,
)
class BulkCreateTests(TestCase):
def setUp(self):
self.data = [
Country(name="United States of America", iso_two_letter="US"),
Country(name="The Netherlands", iso_two_letter="NL"),
Country(name="Germany", iso_two_letter="DE"),
Country(name="Czech Republic", iso_two_letter="CZ")
]
def test_simple(self):
created = Country.objects.bulk_create(self.data)
self.assertEqual(created, self.data)
self.assertQuerysetEqual(Country.objects.order_by("-name"), [
"United States of America", "The Netherlands", "Germany", "Czech Republic"
], attrgetter("name"))
created = Country.objects.bulk_create([])
self.assertEqual(created, [])
self.assertEqual(Country.objects.count(), 4)
@skipUnlessDBFeature('has_bulk_insert')
def test_efficiency(self):
with self.assertNumQueries(1):
Country.objects.bulk_create(self.data)
@skipUnlessDBFeature('has_bulk_insert')
def test_long_non_ascii_text(self):
"""
Inserting non-ASCII values with a length in the range 2001 to 4000
characters, i.e. 4002 to 8000 bytes, must be set as a CLOB on Oracle
(#22144).
"""
Country.objects.bulk_create([Country(description='Ж' * 3000)])
self.assertEqual(Country.objects.count(), 1)
@skipUnlessDBFeature('has_bulk_insert')
def test_long_and_short_text(self):
Country.objects.bulk_create([
Country(description='a' * 4001),
Country(description='a'),
Country(description='Ж' * 2001),
Country(description='Ж'),
])
self.assertEqual(Country.objects.count(), 4)
def test_multi_table_inheritance_unsupported(self):
expected_message = "Can't bulk create a multi-table inherited model"
with self.assertRaisesMessage(ValueError, expected_message):
Pizzeria.objects.bulk_create([
Pizzeria(name="The Art of Pizza"),
])
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiCountry.objects.bulk_create([
ProxyMultiCountry(name="Fillory", iso_two_letter="FL"),
])
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiProxyCountry.objects.bulk_create([
ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"),
])
def test_proxy_inheritance_supported(self):
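        # A proxy model's manager accepts both proxy instances and instances
        # of the concrete base model.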
ProxyCountry.objects.bulk_create([
ProxyCountry(name="Qwghlm", iso_two_letter="QW"),
Country(name="Tortall", iso_two_letter="TA"),
])
self.assertQuerysetEqual(ProxyCountry.objects.all(), {
"Qwghlm", "Tortall"
}, attrgetter("name"), ordered=False)
ProxyProxyCountry.objects.bulk_create([
ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"),
])
self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), {
"Qwghlm", "Tortall", "Netherlands",
}, attrgetter("name"), ordered=False)
def test_non_auto_increment_pk(self):
State.objects.bulk_create([
State(two_letter_code=s)
for s in ["IL", "NY", "CA", "ME"]
])
self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
"CA", "IL", "ME", "NY",
], attrgetter("two_letter_code"))
@skipUnlessDBFeature('has_bulk_insert')
def test_non_auto_increment_pk_efficiency(self):
with self.assertNumQueries(1):
State.objects.bulk_create([
State(two_letter_code=s)
for s in ["IL", "NY", "CA", "ME"]
])
self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
"CA", "IL", "ME", "NY",
], attrgetter("two_letter_code"))
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
"""
Zero as id for AutoField should raise exception in MySQL, because MySQL
does not allow zero for automatic primary key if the
NO_AUTO_VALUE_ON_ZERO SQL mode is not enabled.
"""
valid_country = Country(name='Germany', iso_two_letter='DE')
invalid_country = Country(id=0, name='Poland', iso_two_letter='PL')
msg = 'The database backend does not accept 0 as a value for AutoField.'
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create([valid_country, invalid_country])
def test_batch_same_vals(self):
# SQLite had a problem where all the same-valued models were
# collapsed to one insert.
Restaurant.objects.bulk_create([
Restaurant(name='foo') for i in range(0, 2)
])
self.assertEqual(Restaurant.objects.count(), 2)
def test_large_batch(self):
TwoFields.objects.bulk_create([
TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
])
self.assertEqual(TwoFields.objects.count(), 1001)
self.assertEqual(
TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(),
101)
self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101)
@skipUnlessDBFeature('has_bulk_insert')
def test_large_single_field_batch(self):
# SQLite had a problem with more than 500 UNIONed selects in single
# query.
Restaurant.objects.bulk_create([
Restaurant() for i in range(0, 501)
])
@skipUnlessDBFeature('has_bulk_insert')
def test_large_batch_efficiency(self):
with override_settings(DEBUG=True):
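            # Query logging is only active when DEBUG=True; clear any earlier
            # entries before counting the queries issued by bulk_create().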
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)
])
self.assertLess(len(connection.queries), 10)
def test_large_batch_mixed(self):
"""
Test inserting a large batch with objects having primary key set
mixed together with objects without PK set.
"""
TwoFields.objects.bulk_create([
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)
])
self.assertEqual(TwoFields.objects.count(), 1000)
        # We can't assume much about the IDs created, except that the ones
        # explicitly assigned above must exist.
id_range = range(100000, 101000, 2)
self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500)
self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500)
@skipUnlessDBFeature('has_bulk_insert')
def test_large_batch_mixed_efficiency(self):
"""
Test inserting a large batch with objects having primary key set
mixed together with objects without PK set.
"""
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create([
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)])
self.assertLess(len(connection.queries), 10)
def test_explicit_batch_size(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)]
num_objs = len(objs)
TwoFields.objects.bulk_create(objs, batch_size=1)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=2)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=3)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=num_objs)
self.assertEqual(TwoFields.objects.count(), num_objs)
def test_empty_model(self):
NoFields.objects.bulk_create([NoFields() for i in range(2)])
self.assertEqual(NoFields.objects.count(), 2)
@skipUnlessDBFeature('has_bulk_insert')
def test_explicit_batch_size_efficiency(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)]
with self.assertNumQueries(2):
TwoFields.objects.bulk_create(objs, 50)
TwoFields.objects.all().delete()
with self.assertNumQueries(1):
TwoFields.objects.bulk_create(objs, len(objs))
@skipUnlessDBFeature('has_bulk_insert')
def test_explicit_batch_size_respects_max_batch_size(self):
objs = [Country() for i in range(1000)]
fields = ['name', 'iso_two_letter', 'description']
max_batch_size = max(connection.ops.bulk_batch_size(fields, objs), 1)
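        # The requested batch_size is capped at the backend maximum, so the
        # number of INSERT queries follows connection.ops.bulk_batch_size().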
with self.assertNumQueries(ceil(len(objs) / max_batch_size)):
Country.objects.bulk_create(objs, batch_size=max_batch_size + 1)
@skipUnlessDBFeature('has_bulk_insert')
def test_bulk_insert_expressions(self):
Restaurant.objects.bulk_create([
Restaurant(name="Sam's Shake Shack"),
Restaurant(name=Lower(Value("Betty's Beetroot Bar")))
])
bbb = Restaurant.objects.filter(name="betty's beetroot bar")
self.assertEqual(bbb.count(), 1)
@skipUnlessDBFeature('has_bulk_insert')
def test_bulk_insert_nullable_fields(self):
fk_to_auto_fields = {
'auto_field': NoFields.objects.create(),
'small_auto_field': SmallAutoFieldModel.objects.create(),
'big_auto_field': BigAutoFieldModel.objects.create(),
}
# NULL can be mixed with other values in nullable fields
nullable_fields = [field for field in NullableFields._meta.get_fields() if field.name != 'id']
NullableFields.objects.bulk_create([
NullableFields(**{**fk_to_auto_fields, field.name: None})
for field in nullable_fields
])
self.assertEqual(NullableFields.objects.count(), len(nullable_fields))
for field in nullable_fields:
with self.subTest(field=field):
field_value = '' if isinstance(field, FileField) else None
self.assertEqual(NullableFields.objects.filter(**{field.name: field_value}).count(), 1)
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
def test_set_pk_and_insert_single_item(self):
with self.assertNumQueries(1):
countries = Country.objects.bulk_create([self.data[0]])
self.assertEqual(len(countries), 1)
self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0])
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
def test_set_pk_and_query_efficiency(self):
with self.assertNumQueries(1):
countries = Country.objects.bulk_create(self.data)
self.assertEqual(len(countries), 4)
self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0])
self.assertEqual(Country.objects.get(pk=countries[1].pk), countries[1])
self.assertEqual(Country.objects.get(pk=countries[2].pk), countries[2])
self.assertEqual(Country.objects.get(pk=countries[3].pk), countries[3])
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
def test_set_state(self):
country_nl = Country(name='Netherlands', iso_two_letter='NL')
country_be = Country(name='Belgium', iso_two_letter='BE')
Country.objects.bulk_create([country_nl])
country_be.save()
        # Objects saved via bulk_create() and save() should have equal state.
self.assertEqual(country_nl._state.adding, country_be._state.adding)
self.assertEqual(country_nl._state.db, country_be._state.db)
def test_set_state_with_pk_specified(self):
state_ca = State(two_letter_code='CA')
state_ny = State(two_letter_code='NY')
State.objects.bulk_create([state_ca])
state_ny.save()
        # Objects saved via bulk_create() and save() should have equal state.
self.assertEqual(state_ca._state.adding, state_ny._state.adding)
self.assertEqual(state_ca._state.db, state_ny._state.db)
@skipIfDBFeature('supports_ignore_conflicts')
def test_ignore_conflicts_value_error(self):
message = 'This database backend does not support ignoring conflicts.'
with self.assertRaisesMessage(NotSupportedError, message):
TwoFields.objects.bulk_create(self.data, ignore_conflicts=True)
@skipUnlessDBFeature('supports_ignore_conflicts')
def test_ignore_conflicts_ignore(self):
data = [
TwoFields(f1=1, f2=1),
TwoFields(f1=2, f2=2),
TwoFields(f1=3, f2=3),
]
TwoFields.objects.bulk_create(data)
self.assertEqual(TwoFields.objects.count(), 3)
# With ignore_conflicts=True, conflicts are ignored.
conflicting_objects = [
TwoFields(f1=2, f2=2),
TwoFields(f1=3, f2=3),
]
TwoFields.objects.bulk_create([conflicting_objects[0]], ignore_conflicts=True)
TwoFields.objects.bulk_create(conflicting_objects, ignore_conflicts=True)
self.assertEqual(TwoFields.objects.count(), 3)
self.assertIsNone(conflicting_objects[0].pk)
self.assertIsNone(conflicting_objects[1].pk)
# New objects are created and conflicts are ignored.
new_object = TwoFields(f1=4, f2=4)
TwoFields.objects.bulk_create(conflicting_objects + [new_object], ignore_conflicts=True)
self.assertEqual(TwoFields.objects.count(), 4)
self.assertIsNone(new_object.pk)
        # Without ignore_conflicts=True, the conflicting rows raise an IntegrityError.
with self.assertRaises(IntegrityError):
TwoFields.objects.bulk_create(conflicting_objects)
def test_nullable_fk_after_parent(self):
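        # The parent is unsaved when assigned; it only needs a primary key by
        # the time bulk_create() inserts the child row.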
parent = NoFields()
child = NullableFields(auto_field=parent, integer_field=88)
parent.save()
NullableFields.objects.bulk_create([child])
child = NullableFields.objects.get(integer_field=88)
self.assertEqual(child.auto_field, parent)
@skipUnlessDBFeature('can_return_rows_from_bulk_insert')
def test_nullable_fk_after_parent_bulk_create(self):
parent = NoFields()
child = NullableFields(auto_field=parent, integer_field=88)
NoFields.objects.bulk_create([parent])
NullableFields.objects.bulk_create([child])
child = NullableFields.objects.get(integer_field=88)
self.assertEqual(child.auto_field, parent)
def test_unsaved_parent(self):
parent = NoFields()
msg = (
"bulk_create() prohibited to prevent data loss due to unsaved "
"related object 'auto_field'."
)
with self.assertRaisesMessage(ValueError, msg):
NullableFields.objects.bulk_create([NullableFields(auto_field=parent)])
def test_invalid_batch_size_exception(self):
msg = 'Batch size must be a positive integer.'
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create([], batch_size=-1)
|
2a26c78a66c8c5821265c637fada705dd45c78dc56a94bf5a0093c36bd2b876c | import base64
import hashlib
import os
import shutil
import sys
import tempfile as sys_tempfile
import unittest
from io import BytesIO, StringIO
from unittest import mock
from urllib.parse import quote
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import temp as tempfile
from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile
from django.http.multipartparser import (
FILE, MultiPartParser, MultiPartParserError, Parser, parse_header,
)
from django.test import SimpleTestCase, TestCase, client, override_settings
from . import uploadhandler
from .models import FileModel
UNICODE_FILENAME = 'test-0123456789_中文_Orléans.jpg'
MEDIA_ROOT = sys_tempfile.mkdtemp()
UPLOAD_TO = os.path.join(MEDIA_ROOT, 'test_upload')
CANDIDATE_TRAVERSAL_FILE_NAMES = [
'/tmp/hax0rd.txt', # Absolute path, *nix-style.
'C:\\Windows\\hax0rd.txt', # Absolute path, win-style.
'C:/Windows/hax0rd.txt', # Absolute path, broken-style.
'\\tmp\\hax0rd.txt', # Absolute path, broken in a different way.
'/tmp\\hax0rd.txt', # Absolute path, broken by mixing.
'subdir/hax0rd.txt', # Descendant path, *nix-style.
'subdir\\hax0rd.txt', # Descendant path, win-style.
'sub/dir\\hax0rd.txt', # Descendant path, mixed.
'../../hax0rd.txt', # Relative path, *nix-style.
'..\\..\\hax0rd.txt', # Relative path, win-style.
'../..\\hax0rd.txt', # Relative path, mixed.
    '..&#x2F;hax0rd.txt',  # HTML entities.
    '..&sol;hax0rd.txt',  # HTML entities.
]
CANDIDATE_INVALID_FILE_NAMES = [
'/tmp/', # Directory, *nix-style.
'c:\\tmp\\', # Directory, win-style.
'/tmp/.', # Directory dot, *nix-style.
    'c:\\tmp\\.',  # Directory dot, win-style.
'/tmp/..', # Parent directory, *nix-style.
'c:\\tmp\\..', # Parent directory, win-style.
'', # Empty filename.
]
@override_settings(MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF='file_uploads.urls', MIDDLEWARE=[])
class FileUploadTests(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
os.makedirs(MEDIA_ROOT, exist_ok=True)
cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT)
def test_upload_name_is_validated(self):
candidates = [
'/tmp/',
'/tmp/..',
'/tmp/.',
]
if sys.platform == 'win32':
candidates.extend([
'c:\\tmp\\',
'c:\\tmp\\..',
'c:\\tmp\\.',
])
for file_name in candidates:
with self.subTest(file_name=file_name):
self.assertRaises(SuspiciousFileOperation, UploadedFile, name=file_name)
def test_simple_upload(self):
with open(__file__, 'rb') as fp:
post_data = {
'name': 'Ringo',
'file_field': fp,
}
response = self.client.post('/upload/', post_data)
self.assertEqual(response.status_code, 200)
def test_large_upload(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".file1") as file1, file(suffix=".file2") as file2:
file1.write(b'a' * (2 ** 21))
file1.seek(0)
file2.write(b'a' * (10 * 2 ** 20))
file2.seek(0)
post_data = {
'name': 'Ringo',
'file_field1': file1,
'file_field2': file2,
}
for key in list(post_data):
try:
post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest()
post_data[key].seek(0)
except AttributeError:
post_data[key + '_hash'] = hashlib.sha1(post_data[key].encode()).hexdigest()
response = self.client.post('/verify/', post_data)
self.assertEqual(response.status_code, 200)
def _test_base64_upload(self, content, encode=base64.b64encode):
payload = client.FakePayload("\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="test.txt"',
'Content-Type: application/octet-stream',
'Content-Transfer-Encoding: base64',
'']))
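        # The Content-Transfer-Encoding header above makes the multipart
        # parser decode the base64 body written below.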
payload.write(b'\r\n' + encode(content.encode()) + b'\r\n')
payload.write('--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo_content/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.json()['file'], content)
def test_base64_upload(self):
self._test_base64_upload("This data will be transmitted base64-encoded.")
def test_big_base64_upload(self):
self._test_base64_upload("Big data" * 68000) # > 512Kb
def test_big_base64_newlines_upload(self):
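        # base64.encodebytes() inserts a newline every 76 characters, so this
        # exercises decoding of base64 content that contains line breaks.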
self._test_base64_upload("Big data" * 68000, encode=base64.encodebytes)
def test_unicode_file_name(self):
with sys_tempfile.TemporaryDirectory() as temp_dir:
# This file contains Chinese symbols and an accented char in the name.
with open(os.path.join(temp_dir, UNICODE_FILENAME), 'w+b') as file1:
file1.write(b'b' * (2 ** 10))
file1.seek(0)
response = self.client.post('/unicode_name/', {'file_unicode': file1})
self.assertEqual(response.status_code, 200)
def test_unicode_file_name_rfc2231(self):
"""
Test receiving file upload when filename is encoded with RFC2231
(#22971).
"""
payload = client.FakePayload()
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file_unicode"; filename*=UTF-8\'\'%s' % quote(UNICODE_FILENAME),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n'
]))
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/unicode_name/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_name_rfc2231(self):
"""
Test receiving file upload when filename is encoded with RFC2231
(#22971).
"""
payload = client.FakePayload()
payload.write(
'\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name*=UTF-8\'\'file_unicode; filename*=UTF-8\'\'%s' % quote(
UNICODE_FILENAME
),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n'
])
)
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/unicode_name/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_file_name_rfc2231_with_double_quotes(self):
payload = client.FakePayload()
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file_unicode"; '
'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n',
]))
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/unicode_name/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_unicode_name_rfc2231_with_double_quotes(self):
payload = client.FakePayload()
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name*="UTF-8\'\'file_unicode"; '
'filename*="UTF-8\'\'%s"' % quote(UNICODE_FILENAME),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n'
]))
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/unicode_name/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
def test_blank_filenames(self):
"""
Receiving file upload when filename is blank (before and after
sanitization) should be okay.
"""
filenames = [
'',
# Normalized by MultiPartParser.IE_sanitize().
'C:\\Windows\\',
# Normalized by os.path.basename().
'/',
'ends-with-slash/',
]
payload = client.FakePayload()
for i, name in enumerate(filenames):
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n'
]))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
self.assertEqual(response.status_code, 200)
# Empty filenames should be ignored
received = response.json()
for i, name in enumerate(filenames):
self.assertIsNone(received.get('file%s' % i))
def test_dangerous_file_names(self):
"""Uploaded file names should be sanitized before ever reaching the view."""
# This test simulates possible directory traversal attacks by a
        # malicious uploader. We have to do some monkey business here to
        # construct a malicious payload with an invalid file name (containing
        # os.sep or os.pardir). This is similar to what an attacker would need
        # to do when trying such an attack.
payload = client.FakePayload()
for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES):
payload.write('\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
'Content-Type: application/octet-stream',
'',
'You got pwnd.\r\n'
]))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
# The filenames should have been sanitized by the time it got to the view.
received = response.json()
for i, name in enumerate(CANDIDATE_TRAVERSAL_FILE_NAMES):
got = received["file%s" % i]
self.assertEqual(got, "hax0rd.txt")
def test_filename_overflow(self):
"""File names over 256 characters (dangerous on some platforms) get fixed up."""
long_str = 'f' * 300
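        # Uploaded file names are truncated so that, including the extension,
        # they fit within 255 characters.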
cases = [
# field name, filename, expected
('long_filename', '%s.txt' % long_str, '%s.txt' % long_str[:251]),
('long_extension', 'foo.%s' % long_str, '.%s' % long_str[:254]),
('no_extension', long_str, long_str[:255]),
('no_filename', '.%s' % long_str, '.%s' % long_str[:254]),
('long_everything', '%s.%s' % (long_str, long_str), '.%s' % long_str[:254]),
]
payload = client.FakePayload()
for name, filename, _ in cases:
payload.write("\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="{}"; filename="{}"',
'Content-Type: application/octet-stream',
'',
'Oops.',
''
]).format(name, filename))
payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': "/echo/",
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
result = response.json()
for name, _, expected in cases:
got = result[name]
self.assertEqual(expected, got, 'Mismatch for {}'.format(name))
self.assertLess(len(got), 256,
"Got a long file name (%s characters)." % len(got))
def test_file_content(self):
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
no_content_type.write(b'no content')
no_content_type.seek(0)
simple_file.write(b'text content')
simple_file.seek(0)
simple_file.content_type = 'text/plain'
string_io = StringIO('string content')
bytes_io = BytesIO(b'binary content')
response = self.client.post('/echo_content/', {
'no_content_type': no_content_type,
'simple_file': simple_file,
'string': string_io,
'binary': bytes_io,
})
received = response.json()
self.assertEqual(received['no_content_type'], 'no content')
self.assertEqual(received['simple_file'], 'text content')
self.assertEqual(received['string'], 'string content')
self.assertEqual(received['binary'], 'binary content')
def test_content_type_extra(self):
"""Uploaded files may have content type parameters available."""
file = tempfile.NamedTemporaryFile
with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
no_content_type.write(b'something')
no_content_type.seek(0)
simple_file.write(b'something')
simple_file.seek(0)
simple_file.content_type = 'text/plain; test-key=test_value'
response = self.client.post('/echo_content_type_extra/', {
'no_content_type': no_content_type,
'simple_file': simple_file,
})
received = response.json()
self.assertEqual(received['no_content_type'], {})
self.assertEqual(received['simple_file'], {'test-key': 'test_value'})
def test_truncated_multipart_handled_gracefully(self):
"""
If passed an incomplete multipart message, MultiPartParser does not
        attempt to read beyond the end of the stream, and it gracefully
        handles the parts that could be parsed.
"""
payload_str = "\r\n".join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="file"; filename="foo.txt"',
'Content-Type: application/octet-stream',
'',
'file contents'
'--' + client.BOUNDARY + '--',
'',
])
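        # Slice off the last 10 characters so the closing boundary is
        # incomplete, simulating a truncated request body.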
payload = client.FakePayload(payload_str[:-10])
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
self.assertEqual(self.client.request(**r).json(), {})
def test_empty_multipart_handled_gracefully(self):
"""
If passed an empty multipart message, MultiPartParser will return
an empty QueryDict.
"""
r = {
'CONTENT_LENGTH': 0,
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/echo/',
'REQUEST_METHOD': 'POST',
'wsgi.input': client.FakePayload(b''),
}
self.assertEqual(self.client.request(**r).json(), {})
def test_custom_upload_handler(self):
file = tempfile.NamedTemporaryFile
with file() as smallfile, file() as bigfile:
# A small file (under the 5M quota)
smallfile.write(b'a' * (2 ** 21))
smallfile.seek(0)
# A big file (over the quota)
bigfile.write(b'a' * (10 * 2 ** 20))
bigfile.seek(0)
# Small file posting should work.
self.assertIn('f', self.client.post('/quota/', {'f': smallfile}).json())
# Large files don't go through.
self.assertNotIn('f', self.client.post("/quota/", {'f': bigfile}).json())
def test_broken_custom_upload_handler(self):
with tempfile.NamedTemporaryFile() as file:
file.write(b'a' * (2 ** 21))
file.seek(0)
msg = 'You cannot alter upload handlers after the upload has been processed.'
with self.assertRaisesMessage(AttributeError, msg):
self.client.post('/quota/broken/', {'f': file})
def test_stop_upload_temporary_file_handler(self):
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b'a')
temp_file.seek(0)
response = self.client.post('/temp_file/stop_upload/', {'file': temp_file})
temp_path = response.json()['temp_path']
self.assertIs(os.path.exists(temp_path), False)
def test_upload_interrupted_temporary_file_handler(self):
# Simulate an interrupted upload by omitting the closing boundary.
class MockedParser(Parser):
def __iter__(self):
for item in super().__iter__():
item_type, meta_data, field_stream = item
yield item_type, meta_data, field_stream
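                    # Stop iterating right after the first FILE part so the
                    # stream ends before the closing boundary is seen.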
if item_type == FILE:
return
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(b'a')
temp_file.seek(0)
with mock.patch(
'django.http.multipartparser.Parser',
MockedParser,
):
response = self.client.post(
'/temp_file/upload_interrupted/',
{'file': temp_file},
)
temp_path = response.json()['temp_path']
self.assertIs(os.path.exists(temp_path), False)
def test_fileupload_getlist(self):
file = tempfile.NamedTemporaryFile
with file() as file1, file() as file2, file() as file2a:
file1.write(b'a' * (2 ** 23))
file1.seek(0)
file2.write(b'a' * (2 * 2 ** 18))
file2.seek(0)
file2a.write(b'a' * (5 * 2 ** 20))
file2a.seek(0)
response = self.client.post('/getlist_count/', {
'file1': file1,
'field1': 'test',
'field2': 'test3',
'field3': 'test5',
'field4': 'test6',
'field5': 'test7',
'file2': (file2, file2a)
})
got = response.json()
self.assertEqual(got.get('file1'), 1)
self.assertEqual(got.get('file2'), 2)
def test_fileuploads_closed_at_request_end(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post('/fd_closing/t/', {
'file': f1,
'file2': (f2a, f2b),
})
request = response.wsgi_request
# The files were parsed.
self.assertTrue(hasattr(request, '_files'))
file = request._files['file']
self.assertTrue(file.closed)
files = request._files.getlist('file2')
self.assertTrue(files[0].closed)
self.assertTrue(files[1].closed)
def test_no_parsing_triggered_by_fd_closing(self):
file = tempfile.NamedTemporaryFile
with file() as f1, file() as f2a, file() as f2b:
response = self.client.post('/fd_closing/f/', {
'file': f1,
'file2': (f2a, f2b),
})
request = response.wsgi_request
# The fd closing logic doesn't trigger parsing of the stream
self.assertFalse(hasattr(request, '_files'))
def test_file_error_blocking(self):
"""
The server should not block when there are upload errors (bug #8622).
This can happen if something -- i.e. an exception handler -- tries to
access POST while handling an error in parsing POST. This shouldn't
cause an infinite loop!
"""
class POSTAccessingHandler(client.ClientHandler):
"""A handler that'll access POST during an exception."""
def handle_uncaught_exception(self, request, resolver, exc_info):
ret = super().handle_uncaught_exception(request, resolver, exc_info)
request.POST # evaluate
return ret
        # Maybe this is a little more complicated than it needs to be, but if
# the django.test.client.FakePayload.read() implementation changes then
# this test would fail. So we need to know exactly what kind of error
# it raises when there is an attempt to read more than the available bytes:
try:
client.FakePayload(b'a').read(2)
except Exception as err:
reference_error = err
# install the custom handler that tries to access request.POST
self.client.handler = POSTAccessingHandler()
with open(__file__, 'rb') as fp:
post_data = {
'name': 'Ringo',
'file_field': fp,
}
try:
self.client.post('/upload_errors/', post_data)
except reference_error.__class__ as err:
self.assertNotEqual(
str(err),
str(reference_error),
"Caught a repeated exception that'll cause an infinite loop in file uploads."
)
except Exception as err:
# CustomUploadError is the error that should have been raised
self.assertEqual(err.__class__, uploadhandler.CustomUploadError)
def test_filename_case_preservation(self):
"""
The storage backend shouldn't mess with the case of the filenames
uploaded.
"""
# Synthesize the contents of a file upload with a mixed case filename
# so we don't have to carry such a file in the Django tests source code
# tree.
vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
post_data = [
'--%(boundary)s',
'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
'Content-Type: application/octet-stream',
'',
'file contents\n'
'',
'--%(boundary)s--\r\n',
]
response = self.client.post(
'/filename_case/',
'\r\n'.join(post_data) % vars,
'multipart/form-data; boundary=%(boundary)s' % vars
)
self.assertEqual(response.status_code, 200)
id = int(response.content)
obj = FileModel.objects.get(pk=id)
# The name of the file uploaded and the file stored in the server-side
# shouldn't differ.
self.assertEqual(os.path.basename(obj.testfile.path), 'MiXeD_cAsE.txt')
def test_filename_traversal_upload(self):
os.makedirs(UPLOAD_TO, exist_ok=True)
tests = [
            '..&#x2F;test.txt',
            '..&sol;test.txt',
]
for file_name in tests:
with self.subTest(file_name=file_name):
payload = client.FakePayload()
payload.write(
'\r\n'.join([
'--' + client.BOUNDARY,
'Content-Disposition: form-data; name="my_file"; '
'filename="%s";' % file_name,
'Content-Type: text/plain',
'',
'file contents.\r\n',
'\r\n--' + client.BOUNDARY + '--\r\n',
]),
)
r = {
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': client.MULTIPART_CONTENT,
'PATH_INFO': '/upload_traversal/',
'REQUEST_METHOD': 'POST',
'wsgi.input': payload,
}
response = self.client.request(**r)
result = response.json()
self.assertEqual(response.status_code, 200)
self.assertEqual(result['file_name'], 'test.txt')
self.assertIs(
os.path.exists(os.path.join(MEDIA_ROOT, 'test.txt')),
False,
)
self.assertIs(
os.path.exists(os.path.join(UPLOAD_TO, 'test.txt')),
True,
)
@override_settings(MEDIA_ROOT=MEDIA_ROOT)
class DirectoryCreationTests(SimpleTestCase):
"""
Tests for error handling during directory creation
via _save_FIELD_file (ticket #6450)
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
os.makedirs(MEDIA_ROOT, exist_ok=True)
cls.addClassCleanup(shutil.rmtree, MEDIA_ROOT)
def setUp(self):
self.obj = FileModel()
@unittest.skipIf(sys.platform == 'win32', "Python on Windows doesn't have working os.chmod().")
def test_readonly_root(self):
"""Permission errors are not swallowed"""
os.chmod(MEDIA_ROOT, 0o500)
self.addCleanup(os.chmod, MEDIA_ROOT, 0o700)
with self.assertRaises(PermissionError):
self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', b'x'), save=False)
def test_not_a_directory(self):
# Create a file with the upload directory name
open(UPLOAD_TO, 'wb').close()
self.addCleanup(os.remove, UPLOAD_TO)
msg = '%s exists and is not a directory.' % UPLOAD_TO
with self.assertRaisesMessage(FileExistsError, msg):
with SimpleUploadedFile('foo.txt', b'x') as file:
self.obj.testfile.save('foo.txt', file, save=False)
class MultiParserTests(SimpleTestCase):
def test_empty_upload_handlers(self):
# We're not actually parsing here; just checking if the parser properly
# instantiates with empty upload handlers.
MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': '1'
}, StringIO('x'), [], 'utf-8')
def test_invalid_content_type(self):
with self.assertRaisesMessage(MultiPartParserError, 'Invalid Content-Type: text/plain'):
MultiPartParser({
'CONTENT_TYPE': 'text/plain',
'CONTENT_LENGTH': '1',
}, StringIO('x'), [], 'utf-8')
def test_negative_content_length(self):
with self.assertRaisesMessage(MultiPartParserError, 'Invalid content length: -1'):
MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': -1,
}, StringIO('x'), [], 'utf-8')
def test_bad_type_content_length(self):
multipart_parser = MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': 'a',
}, StringIO('x'), [], 'utf-8')
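# A malformed (non-integer) CONTENT_LENGTH is coerced to 0 rather than raising.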
self.assertEqual(multipart_parser._content_length, 0)
def test_sanitize_file_name(self):
parser = MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': '1'
}, StringIO('x'), [], 'utf-8')
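# Each traversal-style candidate should sanitize down to the bare 'hax0rd.txt'.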
for file_name in CANDIDATE_TRAVERSAL_FILE_NAMES:
with self.subTest(file_name=file_name):
self.assertEqual(parser.sanitize_file_name(file_name), 'hax0rd.txt')
def test_sanitize_invalid_file_name(self):
parser = MultiPartParser({
'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
'CONTENT_LENGTH': '1',
}, StringIO('x'), [], 'utf-8')
for file_name in CANDIDATE_INVALID_FILE_NAMES:
with self.subTest(file_name=file_name):
self.assertIsNone(parser.sanitize_file_name(file_name))
def test_rfc2231_parsing(self):
test_data = (
(b"Content-Type: application/x-stuff; title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
"This is ***fun***"),
(b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
"foo-ä.html"),
(b"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
"foo-ä.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header(raw_line)
self.assertEqual(parsed[1]['title'], expected_title)
def test_rfc2231_wrong_title(self):
"""
Test wrongly formatted RFC 2231 headers (missing double single quotes).
Parsing should not crash (#24209).
"""
test_data = (
(b"Content-Type: application/x-stuff; title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
b"'This%20is%20%2A%2A%2Afun%2A%2A%2A"),
(b"Content-Type: application/x-stuff; title*='foo.html",
b"'foo.html"),
(b"Content-Type: application/x-stuff; title*=bar.html",
b"bar.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header(raw_line)
self.assertEqual(parsed[1]['title'], expected_title)
from django.db import connection
from django.db.models import CharField, Max
from django.db.models.functions import Lower
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import Celebrity, Fan, Staff, StaffTag, Tag
@skipUnlessDBFeature('can_distinct_on_fields')
@skipUnlessDBFeature('supports_nullable_unique_constraints')
class DistinctOnTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.t1 = Tag.objects.create(name='t1')
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
cls.t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
cls.p1_o1 = Staff.objects.create(id=1, name="p1", organisation="o1")
cls.p2_o1 = Staff.objects.create(id=2, name="p2", organisation="o1")
cls.p3_o1 = Staff.objects.create(id=3, name="p3", organisation="o1")
cls.p1_o2 = Staff.objects.create(id=4, name="p1", organisation="o2")
cls.p1_o1.coworkers.add(cls.p2_o1, cls.p3_o1)
cls.st1 = StaffTag.objects.create(staff=cls.p1_o1, tag=cls.t1)
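# A second, identical StaffTag row gives distinct('staff', 'tag') a
# duplicate to collapse.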
StaffTag.objects.create(staff=cls.p1_o1, tag=cls.t1)
cls.celeb1 = Celebrity.objects.create(name="c1")
cls.celeb2 = Celebrity.objects.create(name="c2")
cls.fan1 = Fan.objects.create(fan_of=cls.celeb1)
cls.fan2 = Fan.objects.create(fan_of=cls.celeb1)
cls.fan3 = Fan.objects.create(fan_of=cls.celeb2)
def test_basic_distinct_on(self):
"""QuerySet.distinct('field', ...) works"""
# (qset, expected) tuples
qsets = (
(
Staff.objects.distinct().order_by('name'),
[self.p1_o1, self.p1_o2, self.p2_o1, self.p3_o1],
),
(
Staff.objects.distinct('name').order_by('name'),
[self.p1_o1, self.p2_o1, self.p3_o1],
),
(
Staff.objects.distinct('organisation').order_by('organisation', 'name'),
[self.p1_o1, self.p1_o2],
),
(
Staff.objects.distinct('name', 'organisation').order_by('name', 'organisation'),
[self.p1_o1, self.p1_o2, self.p2_o1, self.p3_o1],
),
(
Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).distinct('name').order_by('name'),
[self.celeb1, self.celeb2],
),
# Does combining querysets work?
(
(Celebrity.objects.filter(fan__in=[self.fan1, self.fan2]).
distinct('name').order_by('name') |
Celebrity.objects.filter(fan__in=[self.fan3]).
distinct('name').order_by('name')),
[self.celeb1, self.celeb2],
),
(StaffTag.objects.distinct('staff', 'tag'), [self.st1]),
(
Tag.objects.order_by('parent__pk', 'pk').distinct('parent'),
[self.t2, self.t4, self.t1]
if connection.features.nulls_order_largest
else [self.t1, self.t2, self.t4],
),
(
StaffTag.objects.select_related('staff').distinct('staff__name').order_by('staff__name'),
[self.st1],
),
# Fetch the alphabetically first coworker for each worker
(
(Staff.objects.distinct('id').order_by('id', 'coworkers__name').
values_list('id', 'coworkers__name')),
[(1, 'p2'), (2, 'p1'), (3, 'p1'), (4, None)],
),
)
for qset, expected in qsets:
self.assertSequenceEqual(qset, expected)
self.assertEqual(qset.count(), len(expected))
# Combining a unique query with a non-unique query is not allowed.
base_qs = Celebrity.objects.all()
msg = 'Cannot combine a unique query with a non-unique query.'
with self.assertRaisesMessage(TypeError, msg):
base_qs.distinct('id') & base_qs
# Combining queries with different distinct_fields is not allowed.
msg = 'Cannot combine queries with different distinct fields.'
with self.assertRaisesMessage(TypeError, msg):
base_qs.distinct('id') & base_qs.distinct('name')
# Test join unreffing
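# distinct() on fields reached via greatest_fan requires the outer join;
# restricting the distinct clause to 'pk' lets that join be trimmed away.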
c1 = Celebrity.objects.distinct('greatest_fan__id', 'greatest_fan__fan_of')
self.assertIn('OUTER JOIN', str(c1.query))
c2 = c1.distinct('pk')
self.assertNotIn('OUTER JOIN', str(c2.query))
def test_sliced_queryset(self):
msg = 'Cannot create distinct fields once a slice has been taken.'
with self.assertRaisesMessage(TypeError, msg):
Staff.objects.all()[0:5].distinct('name')
def test_transform(self):
new_name = self.t1.name.upper()
self.assertNotEqual(self.t1.name, new_name)
Tag.objects.create(name=new_name)
with register_lookup(CharField, Lower):
self.assertCountEqual(
Tag.objects.order_by().distinct('name__lower'),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_distinct_not_implemented_checks(self):
# distinct + annotate not allowed
msg = 'annotate() + distinct(fields) is not implemented.'
with self.assertRaisesMessage(NotImplementedError, msg):
Celebrity.objects.annotate(Max('id')).distinct('id')[0]
with self.assertRaisesMessage(NotImplementedError, msg):
Celebrity.objects.distinct('id').annotate(Max('id'))[0]
# However this check is done only when the query executes, so you
# can use distinct() to remove the fields before execution.
Celebrity.objects.distinct('id').annotate(Max('id')).distinct()[0]
# distinct + aggregate not allowed
msg = 'aggregate() + distinct(fields) not implemented.'
with self.assertRaisesMessage(NotImplementedError, msg):
Celebrity.objects.distinct('id').aggregate(Max('id'))
def test_distinct_on_in_ordered_subquery(self):
qs = Staff.objects.distinct('name').order_by('name', 'id')
qs = Staff.objects.filter(pk__in=qs).order_by('name')
self.assertSequenceEqual(qs, [self.p1_o1, self.p2_o1, self.p3_o1])
qs = Staff.objects.distinct('name').order_by('name', '-id')
qs = Staff.objects.filter(pk__in=qs).order_by('name')
self.assertSequenceEqual(qs, [self.p1_o2, self.p2_o1, self.p3_o1])
def test_distinct_on_get_ordering_preserved(self):
"""
Ordering shouldn't be cleared when distinct on fields are specified.
refs #25081
"""
staff = Staff.objects.distinct('name').order_by('name', '-organisation').get(name='p1')
self.assertEqual(staff.organisation, 'o2')
import datetime
import pytz
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Article, Category, Comment
class DateTimesTests(TestCase):
def test_related_model_traverse(self):
a1 = Article.objects.create(
title="First one",
pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0),
)
a2 = Article.objects.create(
title="Another one",
pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0),
)
a3 = Article.objects.create(
title="Third one, in the first day",
pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0),
)
a1.comments.create(
text="Im the HULK!",
pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0),
)
a1.comments.create(
text="HULK SMASH!",
pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0),
)
a2.comments.create(
text="LMAO",
pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10),
)
a3.comments.create(
text="+1",
pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10),
)
c = Category.objects.create(name="serious-news")
c.articles.add(a1, a3)
self.assertSequenceEqual(
Comment.objects.datetimes("article__pub_date", "year"), [
datetime.datetime(2005, 1, 1),
datetime.datetime(2010, 1, 1),
],
)
self.assertSequenceEqual(
Comment.objects.datetimes("article__pub_date", "month"), [
datetime.datetime(2005, 7, 1),
datetime.datetime(2010, 7, 1),
],
)
self.assertSequenceEqual(
Comment.objects.datetimes("article__pub_date", "week"), [
datetime.datetime(2005, 7, 25),
datetime.datetime(2010, 7, 26),
],
)
self.assertSequenceEqual(
Comment.objects.datetimes("article__pub_date", "day"), [
datetime.datetime(2005, 7, 28),
datetime.datetime(2010, 7, 28),
],
)
self.assertSequenceEqual(
Article.objects.datetimes("comments__pub_date", "day"), [
datetime.datetime(2005, 7, 28),
datetime.datetime(2005, 7, 29),
datetime.datetime(2005, 8, 29),
datetime.datetime(2010, 7, 28),
],
)
self.assertQuerysetEqual(
Article.objects.datetimes("comments__approval_date", "day"), []
)
self.assertSequenceEqual(
Category.objects.datetimes("articles__pub_date", "day"), [
datetime.datetime(2005, 7, 28),
],
)
@override_settings(USE_TZ=True)
def test_21432(self):
now = timezone.localtime(timezone.now().replace(microsecond=0))
Article.objects.create(title="First one", pub_date=now)
qs = Article.objects.datetimes('pub_date', 'second')
self.assertEqual(qs[0], now)
@override_settings(USE_TZ=True, TIME_ZONE='UTC')
def test_datetimes_ambiguous_and_invalid_times(self):
sao = pytz.timezone('America/Sao_Paulo')
utc = pytz.UTC
article = Article.objects.create(
title='Article 1',
pub_date=utc.localize(datetime.datetime(2016, 2, 21, 1)),
)
Comment.objects.create(
article=article,
pub_date=utc.localize(datetime.datetime(2016, 10, 16, 13)),
)
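# In America/Sao_Paulo, DST ended on 2016-02-21 (local 23:00-23:59 on
# Feb 20 occurred twice) and started on 2016-10-16 (local 00:00-00:59
# never existed), so the truncations below hit an ambiguous and a
# non-existent local time respectively.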
with timezone.override(sao):
with self.assertRaisesMessage(pytz.AmbiguousTimeError, '2016-02-20 23:00:00'):
Article.objects.datetimes('pub_date', 'hour').get()
with self.assertRaisesMessage(pytz.NonExistentTimeError, '2016-10-16 00:00:00'):
Comment.objects.datetimes('pub_date', 'day').get()
self.assertEqual(
Article.objects.datetimes('pub_date', 'hour', is_dst=False).get().dst(),
datetime.timedelta(0),
)
self.assertEqual(
Comment.objects.datetimes('pub_date', 'day', is_dst=False).get().dst(),
datetime.timedelta(0),
)
self.assertEqual(
Article.objects.datetimes('pub_date', 'hour', is_dst=True).get().dst(),
datetime.timedelta(0, 3600),
)
self.assertEqual(
Comment.objects.datetimes('pub_date', 'day', is_dst=True).get().dst(),
datetime.timedelta(0, 3600),
)
def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self):
pub_dates = [
datetime.datetime(2005, 7, 28, 12, 15),
datetime.datetime(2005, 7, 29, 2, 15),
datetime.datetime(2005, 7, 30, 5, 15),
datetime.datetime(2005, 7, 31, 19, 15)]
for i, pub_date in enumerate(pub_dates):
Article(pub_date=pub_date, title='title #{}'.format(i)).save()
self.assertSequenceEqual(
Article.objects.datetimes('pub_date', 'year'),
[datetime.datetime(2005, 1, 1, 0, 0)],
)
self.assertSequenceEqual(
Article.objects.datetimes('pub_date', 'month'),
[datetime.datetime(2005, 7, 1, 0, 0)],
)
self.assertSequenceEqual(
Article.objects.datetimes('pub_date', 'week'),
[datetime.datetime(2005, 7, 25, 0, 0)],
)
self.assertSequenceEqual(Article.objects.datetimes('pub_date', 'day'), [
datetime.datetime(2005, 7, 28, 0, 0),
datetime.datetime(2005, 7, 29, 0, 0),
datetime.datetime(2005, 7, 30, 0, 0),
datetime.datetime(2005, 7, 31, 0, 0),
])
self.assertSequenceEqual(
Article.objects.datetimes('pub_date', 'day', order='ASC'),
[
datetime.datetime(2005, 7, 28, 0, 0),
datetime.datetime(2005, 7, 29, 0, 0),
datetime.datetime(2005, 7, 30, 0, 0),
datetime.datetime(2005, 7, 31, 0, 0),
],
)
self.assertSequenceEqual(
Article.objects.datetimes('pub_date', 'day', order='DESC'),
[
datetime.datetime(2005, 7, 31, 0, 0),
datetime.datetime(2005, 7, 30, 0, 0),
datetime.datetime(2005, 7, 29, 0, 0),
datetime.datetime(2005, 7, 28, 0, 0),
],
)
def test_datetimes_has_lazy_iterator(self):
pub_dates = [
datetime.datetime(2005, 7, 28, 12, 15),
datetime.datetime(2005, 7, 29, 2, 15),
datetime.datetime(2005, 7, 30, 5, 15),
datetime.datetime(2005, 7, 31, 19, 15)]
for i, pub_date in enumerate(pub_dates):
Article(pub_date=pub_date, title='title #{}'.format(i)).save()
# Use iterator() with datetimes() to return a generator that lazily
# requests each result one at a time, to save memory.
dates = []
with self.assertNumQueries(0):
article_datetimes_iterator = Article.objects.datetimes('pub_date', 'day', order='DESC').iterator()
with self.assertNumQueries(1):
for article in article_datetimes_iterator:
dates.append(article)
self.assertEqual(dates, [
datetime.datetime(2005, 7, 31, 0, 0),
datetime.datetime(2005, 7, 30, 0, 0),
datetime.datetime(2005, 7, 29, 0, 0),
datetime.datetime(2005, 7, 28, 0, 0)])
def test_datetimes_disallows_date_fields(self):
dt = datetime.datetime(2005, 7, 28, 12, 15)
Article.objects.create(pub_date=dt, published_on=dt.date(), title="Don't put dates into datetime functions!")
with self.assertRaisesMessage(ValueError, "Cannot truncate DateField 'published_on' to DateTimeField"):
list(Article.objects.datetimes('published_on', 'second'))
def test_datetimes_fails_when_given_invalid_kind_argument(self):
msg = (
"'kind' must be one of 'year', 'month', 'week', 'day', 'hour', "
"'minute', or 'second'."
)
with self.assertRaisesMessage(ValueError, msg):
Article.objects.datetimes('pub_date', 'bad_kind')
def test_datetimes_fails_when_given_invalid_order_argument(self):
msg = "'order' must be either 'ASC' or 'DESC'."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.datetimes('pub_date', 'year', order='bad order')
from math import ceil
from django.db import connection, models
from django.db.models import ProtectedError, RestrictedError
from django.db.models.deletion import Collector
from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
B1, B2, B3, MR, A, Avatar, B, Base, Child, DeleteBottom, DeleteTop,
GenericB1, GenericB2, GenericDeleteBottom, HiddenUser, HiddenUserProfile,
M, M2MFrom, M2MTo, MRNull, Origin, P, Parent, R, RChild, RChildChild,
Referrer, S, T, User, create_a, get_default_r,
)
class OnDeleteTests(TestCase):
def setUp(self):
self.DEFAULT = get_default_r()
def test_auto(self):
a = create_a('auto')
a.auto.delete()
self.assertFalse(A.objects.filter(name='auto').exists())
def test_non_callable(self):
msg = 'on_delete must be callable.'
with self.assertRaisesMessage(TypeError, msg):
models.ForeignKey('self', on_delete=None)
with self.assertRaisesMessage(TypeError, msg):
models.OneToOneField('self', on_delete=None)
def test_auto_nullable(self):
a = create_a('auto_nullable')
a.auto_nullable.delete()
self.assertFalse(A.objects.filter(name='auto_nullable').exists())
def test_setvalue(self):
a = create_a('setvalue')
a.setvalue.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setvalue.pk)
def test_setnull(self):
a = create_a('setnull')
a.setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setnull)
def test_setdefault(self):
a = create_a('setdefault')
a.setdefault.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(self.DEFAULT, a.setdefault.pk)
def test_setdefault_none(self):
a = create_a('setdefault_none')
a.setdefault_none.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.setdefault_none)
def test_cascade(self):
a = create_a('cascade')
a.cascade.delete()
self.assertFalse(A.objects.filter(name='cascade').exists())
def test_cascade_nullable(self):
a = create_a('cascade_nullable')
a.cascade_nullable.delete()
self.assertFalse(A.objects.filter(name='cascade_nullable').exists())
def test_protect(self):
a = create_a('protect')
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through protected foreign keys: 'A.protect'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.delete()
self.assertEqual(cm.exception.protected_objects, {a})
def test_protect_multiple(self):
a = create_a('protect')
b = B.objects.create(protect=a.protect)
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through protected foreign keys: 'A.protect', "
"'B.protect'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.delete()
self.assertEqual(cm.exception.protected_objects, {a, b})
def test_protect_path(self):
a = create_a('protect')
a.protect.p = P.objects.create()
a.protect.save()
msg = (
"Cannot delete some instances of model 'P' because they are "
"referenced through protected foreign keys: 'R.p'."
)
with self.assertRaisesMessage(ProtectedError, msg) as cm:
a.protect.p.delete()
self.assertEqual(cm.exception.protected_objects, {a})
def test_do_nothing(self):
# Testing DO_NOTHING is a bit harder: It would raise IntegrityError for a normal model,
# so we connect to pre_delete and set the fk to a known value.
replacement_r = R.objects.create()
def check_do_nothing(sender, **kwargs):
obj = kwargs['instance']
obj.donothing_set.update(donothing=replacement_r)
models.signals.pre_delete.connect(check_do_nothing)
a = create_a('do_nothing')
a.donothing.delete()
a = A.objects.get(pk=a.pk)
self.assertEqual(replacement_r, a.donothing)
models.signals.pre_delete.disconnect(check_do_nothing)
def test_do_nothing_qscount(self):
"""
A models.DO_NOTHING relation doesn't trigger a query.
"""
b = Base.objects.create()
with self.assertNumQueries(1):
# RelToBase should not be queried.
b.delete()
self.assertEqual(Base.objects.count(), 0)
def test_inheritance_cascade_up(self):
child = RChild.objects.create()
child.delete()
self.assertFalse(R.objects.filter(pk=child.pk).exists())
def test_inheritance_cascade_down(self):
child = RChild.objects.create()
parent = child.r_ptr
parent.delete()
self.assertFalse(RChild.objects.filter(pk=child.pk).exists())
def test_cascade_from_child(self):
a = create_a('child')
a.child.delete()
self.assertFalse(A.objects.filter(name='child').exists())
self.assertFalse(R.objects.filter(pk=a.child_id).exists())
def test_cascade_from_parent(self):
a = create_a('child')
R.objects.get(pk=a.child_id).delete()
self.assertFalse(A.objects.filter(name='child').exists())
self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())
def test_setnull_from_child(self):
a = create_a('child_setnull')
a.child_setnull.delete()
self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_setnull_from_parent(self):
a = create_a('child_setnull')
R.objects.get(pk=a.child_setnull_id).delete()
self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.child_setnull)
def test_o2o_setnull(self):
a = create_a('o2o_setnull')
a.o2o_setnull.delete()
a = A.objects.get(pk=a.pk)
self.assertIsNone(a.o2o_setnull)
def test_restrict(self):
a = create_a('restrict')
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through restricted foreign keys: 'A.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.delete()
self.assertEqual(cm.exception.restricted_objects, {a})
def test_restrict_multiple(self):
a = create_a('restrict')
b3 = B3.objects.create(restrict=a.restrict)
msg = (
"Cannot delete some instances of model 'R' because they are "
"referenced through restricted foreign keys: 'A.restrict', "
"'B3.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.delete()
self.assertEqual(cm.exception.restricted_objects, {a, b3})
def test_restrict_path_cascade_indirect(self):
a = create_a('restrict')
a.restrict.p = P.objects.create()
a.restrict.save()
msg = (
"Cannot delete some instances of model 'P' because they are "
"referenced through restricted foreign keys: 'A.restrict'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
a.restrict.p.delete()
self.assertEqual(cm.exception.restricted_objects, {a})
# Object referenced also with CASCADE relationship can be deleted.
a.cascade.p = a.restrict.p
a.cascade.save()
a.restrict.p.delete()
self.assertFalse(A.objects.filter(name='restrict').exists())
self.assertFalse(R.objects.filter(pk=a.restrict_id).exists())
def test_restrict_path_cascade_direct(self):
a = create_a('restrict')
a.restrict.p = P.objects.create()
a.restrict.save()
a.cascade_p = a.restrict.p
a.save()
a.restrict.p.delete()
self.assertFalse(A.objects.filter(name='restrict').exists())
self.assertFalse(R.objects.filter(pk=a.restrict_id).exists())
def test_restrict_path_cascade_indirect_diamond(self):
delete_top = DeleteTop.objects.create()
b1 = B1.objects.create(delete_top=delete_top)
b2 = B2.objects.create(delete_top=delete_top)
delete_bottom = DeleteBottom.objects.create(b1=b1, b2=b2)
msg = (
"Cannot delete some instances of model 'B1' because they are "
"referenced through restricted foreign keys: 'DeleteBottom.b1'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
b1.delete()
self.assertEqual(cm.exception.restricted_objects, {delete_bottom})
self.assertTrue(DeleteTop.objects.exists())
self.assertTrue(B1.objects.exists())
self.assertTrue(B2.objects.exists())
self.assertTrue(DeleteBottom.objects.exists())
# Object referenced also with CASCADE relationship can be deleted.
delete_top.delete()
self.assertFalse(DeleteTop.objects.exists())
self.assertFalse(B1.objects.exists())
self.assertFalse(B2.objects.exists())
self.assertFalse(DeleteBottom.objects.exists())
def test_restrict_gfk_no_fast_delete(self):
delete_top = DeleteTop.objects.create()
generic_b1 = GenericB1.objects.create(generic_delete_top=delete_top)
generic_b2 = GenericB2.objects.create(generic_delete_top=delete_top)
generic_delete_bottom = GenericDeleteBottom.objects.create(
generic_b1=generic_b1,
generic_b2=generic_b2,
)
msg = (
"Cannot delete some instances of model 'GenericB1' because they "
"are referenced through restricted foreign keys: "
"'GenericDeleteBottom.generic_b1'."
)
with self.assertRaisesMessage(RestrictedError, msg) as cm:
generic_b1.delete()
self.assertEqual(cm.exception.restricted_objects, {generic_delete_bottom})
self.assertTrue(DeleteTop.objects.exists())
self.assertTrue(GenericB1.objects.exists())
self.assertTrue(GenericB2.objects.exists())
self.assertTrue(GenericDeleteBottom.objects.exists())
# Object referenced also with CASCADE relationship can be deleted.
delete_top.delete()
self.assertFalse(DeleteTop.objects.exists())
self.assertFalse(GenericB1.objects.exists())
self.assertFalse(GenericB2.objects.exists())
self.assertFalse(GenericDeleteBottom.objects.exists())
class DeletionTests(TestCase):
def test_sliced_queryset(self):
msg = "Cannot use 'limit' or 'offset' with delete()."
with self.assertRaisesMessage(TypeError, msg):
M.objects.all()[0:5].delete()
def test_pk_none(self):
m = M()
msg = "M object can't be deleted because its id attribute is set to None."
with self.assertRaisesMessage(ValueError, msg):
m.delete()
def test_m2m(self):
m = M.objects.create()
r = R.objects.create()
MR.objects.create(m=m, r=r)
r.delete()
self.assertFalse(MR.objects.exists())
r = R.objects.create()
MR.objects.create(m=m, r=r)
m.delete()
self.assertFalse(MR.objects.exists())
m = M.objects.create()
r = R.objects.create()
m.m2m.add(r)
r.delete()
through = M._meta.get_field('m2m').remote_field.through
self.assertFalse(through.objects.exists())
r = R.objects.create()
m.m2m.add(r)
m.delete()
self.assertFalse(through.objects.exists())
m = M.objects.create()
r = R.objects.create()
MRNull.objects.create(m=m, r=r)
r.delete()
self.assertTrue(MRNull.objects.exists())
self.assertFalse(m.m2m_through_null.exists())
def test_bulk(self):
s = S.objects.create(r=R.objects.create())
for i in range(2 * GET_ITERATOR_CHUNK_SIZE):
T.objects.create(s=s)
# 1 (select related `T` instances)
# + 1 (select related `U` instances)
# + 2 (delete `T` instances in batches)
# + 1 (delete `s`)
self.assertNumQueries(5, s.delete)
self.assertFalse(S.objects.exists())
def test_instance_update(self):
deleted = []
related_setnull_sets = []
def pre_delete(sender, **kwargs):
obj = kwargs['instance']
deleted.append(obj)
if isinstance(obj, R):
related_setnull_sets.append([a.pk for a in obj.setnull_set.all()])
models.signals.pre_delete.connect(pre_delete)
a = create_a('update_setnull')
a.setnull.delete()
a = create_a('update_cascade')
a.cascade.delete()
for obj in deleted:
self.assertIsNone(obj.pk)
for pk_list in related_setnull_sets:
for a in A.objects.filter(id__in=pk_list):
self.assertIsNone(a.setnull)
models.signals.pre_delete.disconnect(pre_delete)
def test_deletion_order(self):
pre_delete_order = []
post_delete_order = []
def log_post_delete(sender, **kwargs):
pre_delete_order.append((sender, kwargs['instance'].pk))
def log_pre_delete(sender, **kwargs):
post_delete_order.append((sender, kwargs['instance'].pk))
models.signals.post_delete.connect(log_post_delete)
models.signals.pre_delete.connect(log_pre_delete)
r = R.objects.create(pk=1)
s1 = S.objects.create(pk=1, r=r)
s2 = S.objects.create(pk=2, r=r)
T.objects.create(pk=1, s=s1)
T.objects.create(pk=2, s=s2)
RChild.objects.create(r_ptr=r)
r.delete()
self.assertEqual(
pre_delete_order, [(T, 2), (T, 1), (RChild, 1), (S, 2), (S, 1), (R, 1)]
)
self.assertEqual(
post_delete_order, [(T, 1), (T, 2), (RChild, 1), (S, 1), (S, 2), (R, 1)]
)
models.signals.post_delete.disconnect(log_post_delete)
models.signals.pre_delete.disconnect(log_pre_delete)
def test_relational_post_delete_signals_happen_before_parent_object(self):
deletions = []
def log_post_delete(instance, **kwargs):
self.assertTrue(R.objects.filter(pk=instance.r_id))
self.assertIs(type(instance), S)
deletions.append(instance.id)
r = R.objects.create(pk=1)
S.objects.create(pk=1, r=r)
models.signals.post_delete.connect(log_post_delete, sender=S)
try:
r.delete()
finally:
models.signals.post_delete.disconnect(log_post_delete)
self.assertEqual(len(deletions), 1)
self.assertEqual(deletions[0], 1)
@skipUnlessDBFeature("can_defer_constraint_checks")
def test_can_defer_constraint_checks(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to delete the avatar
# The important thing is that when we can defer constraint checks there
# is no need to do an UPDATE on User.avatar to null it out.
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append('')
models.signals.post_delete.connect(noop, sender=User)
self.assertNumQueries(3, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
@skipIfDBFeature("can_defer_constraint_checks")
def test_cannot_defer_constraint_checks(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
# Attach a signal to make sure we will not do fast_deletes.
calls = []
def noop(*args, **kwargs):
calls.append('')
models.signals.post_delete.connect(noop, sender=User)
a = Avatar.objects.get(pk=u.avatar_id)
# The below doesn't make sense... Why null out user.avatar when the user
# is deleted immediately afterwards and there are no more cascades?
# 1 query to find the users for the avatar.
# 1 query to delete the user
# 1 query to null out user.avatar, because we can't defer the constraint
# 1 query to delete the avatar
self.assertNumQueries(4, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
self.assertEqual(len(calls), 1)
models.signals.post_delete.disconnect(noop, sender=User)
def test_hidden_related(self):
r = R.objects.create()
h = HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h)
r.delete()
self.assertEqual(HiddenUserProfile.objects.count(), 0)
def test_large_delete(self):
TEST_SIZE = 2000
objs = [Avatar() for i in range(0, TEST_SIZE)]
Avatar.objects.bulk_create(objs)
# Calculate the number of queries needed.
batch_size = connection.ops.bulk_batch_size(['pk'], objs)
# The related fetches are done in batches.
batches = ceil(len(objs) / batch_size)
# One query for Avatar.objects.all() and then one related fast delete for
# each batch.
fetches_to_mem = 1 + batches
# The Avatar objects are going to be deleted in batches of GET_ITERATOR_CHUNK_SIZE
queries = fetches_to_mem + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE
self.assertNumQueries(queries, Avatar.objects.all().delete)
self.assertFalse(Avatar.objects.exists())
def test_large_delete_related(self):
TEST_SIZE = 2000
s = S.objects.create(r=R.objects.create())
for i in range(TEST_SIZE):
T.objects.create(s=s)
batch_size = max(connection.ops.bulk_batch_size(['pk'], range(TEST_SIZE)), 1)
# TEST_SIZE / batch_size (select related `T` instances)
# + 1 (select related `U` instances)
# + TEST_SIZE / GET_ITERATOR_CHUNK_SIZE (delete `T` instances in batches)
# + 1 (delete `s`)
expected_num_queries = ceil(TEST_SIZE / batch_size)
expected_num_queries += ceil(TEST_SIZE / GET_ITERATOR_CHUNK_SIZE) + 2
self.assertNumQueries(expected_num_queries, s.delete)
self.assertFalse(S.objects.exists())
self.assertFalse(T.objects.exists())
def test_delete_with_keeping_parents(self):
child = RChild.objects.create()
parent_id = child.r_ptr_id
child.delete(keep_parents=True)
self.assertFalse(RChild.objects.filter(id=child.id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
def test_delete_with_keeping_parents_relationships(self):
child = RChild.objects.create()
parent_id = child.r_ptr_id
parent_referent_id = S.objects.create(r=child.r_ptr).pk
child.delete(keep_parents=True)
self.assertFalse(RChild.objects.filter(id=child.id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
self.assertTrue(S.objects.filter(pk=parent_referent_id).exists())
childchild = RChildChild.objects.create()
parent_id = childchild.rchild_ptr.r_ptr_id
child_id = childchild.rchild_ptr_id
parent_referent_id = S.objects.create(r=childchild.rchild_ptr.r_ptr).pk
childchild.delete(keep_parents=True)
self.assertFalse(RChildChild.objects.filter(id=childchild.id).exists())
self.assertTrue(RChild.objects.filter(id=child_id).exists())
self.assertTrue(R.objects.filter(id=parent_id).exists())
self.assertTrue(S.objects.filter(pk=parent_referent_id).exists())
def test_queryset_delete_returns_num_rows(self):
"""
QuerySet.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
Avatar.objects.bulk_create([Avatar(desc='a'), Avatar(desc='b'), Avatar(desc='c')])
avatars_count = Avatar.objects.count()
deleted, rows_count = Avatar.objects.all().delete()
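# QuerySet.delete() returns (total_rows_deleted, {model_label: rows_deleted, ...});
# only the total is checked here.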
self.assertEqual(deleted, avatars_count)
# more complex example with multiple object types
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
}
deleted, deleted_objs = R.objects.all().delete()
self.assertCountEqual(deleted_objs.keys(), existed_objs.keys())
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
def test_model_delete_returns_num_rows(self):
"""
Model.delete() should return the number of deleted rows and a
dictionary with the number of deletions for each object type.
"""
r = R.objects.create()
h1 = HiddenUser.objects.create(r=r)
h2 = HiddenUser.objects.create(r=r)
HiddenUser.objects.create(r=r)
HiddenUserProfile.objects.create(user=h1)
HiddenUserProfile.objects.create(user=h2)
m1 = M.objects.create()
m2 = M.objects.create()
MR.objects.create(r=r, m=m1)
r.m_set.add(m1)
r.m_set.add(m2)
r.save()
existed_objs = {
R._meta.label: R.objects.count(),
HiddenUser._meta.label: HiddenUser.objects.count(),
MR._meta.label: MR.objects.count(),
HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(),
M.m2m.through._meta.label: M.m2m.through.objects.count(),
}
deleted, deleted_objs = r.delete()
self.assertEqual(deleted, sum(existed_objs.values()))
self.assertCountEqual(deleted_objs.keys(), existed_objs.keys())
for k, v in existed_objs.items():
self.assertEqual(deleted_objs[k], v)
def test_proxied_model_duplicate_queries(self):
"""
#25685 - Deleting instances of a model with existing proxy
classes should not issue multiple queries during cascade
deletion of referring models.
"""
avatar = Avatar.objects.create()
# One query for the Avatar table and a second for the User one.
with self.assertNumQueries(2):
avatar.delete()
def test_only_referenced_fields_selected(self):
"""
Only referenced fields are selected during cascade deletion SELECT
unless deletion signals are connected.
"""
origin = Origin.objects.create()
expected_sql = str(
Referrer.objects.only(
# Both fields are referenced by SecondReferrer.
'id', 'unique_field',
).filter(origin__in=[origin]).query
)
with self.assertNumQueries(2) as ctx:
origin.delete()
self.assertEqual(ctx.captured_queries[0]['sql'], expected_sql)
def receiver(instance, **kwargs):
pass
# All fields are selected if deletion signals are connected.
for signal_name in ('pre_delete', 'post_delete'):
with self.subTest(signal=signal_name):
origin = Origin.objects.create()
signal = getattr(models.signals, signal_name)
signal.connect(receiver, sender=Referrer)
with self.assertNumQueries(2) as ctx:
origin.delete()
self.assertIn(
connection.ops.quote_name('large_field'),
ctx.captured_queries[0]['sql'],
)
signal.disconnect(receiver, sender=Referrer)
class FastDeleteTests(TestCase):
def test_fast_delete_all(self):
with self.assertNumQueries(1) as ctx:
User.objects.all().delete()
sql = ctx.captured_queries[0]['sql']
# No subquery is used when performing a full delete.
self.assertNotIn('SELECT', sql)
def test_fast_delete_fk(self):
u = User.objects.create(
avatar=Avatar.objects.create()
)
a = Avatar.objects.get(pk=u.avatar_id)
# 1 query to fast-delete the user
# 1 query to delete the avatar
self.assertNumQueries(2, a.delete)
self.assertFalse(User.objects.exists())
self.assertFalse(Avatar.objects.exists())
def test_fast_delete_m2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete f, 1 to fast-delete m2m for f
self.assertNumQueries(2, f.delete)
def test_fast_delete_revm2m(self):
t = M2MTo.objects.create()
f = M2MFrom.objects.create()
f.m2m.add(t)
# 1 to delete t, 1 to fast-delete t's m_set
self.assertNumQueries(2, f.delete)
def test_fast_delete_qs(self):
u1 = User.objects.create()
u2 = User.objects.create()
self.assertNumQueries(1, User.objects.filter(pk=u1.pk).delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_instance_set_pk_none(self):
u = User.objects.create()
# User can be fast-deleted.
collector = Collector(using='default')
self.assertTrue(collector.can_fast_delete(u))
u.delete()
self.assertIsNone(u.pk)
def test_fast_delete_joined_qs(self):
a = Avatar.objects.create(desc='a')
User.objects.create(avatar=a)
u2 = User.objects.create()
self.assertNumQueries(1, User.objects.filter(avatar__desc='a').delete)
self.assertEqual(User.objects.count(), 1)
self.assertTrue(User.objects.filter(pk=u2.pk).exists())
def test_fast_delete_inheritance(self):
c = Child.objects.create()
p = Parent.objects.create()
# 1 for self, 1 for parent
self.assertNumQueries(2, c.delete)
self.assertFalse(Child.objects.exists())
self.assertEqual(Parent.objects.count(), 1)
self.assertEqual(Parent.objects.filter(pk=p.pk).count(), 1)
# 1 for self delete, 1 for fast delete of empty "child" qs.
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
# 1 for self delete, 1 for fast delete of the related "child" row.
c = Child.objects.create()
p = c.parent_ptr
self.assertNumQueries(2, p.delete)
self.assertFalse(Parent.objects.exists())
self.assertFalse(Child.objects.exists())
def test_fast_delete_large_batch(self):
User.objects.bulk_create(User() for i in range(0, 2000))
# No problems here - we aren't going to cascade, so we will fast
# delete the objects in a single query.
self.assertNumQueries(1, User.objects.all().delete)
a = Avatar.objects.create(desc='a')
User.objects.bulk_create(User(avatar=a) for i in range(0, 2000))
# We don't hit parameter amount limits for a, so just one query for
# that + fast delete of the related objs.
self.assertNumQueries(2, a.delete)
self.assertEqual(User.objects.count(), 0)
def test_fast_delete_empty_no_update_can_self_select(self):
"""
#25932 - Fast deleting on backends where `update_can_self_select` is
disabled should work even if the specified filter doesn't match any
row.
"""
with self.assertNumQueries(1):
self.assertEqual(
User.objects.filter(avatar__desc='missing').delete(),
(0, {}),
)
def test_fast_delete_combined_relationships(self):
# The cascading fast-delete of SecondReferrer should be combined
# in a single DELETE WHERE referrer_id OR unique_field.
origin = Origin.objects.create()
referer = Referrer.objects.create(origin=origin, unique_field=42)
with self.assertNumQueries(2):
referer.delete()
def test_fast_delete_aggregation(self):
# Fast-deleting when filtering against an aggregation results in
# a single query containing a subquery.
Base.objects.create()
with self.assertNumQueries(1):
self.assertEqual(
Base.objects.annotate(
rels_count=models.Count('rels'),
).filter(rels_count=0).delete(),
(1, {'delete.Base': 1}),
)
self.assertIs(Base.objects.exists(), False)
import unittest
from django.core.checks import Error, Warning
from django.core.checks.model_checks import _check_lazy_references
from django.db import connection, connections, models
from django.db.models.functions import Abs, Lower, Round
from django.db.models.signals import post_init
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import isolate_apps, override_settings, register_lookup
class EmptyRouter:
pass
def get_max_column_name_length():
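# Return the smallest max_name_length among configured databases that don't
# truncate names, together with its alias; (None, None) if no backend
# imposes a limit.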
allowed_len = None
db_alias = None
for db in ('default', 'other'):
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is not None and not connection.features.truncates_names:
if allowed_len is None or max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
return (allowed_len, db_alias)
@isolate_apps('invalid_models_tests')
class IndexTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = 42
self.assertEqual(Model.check(), [
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id='models.E008',
),
])
def test_non_list(self):
class Model(models.Model):
class Meta:
index_together = 'not-a-list'
self.assertEqual(Model.check(), [
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id='models.E008',
),
])
def test_list_containing_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = [('a', 'b'), 42]
self.assertEqual(Model.check(), [
Error(
"All 'index_together' elements must be lists or tuples.",
obj=Model,
id='models.E009',
),
])
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
index_together = [['missing_field']]
self.assertEqual(Model.check(), [
Error(
"'index_together' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
index_together = [['field2', 'field1']]
self.assertEqual(Bar.check(), [
Error(
"'index_together' refers to field 'field1' which is not "
"local to model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
index_together = [['m2m']]
self.assertEqual(Model.check(), [
Error(
"'index_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'index_together'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
index_together = [['foo_1_id', 'foo_2']]
self.assertEqual(Bar.check(), [])
# unique_together tests are very similar to index_together tests.
@isolate_apps('invalid_models_tests')
class UniqueTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
unique_together = 42
self.assertEqual(Model.check(), [
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id='models.E010',
),
])
def test_list_containing_non_iterable(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
unique_together = [('a', 'b'), 42]
self.assertEqual(Model.check(), [
Error(
"All 'unique_together' elements must be lists or tuples.",
obj=Model,
id='models.E011',
),
])
def test_non_list(self):
class Model(models.Model):
class Meta:
unique_together = 'not-a-list'
self.assertEqual(Model.check(), [
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id='models.E010',
),
])
def test_valid_model(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
# unique_together can be a simple tuple
unique_together = ('one', 'two')
self.assertEqual(Model.check(), [])
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
unique_together = [['missing_field']]
self.assertEqual(Model.check(), [
Error(
"'unique_together' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_m2m(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
unique_together = [['m2m']]
self.assertEqual(Model.check(), [
Error(
"'unique_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'unique_together'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
unique_together = [['foo_1_id', 'foo_2']]
self.assertEqual(Bar.check(), [])
@isolate_apps('invalid_models_tests')
class IndexesTests(TestCase):
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=['missing_field'], name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
indexes = [models.Index(fields=['m2m'], name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
indexes = [models.Index(fields=['field2', 'field1'], name='name')]
self.assertEqual(Bar.check(), [
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
indexes = [models.Index(fields=['foo_1_id', 'foo_2'], name='index_name')]
self.assertEqual(Bar.check(), [])
def test_name_constraints(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(fields=['id'], name='_index_name'),
models.Index(fields=['id'], name='5index_name'),
]
self.assertEqual(Model.check(), [
Error(
"The index name '%sindex_name' cannot start with an "
"underscore or a number." % prefix,
obj=Model,
id='models.E033',
) for prefix in ('_', '5')
])
def test_max_name_length(self):
index_name = 'x' * 31
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=['id'], name=index_name)]
self.assertEqual(Model.check(), [
Error(
"The index name '%s' cannot be longer than 30 characters."
% index_name,
obj=Model,
id='models.E034',
),
])
def test_index_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
indexes = [
models.Index(
fields=['age'],
name='index_age_gte_10',
condition=models.Q(age__gte=10),
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_partial_indexes else [
Warning(
'%s does not support indexes with conditions.'
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning if you "
"don't care about it."
),
obj=Model,
id='models.W037',
)
]
self.assertEqual(errors, expected)
def test_index_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_partial_indexes'}
indexes = [
models.Index(
fields=['age'],
name='index_age_gte_10',
condition=models.Q(age__gte=10),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_index_with_include(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
indexes = [
models.Index(
fields=['age'],
name='index_age_include_id',
include=['id'],
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_covering_indexes else [
Warning(
'%s does not support indexes with non-key columns.'
% connection.display_name,
hint=(
"Non-key columns will be ignored. Silence this warning if "
"you don't care about it."
),
obj=Model,
id='models.W040',
)
]
self.assertEqual(errors, expected)
def test_index_with_include_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_covering_indexes'}
indexes = [
models.Index(
fields=['age'],
name='index_age_include_id',
include=['id'],
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_covering_indexes')
def test_index_include_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(fields=['id'], include=['missing_field'], name='name'),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_index_include_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
indexes = [models.Index(fields=['id'], include=['m2m'], name='name')]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id='models.E013',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_index_include_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
indexes = [
models.Index(fields=['field2'], include=['field1'], name='name'),
]
self.assertEqual(Child.check(databases=self.databases), [
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Child'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Child,
id='models.E016',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_index_include_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1')
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2')
class Meta:
indexes = [
models.Index(
fields=['id'],
include=['fk_1_id', 'fk_2'],
name='name',
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_index(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
indexes = [models.Index(Lower('name'), name='index_lower_name')]
warn = Warning(
'%s does not support indexes on expressions.'
% connection.display_name,
hint=(
"An index won't be created. Silence this warning if you don't "
"care about it."
),
obj=Model,
id='models.W043',
)
expected = [] if connection.features.supports_expression_indexes else [warn]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_func_index_required_db_features(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
indexes = [models.Index(Lower('name'), name='index_lower_name')]
required_db_features = {'supports_expression_indexes'}
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_index_complex_expression_custom_lookup(self):
class Model(models.Model):
height = models.IntegerField()
weight = models.IntegerField()
class Meta:
indexes = [
models.Index(
models.F('height') / (models.F('weight__abs') + models.Value(5)),
name='name',
),
]
with register_lookup(models.IntegerField, Abs):
self.assertEqual(Model.check(), [])
def test_func_index_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [models.Index(Lower('missing_field').desc(), name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_func_index_pointing_to_missing_field_nested(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(Abs(Round('missing_field')), name='name'),
]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_func_index_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
indexes = [models.Index(Lower('m2m'), name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id='models.E013',
),
])
def test_func_index_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.CharField(max_length=15)
class Bar(Foo):
class Meta:
indexes = [models.Index(Lower('field1'), name='name')]
self.assertEqual(Bar.check(), [
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
def test_func_index_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_2')
class Meta:
indexes = [
models.Index(Lower('foo_1_id'), Lower('foo_2'), name='index_name'),
]
self.assertEqual(Bar.check(), [])
@isolate_apps('invalid_models_tests')
class FieldNamesTests(TestCase):
databases = {'default', 'other'}
def test_ending_with_underscore(self):
class Model(models.Model):
field_ = models.CharField(max_length=10)
m2m_ = models.ManyToManyField('self')
self.assertEqual(Model.check(), [
Error(
'Field names must not end with an underscore.',
obj=Model._meta.get_field('field_'),
id='fields.E001',
),
Error(
'Field names must not end with an underscore.',
obj=Model._meta.get_field('m2m_'),
id='fields.E001',
),
])
max_column_name_length, column_limit_db_alias = get_max_column_name_length()
@unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.")
def test_M2M_long_column_name(self):
"""
#13711 -- Model check for long M2M column names when database has
column name length limits.
"""
# A model with a very long name, used as the target of the relations below.
class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(models.Model):
title = models.CharField(max_length=11)
# Main model for which checks will be performed.
class ModelWithLongField(models.Model):
m2m_field = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn1',
)
m2m_field2 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn2', through='m2msimple',
)
m2m_field3 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn3',
through='m2mcomplex',
)
fk = models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
related_name='rn4',
)
# Models used for setting `through` in M2M field.
class m2msimple(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
class m2mcomplex(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
long_field_name = 'a' * (self.max_column_name_length + 1)
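# One character over the backend's limit guarantees the column-name check fires.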
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
).contribute_to_class(m2msimple, long_field_name)
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
db_column=long_field_name
).contribute_to_class(m2mcomplex, long_field_name)
errors = ModelWithLongField.check(databases=('default', 'other'))
# First error: an M2M field set on the model with the long name.
m2m_long_name = "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id"
if self.max_column_name_length > len(m2m_long_name):
# Some databases support names longer than the test name.
expected = []
else:
expected = [
Error(
'Autogenerated column name too long for M2M field "%s". '
'Maximum length is "%s" for database "%s".'
% (m2m_long_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id='models.E019',
)
]
        # Second error because the FK specified in the `through` model
        # `m2msimple` has an auto-generated column name longer than allowed.
        # The other M2M raises no check errors because its `through` model
        # sets db_column for the FK, even though the actual name exceeds the
        # database limit.
expected.append(
Error(
'Autogenerated column name too long for M2M field "%s_id". '
'Maximum length is "%s" for database "%s".'
% (long_field_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id='models.E019',
)
)
self.assertEqual(errors, expected)
        # The check for long column names runs only for the specified database
        # aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
@unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.")
def test_local_field_long_column_name(self):
"""
#13711 -- Model check for long column names
        when the database does not support long names.
"""
class ModelWithLongField(models.Model):
title = models.CharField(max_length=11)
long_field_name = 'a' * (self.max_column_name_length + 1)
long_field_name2 = 'b' * (self.max_column_name_length + 1)
models.CharField(max_length=11).contribute_to_class(ModelWithLongField, long_field_name)
models.CharField(max_length=11, db_column='vlmn').contribute_to_class(ModelWithLongField, long_field_name2)
self.assertEqual(ModelWithLongField.check(databases=('default', 'other')), [
Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (long_field_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Set the column name manually using 'db_column'.",
obj=ModelWithLongField,
id='models.E018',
)
])
        # The check for long column names runs only for the specified database
        # aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
def test_including_separator(self):
class Model(models.Model):
some__field = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
'Field names must not contain "__".',
obj=Model._meta.get_field('some__field'),
id='fields.E002',
)
])
def test_pk(self):
class Model(models.Model):
pk = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=Model._meta.get_field('pk'),
id='fields.E003',
)
])
def test_db_column_clash(self):
class Model(models.Model):
foo = models.IntegerField()
bar = models.IntegerField(db_column='foo')
self.assertEqual(Model.check(), [
Error(
"Field 'bar' has column name 'foo' that is used by "
"another field.",
hint="Specify a 'db_column' for the field.",
obj=Model,
id='models.E007',
)
])
@isolate_apps('invalid_models_tests')
class ShadowingFieldsTests(SimpleTestCase):
def test_field_name_clash_with_child_accessor(self):
class Parent(models.Model):
pass
class Child(Parent):
child = models.CharField(max_length=100)
self.assertEqual(Child.check(), [
Error(
"The field 'child' clashes with the field "
"'child' from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field('child'),
id='models.E006',
)
])
def test_field_name_clash_with_m2m_through(self):
class Parent(models.Model):
clash_id = models.IntegerField()
class Child(Parent):
clash = models.ForeignKey('Child', models.CASCADE)
class Model(models.Model):
parents = models.ManyToManyField(
to=Parent,
through='Through',
through_fields=['parent', 'model'],
)
class Through(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
model = models.ForeignKey(Model, models.CASCADE)
self.assertEqual(Child.check(), [
Error(
"The field 'clash' clashes with the field 'clash_id' from "
"model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field('clash'),
id='models.E006',
)
])
def test_multiinheritance_clash(self):
class Mother(models.Model):
clash = models.IntegerField()
class Father(models.Model):
clash = models.IntegerField()
class Child(Mother, Father):
            # Here there are two clashes: 'id' (the automatic field) and
            # 'clash', because both parents define these fields.
pass
self.assertEqual(Child.check(), [
Error(
"The field 'id' from parent model "
"'invalid_models_tests.mother' clashes with the field 'id' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id='models.E005',
),
Error(
"The field 'clash' from parent model "
"'invalid_models_tests.mother' clashes with the field 'clash' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id='models.E005',
)
])
def test_inheritance_clash(self):
class Parent(models.Model):
f_id = models.IntegerField()
class Target(models.Model):
# This field doesn't result in a clash.
f_id = models.IntegerField()
class Child(Parent):
            # This field clashes with the parent's "f_id" field.
f = models.ForeignKey(Target, models.CASCADE)
self.assertEqual(Child.check(), [
Error(
"The field 'f' clashes with the field 'f_id' "
"from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field('f'),
id='models.E006',
)
])
def test_multigeneration_inheritance(self):
class GrandParent(models.Model):
clash = models.IntegerField()
class Parent(GrandParent):
pass
class Child(Parent):
pass
class GrandChild(Child):
clash = models.IntegerField()
self.assertEqual(GrandChild.check(), [
Error(
"The field 'clash' clashes with the field 'clash' "
"from model 'invalid_models_tests.grandparent'.",
obj=GrandChild._meta.get_field('clash'),
id='models.E006',
)
])
def test_id_clash(self):
class Target(models.Model):
pass
class Model(models.Model):
fk = models.ForeignKey(Target, models.CASCADE)
fk_id = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
"The field 'fk_id' clashes with the field 'fk' from model "
"'invalid_models_tests.model'.",
obj=Model._meta.get_field('fk_id'),
id='models.E006',
)
])
@isolate_apps('invalid_models_tests')
class OtherModelTests(SimpleTestCase):
def test_unique_primary_key(self):
invalid_id = models.IntegerField(primary_key=False)
class Model(models.Model):
id = invalid_id
self.assertEqual(Model.check(), [
Error(
"'id' can only be used as a field name if the field also sets "
"'primary_key=True'.",
obj=Model,
id='models.E004',
),
])
def test_ordering_non_iterable(self):
class Model(models.Model):
class Meta:
ordering = 'missing_field'
self.assertEqual(Model.check(), [
Error(
"'ordering' must be a tuple or list "
"(even if you want to order by only one field).",
obj=Model,
id='models.E014',
),
])
def test_just_ordering_no_errors(self):
class Model(models.Model):
order = models.PositiveIntegerField()
class Meta:
ordering = ['order']
self.assertEqual(Model.check(), [])
def test_just_order_with_respect_to_no_errors(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
class Meta:
order_with_respect_to = 'question'
self.assertEqual(Answer.check(), [])
def test_ordering_with_order_with_respect_to(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
order = models.IntegerField()
class Meta:
order_with_respect_to = 'question'
ordering = ['order']
self.assertEqual(Answer.check(), [
Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=Answer,
id='models.E021',
),
])
def test_non_valid(self):
class RelationModel(models.Model):
pass
class Model(models.Model):
relation = models.ManyToManyField(RelationModel)
class Meta:
ordering = ['relation']
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'relation'.",
obj=Model,
id='models.E015',
),
])
def test_ordering_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
ordering = ('missing_field',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_field'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_foreignkey_field(self):
class Model(models.Model):
missing_fk_field = models.IntegerField()
class Meta:
ordering = ('missing_fk_field_id',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_fk_field_id'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_related_field(self):
class Model(models.Model):
test = models.IntegerField()
class Meta:
ordering = ('missing_related__id',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_related__id'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_related_model_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_to_non_related_field(self):
class Child(models.Model):
parent = models.IntegerField()
class Meta:
ordering = ('parent__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_to_two_related_model_field(self):
class Parent2(models.Model):
pass
class Parent1(models.Model):
parent2 = models.ForeignKey(Parent2, models.CASCADE)
class Child(models.Model):
parent1 = models.ForeignKey(Parent1, models.CASCADE)
class Meta:
ordering = ('parent1__parent2__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent1__parent2__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_multiple_times_to_model_fields(self):
class Parent(models.Model):
field1 = models.CharField(max_length=100)
field2 = models.CharField(max_length=100)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__field1__field2',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__field1__field2'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_allows_registered_lookups(self):
class Model(models.Model):
test = models.CharField(max_length=100)
class Meta:
ordering = ('test__lower',)
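        # Registering Lower as a transform makes 'test__lower' a valid
        # ordering reference.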
with register_lookup(models.CharField, Lower):
self.assertEqual(Model.check(), [])
def test_ordering_pointing_to_lookup_not_transform(self):
class Model(models.Model):
test = models.CharField(max_length=100)
class Meta:
ordering = ('test__isnull',)
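        # 'isnull' is a registered lookup, so ordering by 'test__isnull'
        # raises no errors.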
self.assertEqual(Model.check(), [])
def test_ordering_pointing_to_related_model_pk(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__pk',)
self.assertEqual(Child.check(), [])
def test_ordering_pointing_to_foreignkey_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent_id',)
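        # 'parent_id' is the FK's attname, which is a valid ordering reference.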
self.assertFalse(Child.check())
def test_name_beginning_with_underscore(self):
class _Model(models.Model):
pass
self.assertEqual(_Model.check(), [
Error(
"The model name '_Model' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=_Model,
id='models.E023',
)
])
def test_name_ending_with_underscore(self):
class Model_(models.Model):
pass
self.assertEqual(Model_.check(), [
Error(
"The model name 'Model_' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=Model_,
id='models.E023',
)
])
def test_name_contains_double_underscores(self):
class Test__Model(models.Model):
pass
self.assertEqual(Test__Model.check(), [
Error(
"The model name 'Test__Model' cannot contain double underscores "
"as it collides with the query lookup syntax.",
obj=Test__Model,
id='models.E024',
)
])
def test_property_and_related_field_accessor_clash(self):
class Model(models.Model):
fk = models.ForeignKey('self', models.CASCADE)
# Override related field accessor.
Model.fk_id = property(lambda self: 'ERROR')
self.assertEqual(Model.check(), [
Error(
"The property 'fk_id' clashes with a related field accessor.",
obj=Model,
id='models.E025',
)
])
def test_single_primary_key(self):
class Model(models.Model):
foo = models.IntegerField(primary_key=True)
bar = models.IntegerField(primary_key=True)
self.assertEqual(Model.check(), [
Error(
"The model cannot have more than one field with 'primary_key=True'.",
obj=Model,
id='models.E026',
)
])
@override_settings(TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model')
def test_swappable_missing_app_name(self):
class Model(models.Model):
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE'
self.assertEqual(Model.check(), [
Error(
"'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form 'app_label.app_name'.",
id='models.E001',
),
])
@override_settings(TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target')
def test_swappable_missing_app(self):
class Model(models.Model):
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL'
self.assertEqual(Model.check(), [
Error(
"'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', "
'which has not been installed, or is abstract.',
id='models.E002',
),
])
def test_two_m2m_through_same_relationship(self):
class Person(models.Model):
pass
class Group(models.Model):
primary = models.ManyToManyField(Person, through='Membership', related_name='primary')
secondary = models.ManyToManyField(Person, through='Membership', related_name='secondary')
class Membership(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
group = models.ForeignKey(Group, models.CASCADE)
self.assertEqual(Group.check(), [
Error(
"The model has two identical many-to-many relations through "
"the intermediate model 'invalid_models_tests.Membership'.",
obj=Group,
id='models.E003',
)
])
def test_two_m2m_through_same_model_with_different_through_fields(self):
class Country(models.Model):
pass
class ShippingMethod(models.Model):
to_countries = models.ManyToManyField(
Country, through='ShippingMethodPrice',
through_fields=('method', 'to_country'),
)
from_countries = models.ManyToManyField(
Country, through='ShippingMethodPrice',
through_fields=('method', 'from_country'),
related_name='+',
)
class ShippingMethodPrice(models.Model):
method = models.ForeignKey(ShippingMethod, models.CASCADE)
to_country = models.ForeignKey(Country, models.CASCADE)
from_country = models.ForeignKey(Country, models.CASCADE)
self.assertEqual(ShippingMethod.check(), [])
def test_onetoone_with_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking')
self.assertEqual(ParkingLot.check(), [])
def test_onetoone_with_explicit_parent_link_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
place = models.OneToOneField(Place, models.CASCADE, parent_link=True, primary_key=True)
other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking')
self.assertEqual(ParkingLot.check(), [])
def test_m2m_table_name_clash(self):
class Foo(models.Model):
bar = models.ManyToManyField('Bar', db_table='myapp_bar')
class Meta:
db_table = 'myapp_foo'
class Bar(models.Model):
class Meta:
db_table = 'myapp_bar'
self.assertEqual(Foo.check(), [
Error(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field('bar'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
bar = models.ManyToManyField('Bar', db_table='myapp_bar')
class Meta:
db_table = 'myapp_foo'
class Bar(models.Model):
class Meta:
db_table = 'myapp_bar'
self.assertEqual(Foo.check(), [
Warning(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field('bar'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Bar' is "
"correctly routed to a separate database."
),
id='fields.W344',
),
])
def test_m2m_field_table_name_clash(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
self.assertEqual(Bar.check() + Baz.check(), [
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Baz.foos'.",
obj=Bar._meta.get_field('foos'),
id='fields.E340',
),
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Bar.foos'.",
obj=Baz._meta.get_field('foos'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_field_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
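        # Each model gets a warning about the other model's clashing
        # intermediary table.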
self.assertEqual(Bar.check() + Baz.check(), [
Warning(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.%s.foos'."
% clashing_model,
obj=model_cls._meta.get_field('foos'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.%s.foos' is "
"correctly routed to a separate database." % clashing_model
),
id='fields.W344',
) for model_cls, clashing_model in [(Bar, 'Baz'), (Baz, 'Bar')]
])
def test_m2m_autogenerated_table_name_clash(self):
class Foo(models.Model):
class Meta:
db_table = 'bar_foos'
class Bar(models.Model):
# The autogenerated `db_table` will be bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = 'bar'
self.assertEqual(Bar.check(), [
Error(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field('foos'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_autogenerated_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
class Meta:
db_table = 'bar_foos'
class Bar(models.Model):
# The autogenerated db_table is bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = 'bar'
self.assertEqual(Bar.check(), [
Warning(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field('foos'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Foo' is "
"correctly routed to a separate database."
),
id='fields.W344',
),
])
def test_m2m_unmanaged_shadow_models_not_checked(self):
class A1(models.Model):
pass
class C1(models.Model):
mm_a = models.ManyToManyField(A1, db_table='d1')
# Unmanaged models that shadow the above models. Reused table names
# shouldn't be flagged by any checks.
class A2(models.Model):
class Meta:
managed = False
class C2(models.Model):
mm_a = models.ManyToManyField(A2, through='Intermediate')
class Meta:
managed = False
class Intermediate(models.Model):
a2 = models.ForeignKey(A2, models.CASCADE, db_column='a1_id')
c2 = models.ForeignKey(C2, models.CASCADE, db_column='c1_id')
class Meta:
db_table = 'd1'
managed = False
self.assertEqual(C1.check(), [])
self.assertEqual(C2.check(), [])
def test_m2m_to_concrete_and_proxy_allowed(self):
class A(models.Model):
pass
class Through(models.Model):
a = models.ForeignKey('A', models.CASCADE)
c = models.ForeignKey('C', models.CASCADE)
class ThroughProxy(Through):
class Meta:
proxy = True
class C(models.Model):
mm_a = models.ManyToManyField(A, through=Through)
mm_aproxy = models.ManyToManyField(A, through=ThroughProxy, related_name='proxied_m2m')
self.assertEqual(C.check(), [])
@isolate_apps('django.contrib.auth', kwarg_name='apps')
def test_lazy_reference_checks(self, apps):
class DummyModel(models.Model):
author = models.ForeignKey('Author', models.CASCADE)
class Meta:
app_label = 'invalid_models_tests'
class DummyClass:
def __call__(self, **kwargs):
pass
def dummy_method(self):
pass
def dummy_function(*args, **kwargs):
pass
apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel'))
apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel'))
post_init.connect(dummy_function, sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps)
self.assertEqual(_check_lazy_references(apps), [
Error(
"%r contains a lazy reference to auth.imaginarymodel, "
"but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"%r contains a lazy reference to fanciful_app.imaginarymodel, "
"but app 'fanciful_app' isn't installed." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"An instance of class 'DummyClass' was connected to "
"the 'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
Error(
"Bound method 'DummyClass.dummy_method' was connected to the "
"'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
Error(
"The field invalid_models_tests.DummyModel.author was declared "
"with a lazy reference to 'invalid_models_tests.author', but app "
"'invalid_models_tests' isn't installed.",
hint=None,
obj=DummyModel.author.field,
id='fields.E307',
),
Error(
"The function 'dummy_function' was connected to the 'post_init' "
"signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
])
class MultipleAutoFieldsTests(TestCase):
def test_multiple_autofields(self):
msg = (
"Model invalid_models_tests.MultipleAutoFields can't have more "
"than one auto-generated field."
)
with self.assertRaisesMessage(ValueError, msg):
class MultipleAutoFields(models.Model):
auto1 = models.AutoField(primary_key=True)
auto2 = models.AutoField(primary_key=True)
@isolate_apps('invalid_models_tests')
class JSONFieldTests(TestCase):
@skipUnlessDBFeature('supports_json_field')
def test_ordering_pointing_to_json_field_value(self):
class Model(models.Model):
field = models.JSONField()
class Meta:
ordering = ['field__value']
self.assertEqual(Model.check(databases=self.databases), [])
def test_check_jsonfield(self):
class Model(models.Model):
field = models.JSONField()
error = Error(
'%s does not support JSONFields.' % connection.display_name,
obj=Model,
id='fields.E180',
)
expected = [] if connection.features.supports_json_field else [error]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_check_jsonfield_required_db_features(self):
class Model(models.Model):
field = models.JSONField()
class Meta:
required_db_features = {'supports_json_field'}
self.assertEqual(Model.check(databases=self.databases), [])
@isolate_apps('invalid_models_tests')
class ConstraintsTests(TestCase):
def test_check_constraints(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')]
errors = Model.check(databases=self.databases)
warn = Warning(
'%s does not support check constraints.' % connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=Model,
id='models.W027',
)
expected = [] if connection.features.supports_table_check_constraints else [warn]
self.assertCountEqual(errors, expected)
def test_check_constraints_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_table_check_constraints'}
constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')]
self.assertEqual(Model.check(databases=self.databases), [])
def test_check_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
required_db_features = {'supports_table_check_constraints'}
constraints = [
models.CheckConstraint(
name='name', check=models.Q(missing_field=2),
),
]
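        # The error is expected only on databases that support table check
        # constraints.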
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
] if connection.features.supports_table_check_constraints else [])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_reverse_fk(self):
class Model(models.Model):
parent = models.ForeignKey('self', models.CASCADE, related_name='parents')
class Meta:
constraints = [
models.CheckConstraint(name='name', check=models.Q(parents=3)),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field 'parents'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_reverse_o2o(self):
class Model(models.Model):
parent = models.OneToOneField('self', models.CASCADE)
class Meta:
constraints = [
models.CheckConstraint(
name='name',
check=models.Q(model__isnull=True),
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field 'model'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
constraints = [
models.CheckConstraint(name='name', check=models.Q(m2m=2)),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id='models.E013',
),
])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1')
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2')
class Meta:
constraints = [
models.CheckConstraint(
name='name',
check=models.Q(fk_1_id=2) | models.Q(fk_2=2),
),
]
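        # Both the attname 'fk_1_id' and the field name 'fk_2' refer to local
        # fields, so no errors are expected.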
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_pk(self):
class Model(models.Model):
age = models.SmallIntegerField()
class Meta:
constraints = [
models.CheckConstraint(
name='name',
check=models.Q(pk__gt=5) & models.Q(age__gt=models.F('pk')),
),
]
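        # Referencing 'pk' in a check constraint is allowed.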
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
pass
class Meta:
constraints = [
models.CheckConstraint(name='name', check=models.Q(field1=1)),
]
self.assertEqual(Child.check(databases=self.databases), [
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Child,
id='models.E016',
),
])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_joined_fields(self):
class Model(models.Model):
name = models.CharField(max_length=10)
field1 = models.PositiveSmallIntegerField()
field2 = models.PositiveSmallIntegerField()
field3 = models.PositiveSmallIntegerField()
parent = models.ForeignKey('self', models.CASCADE)
previous = models.OneToOneField('self', models.CASCADE, related_name='next')
class Meta:
constraints = [
models.CheckConstraint(
name='name1', check=models.Q(
field1__lt=models.F('parent__field1') + models.F('parent__field2')
)
),
models.CheckConstraint(
name='name2', check=models.Q(name=Lower('parent__name'))
),
models.CheckConstraint(
name='name3', check=models.Q(parent__field3=models.F('field1'))
),
models.CheckConstraint(
name='name4', check=models.Q(name=Lower('previous__name')),
),
]
joined_fields = [
'parent__field1',
'parent__field2',
'parent__field3',
'parent__name',
'previous__name',
]
errors = Model.check(databases=self.databases)
expected_errors = [
Error(
"'constraints' refers to the joined field '%s'." % field_name,
obj=Model,
id='models.E041',
) for field_name in joined_fields
]
self.assertCountEqual(errors, expected_errors)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_joined_fields_complex_check(self):
class Model(models.Model):
name = models.PositiveSmallIntegerField()
field1 = models.PositiveSmallIntegerField()
field2 = models.PositiveSmallIntegerField()
parent = models.ForeignKey('self', models.CASCADE)
class Meta:
constraints = [
models.CheckConstraint(
name='name',
check=models.Q(
(
models.Q(name='test') &
models.Q(field1__lt=models.F('parent__field1'))
) |
(
models.Q(name__startswith=Lower('parent__name')) &
models.Q(field1__gte=(
models.F('parent__field1') + models.F('parent__field2')
))
)
) | (models.Q(name='test1'))
),
]
joined_fields = ['parent__field1', 'parent__field2', 'parent__name']
errors = Model.check(databases=self.databases)
expected_errors = [
Error(
"'constraints' refers to the joined field '%s'." % field_name,
obj=Model,
id='models.E041',
) for field_name in joined_fields
]
self.assertCountEqual(errors, expected_errors)
def test_unique_constraint_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_gte_100',
condition=models.Q(age__gte=100),
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_partial_indexes else [
Warning(
'%s does not support unique constraints with conditions.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id='models.W036',
),
]
self.assertEqual(errors, expected)
def test_unique_constraint_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_gte_100',
condition=models.Q(age__gte=100),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_condition_pointing_to_missing_field(self):
class Model(models.Model):
age = models.SmallIntegerField()
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
name='name',
fields=['age'],
condition=models.Q(missing_field=2),
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
] if connection.features.supports_partial_indexes else [])
def test_unique_constraint_condition_pointing_to_joined_fields(self):
class Model(models.Model):
age = models.SmallIntegerField()
parent = models.ForeignKey('self', models.CASCADE)
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
name='name',
fields=['age'],
condition=models.Q(parent__age__lt=2),
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the joined field 'parent__age__lt'.",
obj=Model,
id='models.E041',
)
] if connection.features.supports_partial_indexes else [])
def test_unique_constraint_pointing_to_reverse_o2o(self):
class Model(models.Model):
parent = models.OneToOneField('self', models.CASCADE)
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
fields=['parent'],
name='name',
condition=models.Q(model__isnull=True),
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field 'model'.",
obj=Model,
id='models.E012',
),
] if connection.features.supports_partial_indexes else [])
def test_deferrable_unique_constraint(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_deferrable',
deferrable=models.Deferrable.DEFERRED,
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_deferrable_unique_constraints else [
Warning(
'%s does not support deferrable unique constraints.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id='models.W038',
),
]
self.assertEqual(errors, expected)
def test_deferrable_unique_constraint_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_deferrable_unique_constraints'}
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_deferrable',
deferrable=models.Deferrable.IMMEDIATE,
),
]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [models.UniqueConstraint(fields=['missing_field'], name='name')]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_unique_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
constraints = [models.UniqueConstraint(fields=['m2m'], name='name')]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id='models.E013',
),
])
def test_unique_constraint_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(fields=['field2', 'field1'], name='name'),
]
self.assertEqual(Child.check(databases=self.databases), [
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Child,
id='models.E016',
),
])
def test_unique_constraint_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1')
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2')
class Meta:
constraints = [
models.UniqueConstraint(fields=['fk_1_id', 'fk_2'], name='name'),
]
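        # Both the FK attname 'fk_1_id' and the field name 'fk_2' are accepted.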
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_with_include(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_include_id',
include=['id'],
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_covering_indexes else [
Warning(
'%s does not support unique constraints with non-key columns.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id='models.W039',
),
]
self.assertEqual(errors, expected)
def test_unique_constraint_with_include_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_covering_indexes'}
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_include_id',
include=['id'],
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_covering_indexes')
def test_unique_constraint_include_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(
fields=['id'],
include=['missing_field'],
name='name',
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_unique_constraint_include_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
constraints = [
models.UniqueConstraint(
fields=['id'],
include=['m2m'],
name='name',
),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id='models.E013',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_unique_constraint_include_pointing_to_non_local_field(self):
class Parent(models.Model):
field1 = models.IntegerField()
class Child(Parent):
field2 = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=['field2'],
include=['field1'],
name='name',
),
]
self.assertEqual(Child.check(databases=self.databases), [
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Child'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Child,
id='models.E016',
),
])
@skipUnlessDBFeature('supports_covering_indexes')
def test_unique_constraint_include_pointing_to_fk(self):
class Target(models.Model):
pass
class Model(models.Model):
fk_1 = models.ForeignKey(Target, models.CASCADE, related_name='target_1')
fk_2 = models.ForeignKey(Target, models.CASCADE, related_name='target_2')
class Meta:
constraints = [
models.UniqueConstraint(
fields=['id'],
include=['fk_1_id', 'fk_2'],
name='name',
),
]
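        # 'include' may reference an FK by attname or by field name.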
self.assertEqual(Model.check(databases=self.databases), [])
def test_func_unique_constraint(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
constraints = [
models.UniqueConstraint(Lower('name'), name='lower_name_uq'),
]
warn = Warning(
'%s does not support unique constraints on expressions.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=Model,
id='models.W044',
)
expected = [] if connection.features.supports_expression_indexes else [warn]
self.assertEqual(Model.check(databases=self.databases), expected)
def test_func_unique_constraint_required_db_features(self):
class Model(models.Model):
name = models.CharField(max_length=10)
class Meta:
constraints = [
models.UniqueConstraint(Lower('name'), name='lower_name_unq'),
]
required_db_features = {'supports_expression_indexes'}
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_expression_custom_lookup(self):
class Model(models.Model):
height = models.IntegerField()
weight = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
models.F('height') / (models.F('weight__abs') + models.Value(5)),
name='name',
),
]
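        # Registering Abs as a lookup allows 'weight__abs' to resolve in the
        # expression.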
with register_lookup(models.IntegerField, Abs):
self.assertEqual(Model.check(databases=self.databases), [])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(Lower('missing_field').desc(), name='name'),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_pointing_to_missing_field_nested(self):
class Model(models.Model):
class Meta:
constraints = [
models.UniqueConstraint(Abs(Round('missing_field')), name='name'),
]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
obj=Model,
id='models.E012',
),
])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
constraints = [models.UniqueConstraint(Lower('m2m'), name='name')]
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'constraints'.",
obj=Model,
id='models.E013',
),
])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.CharField(max_length=15)
class Bar(Foo):
class Meta:
constraints = [models.UniqueConstraint(Lower('field1'), name='name')]
self.assertEqual(Bar.check(databases=self.databases), [
Error(
"'constraints' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
@skipUnlessDBFeature('supports_expression_indexes')
def test_func_unique_constraint_pointing_to_fk(self):
class Foo(models.Model):
id = models.CharField(primary_key=True, max_length=255)
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name='bar_2')
class Meta:
constraints = [
models.UniqueConstraint(
Lower('foo_1_id'),
Lower('foo_2'),
name='name',
),
]
self.assertEqual(Bar.check(databases=self.databases), [])