hash (string, 64 chars) | content (string, 0 to 1.51M chars)
---|---|
30a88b6db5b18f0463f663071fab75254fc3bd3d2b07637ac15083fc54e64025 |
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from .fields import (
ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField,
DateRangeField, DateTimeRangeField, DecimalRangeField, EnumField,
HStoreField, IntegerRangeField, JSONField, SearchVectorField,
)
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
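# Note on the custom field above: only the integer tag_id is stored (in a
# smallint column); from_db_value()/to_python() rebuild Tag instances on read,
# so a value saved as Tag(5) compares equal to Tag(5) after a reload thanks to
# Tag.__eq__.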
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = 'postgresql'
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list, blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), blank=True, null=True)
field_nested = ArrayField(ArrayField(models.IntegerField(null=True)), null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class OtherTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField(), default=list)
uuids = ArrayField(models.UUIDField(), default=list)
decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2), default=list)
tags = ArrayField(TagField(), blank=True, null=True)
json = ArrayField(JSONField(default=dict), default=list)
int_ranges = ArrayField(IntegerRangeField(), blank=True, null=True)
bigint_ranges = ArrayField(BigIntegerRangeField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
array_field = ArrayField(HStoreField(), null=True)
class ArrayEnumModel(PostgreSQLModel):
array_of_enums = ArrayField(EnumField(max_length=20))
class CharFieldModel(models.Model):
field = models.CharField(max_length=16)
class TextFieldModel(models.Model):
field = models.TextField()
class SmallAutoFieldModel(models.Model):
id = models.SmallAutoField(primary_key=True)
class BigAutoFieldModel(models.Model):
id = models.BigAutoField(primary_key=True)
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
scene = models.CharField(max_length=255)
setting = models.CharField(max_length=255)
class Character(models.Model):
name = models.CharField(max_length=255)
class CITestModel(PostgreSQLModel):
name = CICharField(primary_key=True, max_length=255)
email = CIEmailField()
description = CITextField()
array_field = ArrayField(CITextField(), null=True)
class Line(PostgreSQLModel):
scene = models.ForeignKey('Scene', models.CASCADE)
character = models.ForeignKey('Character', models.CASCADE)
dialogue = models.TextField(blank=True, null=True)
dialogue_search_vector = SearchVectorField(blank=True, null=True)
dialogue_config = models.CharField(max_length=100, blank=True, null=True)
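# Illustrative full-text search sketch for the Line model above (assumes
# django.contrib.postgres.search; the query text is a made-up example):
#   Line.objects.annotate(sv=SearchVector('dialogue')).filter(sv=SearchQuery('shrubbery'))
# dialogue_search_vector can hold such a precomputed vector.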
class LineSavedSearch(PostgreSQLModel):
line = models.ForeignKey('Line', models.CASCADE)
query = models.CharField(max_length=100)
class RangesModel(PostgreSQLModel):
ints = IntegerRangeField(blank=True, null=True)
bigints = BigIntegerRangeField(blank=True, null=True)
decimals = DecimalRangeField(blank=True, null=True)
timestamps = DateTimeRangeField(blank=True, null=True)
timestamps_inner = DateTimeRangeField(blank=True, null=True)
dates = DateRangeField(blank=True, null=True)
dates_inner = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
integer = models.IntegerField(blank=True, null=True)
big_integer = models.BigIntegerField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
date = models.DateField(blank=True, null=True)
small_integer = models.SmallIntegerField(blank=True, null=True)
decimal_field = models.DecimalField(max_digits=5, decimal_places=2, blank=True, null=True)
class JSONModel(PostgreSQLModel):
field = JSONField(blank=True, null=True)
field_custom = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
class ArrayFieldSubclass(ArrayField):
def __init__(self, *args, **kwargs):
super().__init__(models.IntegerField())
class AggregateTestModel(models.Model):
"""
To test postgres-specific general aggregation functions
"""
char_field = models.CharField(max_length=30, blank=True)
integer_field = models.IntegerField(null=True)
boolean_field = models.BooleanField(null=True)
class StatTestModel(models.Model):
"""
To test postgres-specific aggregation functions for statistics
"""
int1 = models.IntegerField()
int2 = models.IntegerField()
related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
uuid = models.UUIDField(default=None, null=True)
class Room(models.Model):
number = models.IntegerField(unique=True)
class HotelReservation(PostgreSQLModel):
room = models.ForeignKey('Room', on_delete=models.CASCADE)
datespan = DateRangeField()
start = models.DateTimeField()
end = models.DateTimeField()
cancelled = models.BooleanField(default=False)
|
3d8c50af816cf47c686ff8b8b3d6eadd1197dbb18e95f16477527b30b9def887 |
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=100)
age = models.IntegerField()
friends = models.ManyToManyField('self', blank=True)
class Publisher(models.Model):
name = models.CharField(max_length=255)
num_awards = models.IntegerField()
class Book(models.Model):
isbn = models.CharField(max_length=9)
name = models.CharField(max_length=255)
pages = models.IntegerField()
rating = models.FloatField()
price = models.DecimalField(decimal_places=2, max_digits=6)
authors = models.ManyToManyField(Author)
contact = models.ForeignKey(Author, models.CASCADE, related_name='book_contact_set')
publisher = models.ForeignKey(Publisher, models.CASCADE)
pubdate = models.DateField()
class Store(models.Model):
name = models.CharField(max_length=255)
books = models.ManyToManyField(Book)
original_opening = models.DateTimeField()
friday_night_closing = models.TimeField()
area = models.IntegerField(null=True, db_column='surface')
class DepartmentStore(Store):
chain = models.CharField(max_length=255)
class Employee(models.Model):
    # The order of these fields matters; do not change it. Certain backends
# rely on field ordering to perform database conversions, and this
# model helps to test that.
first_name = models.CharField(max_length=20)
manager = models.BooleanField(default=False)
last_name = models.CharField(max_length=20)
store = models.ForeignKey(Store, models.CASCADE)
age = models.IntegerField()
salary = models.DecimalField(max_digits=8, decimal_places=2)
class Company(models.Model):
name = models.CharField(max_length=200)
motto = models.CharField(max_length=200, null=True, blank=True)
ticker_name = models.CharField(max_length=10, null=True, blank=True)
description = models.CharField(max_length=200, null=True, blank=True)
def __str__(self):
return 'Company(name=%s, motto=%s, ticker_name=%s, description=%s)' % (
self.name, self.motto, self.ticker_name, self.description,
)
class Ticket(models.Model):
active_at = models.DateTimeField()
duration = models.DurationField()
|
12e4b864922bb7c52c50ddaccb5dbc0f571b5f8e7804243b53670b46be6c3d3d |
from django.db import IntegrityError, transaction
from django.test import TestCase, skipIfDBFeature
from .models import Bar, Business, Employee, Foo
class BasicCustomPKTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.dan = Employee.objects.create(
employee_code=123, first_name="Dan", last_name="Jones",
)
cls.fran = Employee.objects.create(
employee_code=456, first_name="Fran", last_name="Bones",
)
cls.business = Business.objects.create(name="Sears")
cls.business.employees.add(cls.dan, cls.fran)
def test_querysets(self):
"""
Both pk and custom attribute_name can be used in filter and friends
"""
self.assertQuerysetEqual(
Employee.objects.filter(pk=123), [
"Dan Jones",
],
str
)
self.assertQuerysetEqual(
Employee.objects.filter(employee_code=123), [
"Dan Jones",
],
str
)
self.assertQuerysetEqual(
Employee.objects.filter(pk__in=[123, 456]), [
"Fran Bones",
"Dan Jones",
],
str
)
self.assertQuerysetEqual(
Employee.objects.all(), [
"Fran Bones",
"Dan Jones",
],
str
)
self.assertQuerysetEqual(
Business.objects.filter(name="Sears"), [
"Sears"
],
lambda b: b.name
)
self.assertQuerysetEqual(
Business.objects.filter(pk="Sears"), [
"Sears",
],
lambda b: b.name
)
def test_querysets_related_name(self):
"""
Custom pk doesn't affect related_name based lookups
"""
self.assertQuerysetEqual(
self.business.employees.all(), [
"Fran Bones",
"Dan Jones",
],
str
)
self.assertQuerysetEqual(
self.fran.business_set.all(), [
"Sears",
],
lambda b: b.name
)
def test_querysets_relational(self):
"""
Queries across tables, involving primary key
"""
self.assertQuerysetEqual(
Employee.objects.filter(business__name="Sears"), [
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Employee.objects.filter(business__pk="Sears"), [
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Business.objects.filter(employees__employee_code=123), [
"Sears",
],
lambda b: b.name
)
self.assertQuerysetEqual(
Business.objects.filter(employees__pk=123), [
"Sears",
],
lambda b: b.name,
)
self.assertQuerysetEqual(
Business.objects.filter(employees__first_name__startswith="Fran"), [
"Sears",
],
lambda b: b.name
)
def test_get(self):
"""
Get can accept pk or the real attribute name
"""
self.assertEqual(Employee.objects.get(pk=123), self.dan)
self.assertEqual(Employee.objects.get(pk=456), self.fran)
with self.assertRaises(Employee.DoesNotExist):
Employee.objects.get(pk=42)
# Use the name of the primary key, rather than pk.
self.assertEqual(Employee.objects.get(employee_code=123), self.dan)
def test_pk_attributes(self):
"""
        pk and attribute name are available on the model.
        No default id attribute is added.
"""
# pk can be used as a substitute for the primary key.
# The primary key can be accessed via the pk property on the model.
e = Employee.objects.get(pk=123)
self.assertEqual(e.pk, 123)
# Or we can use the real attribute name for the primary key:
self.assertEqual(e.employee_code, 123)
with self.assertRaisesMessage(AttributeError, "'Employee' object has no attribute 'id'"):
e.id
def test_in_bulk(self):
"""
Custom pks work with in_bulk, both for integer and non-integer types
"""
emps = Employee.objects.in_bulk([123, 456])
self.assertEqual(emps[123], self.dan)
self.assertEqual(Business.objects.in_bulk(["Sears"]), {
"Sears": self.business,
})
def test_save(self):
"""
custom pks do not affect save
"""
fran = Employee.objects.get(pk=456)
fran.last_name = "Jones"
fran.save()
self.assertQuerysetEqual(
Employee.objects.filter(last_name="Jones"), [
"Dan Jones",
"Fran Jones",
],
str
)
class CustomPKTests(TestCase):
def test_custom_pk_create(self):
"""
New objects can be created both with pk and the custom name
"""
Employee.objects.create(employee_code=1234, first_name="Foo", last_name="Bar")
Employee.objects.create(pk=1235, first_name="Foo", last_name="Baz")
Business.objects.create(name="Bears")
Business.objects.create(pk="Tears")
def test_unicode_pk(self):
# Primary key may be Unicode string.
Business.objects.create(name='jaźń')
def test_unique_pk(self):
# The primary key must also obviously be unique, so trying to create a
# new object with the same primary key will fail.
Employee.objects.create(
employee_code=123, first_name="Frank", last_name="Jones"
)
with self.assertRaises(IntegrityError):
with transaction.atomic():
Employee.objects.create(employee_code=123, first_name="Fred", last_name="Jones")
def test_zero_non_autoincrement_pk(self):
Employee.objects.create(
employee_code=0, first_name="Frank", last_name="Jones"
)
employee = Employee.objects.get(pk=0)
self.assertEqual(employee.employee_code, 0)
def test_custom_field_pk(self):
# Regression for #10785 -- Custom fields can be used for primary keys.
new_bar = Bar.objects.create()
new_foo = Foo.objects.create(bar=new_bar)
f = Foo.objects.get(bar=new_bar.pk)
self.assertEqual(f, new_foo)
self.assertEqual(f.bar, new_bar)
f = Foo.objects.get(bar=new_bar)
        self.assertEqual(f, new_foo)
self.assertEqual(f.bar, new_bar)
# SQLite lets objects be saved with an empty primary key, even though an
# integer is expected. So we can't check for an error being raised in that
# case for SQLite. Remove it from the suite for this next bit.
@skipIfDBFeature('supports_unspecified_pk')
def test_required_pk(self):
# The primary key must be specified, so an error is raised if you
# try to create an object without it.
with self.assertRaises(IntegrityError):
with transaction.atomic():
Employee.objects.create(first_name="Tom", last_name="Smith")
|
d98ac01f82549eaf6bf3e4d8dd3093f5bc783ffea6b5167969aa396d685114b6 |
from django import forms
from django.contrib import admin
from django.db import models
from .models import (
Author, BinaryTree, CapoFamiglia, Chapter, Child, ChildModel1, ChildModel2,
Consigliere, EditablePKBook, ExtraTerrestrial, Fashionista, FootNote,
Holder, Holder2, Holder3, Holder4, Holder5, Inner, Inner2, Inner3,
Inner4Stacked, Inner4Tabular, Inner5Stacked, Inner5Tabular, NonAutoPKBook,
NonAutoPKBookChild, Novel, NovelReadonlyChapter, OutfitItem,
ParentModelWithCustomPk, Poll, Profile, ProfileCollection, Question,
ReadOnlyInline, ShoppingWeakness, Sighting, SomeChildModel,
SomeParentModel, SottoCapo, Teacher, Title, TitleCollection,
)
site = admin.AdminSite(name="admin")
class BookInline(admin.TabularInline):
model = Author.books.through
class NonAutoPKBookTabularInline(admin.TabularInline):
model = NonAutoPKBook
classes = ('collapse',)
class NonAutoPKBookChildTabularInline(admin.TabularInline):
model = NonAutoPKBookChild
classes = ('collapse',)
class NonAutoPKBookStackedInline(admin.StackedInline):
model = NonAutoPKBook
classes = ('collapse',)
class EditablePKBookTabularInline(admin.TabularInline):
model = EditablePKBook
class EditablePKBookStackedInline(admin.StackedInline):
model = EditablePKBook
class AuthorAdmin(admin.ModelAdmin):
inlines = [
BookInline, NonAutoPKBookTabularInline, NonAutoPKBookStackedInline,
EditablePKBookTabularInline, EditablePKBookStackedInline,
NonAutoPKBookChildTabularInline,
]
class InnerInline(admin.StackedInline):
model = Inner
can_delete = False
readonly_fields = ('readonly',) # For bug #13174 tests.
class HolderAdmin(admin.ModelAdmin):
class Media:
js = ('my_awesome_admin_scripts.js',)
class ReadOnlyInlineInline(admin.TabularInline):
model = ReadOnlyInline
readonly_fields = ['name']
class InnerInline2(admin.StackedInline):
model = Inner2
class Media:
js = ('my_awesome_inline_scripts.js',)
class InnerInline2Tabular(admin.TabularInline):
model = Inner2
class CustomNumberWidget(forms.NumberInput):
class Media:
js = ('custom_number.js',)
class InnerInline3(admin.StackedInline):
model = Inner3
formfield_overrides = {
models.IntegerField: {'widget': CustomNumberWidget},
}
class Media:
js = ('my_awesome_inline_scripts.js',)
class TitleForm(forms.ModelForm):
title1 = forms.CharField(max_length=100)
def clean(self):
cleaned_data = self.cleaned_data
title1 = cleaned_data.get("title1")
title2 = cleaned_data.get("title2")
if title1 != title2:
raise forms.ValidationError("The two titles must be the same")
return cleaned_data
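# TitleForm.clean() above means that submitting, e.g., title1='Foo' and
# title2='Bar' in the inline raises "The two titles must be the same".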
class TitleInline(admin.TabularInline):
model = Title
form = TitleForm
extra = 1
class Inner4StackedInline(admin.StackedInline):
model = Inner4Stacked
show_change_link = True
class Inner4TabularInline(admin.TabularInline):
model = Inner4Tabular
show_change_link = True
class Holder4Admin(admin.ModelAdmin):
inlines = [Inner4StackedInline, Inner4TabularInline]
class Inner5StackedInline(admin.StackedInline):
model = Inner5Stacked
classes = ('collapse',)
class Inner5TabularInline(admin.TabularInline):
model = Inner5Tabular
classes = ('collapse',)
class Holder5Admin(admin.ModelAdmin):
inlines = [Inner5StackedInline, Inner5TabularInline]
class InlineWeakness(admin.TabularInline):
model = ShoppingWeakness
extra = 1
class WeaknessForm(forms.ModelForm):
extra_field = forms.CharField()
class Meta:
model = ShoppingWeakness
fields = '__all__'
class WeaknessInlineCustomForm(admin.TabularInline):
model = ShoppingWeakness
form = WeaknessForm
class FootNoteForm(forms.ModelForm):
extra_field = forms.CharField()
class Meta:
model = FootNote
fields = '__all__'
class FootNoteNonEditableInlineCustomForm(admin.TabularInline):
model = FootNote
form = FootNoteForm
def has_change_permission(self, request, obj=None):
return False
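# Because has_change_permission() returns False, existing FootNote rows are
# shown read-only in this inline instead of as editable forms.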
class QuestionInline(admin.TabularInline):
model = Question
readonly_fields = ['call_me']
def call_me(self, obj):
return 'Callable in QuestionInline'
class PollAdmin(admin.ModelAdmin):
inlines = [QuestionInline]
def call_me(self, obj):
return 'Callable in PollAdmin'
class ChapterInline(admin.TabularInline):
model = Chapter
readonly_fields = ['call_me']
def call_me(self, obj):
return 'Callable in ChapterInline'
class NovelAdmin(admin.ModelAdmin):
inlines = [ChapterInline]
class ReadOnlyChapterInline(admin.TabularInline):
model = Chapter
def has_change_permission(self, request, obj=None):
return False
class NovelReadonlyChapterAdmin(admin.ModelAdmin):
inlines = [ReadOnlyChapterInline]
class ConsigliereInline(admin.TabularInline):
model = Consigliere
class SottoCapoInline(admin.TabularInline):
model = SottoCapo
class ProfileInline(admin.TabularInline):
model = Profile
extra = 1
# admin for #18433
class ChildModel1Inline(admin.TabularInline):
model = ChildModel1
class ChildModel2Inline(admin.StackedInline):
model = ChildModel2
# admin for #19425 and #18388
class BinaryTreeAdmin(admin.TabularInline):
model = BinaryTree
def get_extra(self, request, obj=None, **kwargs):
extra = 2
if obj:
return extra - obj.binarytree_set.count()
return extra
def get_max_num(self, request, obj=None, **kwargs):
max_num = 3
if obj:
return max_num - obj.binarytree_set.count()
return max_num
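# Worked example for BinaryTreeAdmin: with one child node already saved,
# get_extra() returns 2 - 1 = 1 blank form and get_max_num() caps the total at
# 3 - 1 = 2 forms.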
# admin for #19524
class SightingInline(admin.TabularInline):
model = Sighting
# admin and form for #18263
class SomeChildModelForm(forms.ModelForm):
class Meta:
fields = '__all__'
model = SomeChildModel
widgets = {
'position': forms.HiddenInput,
}
labels = {'readonly_field': 'Label from ModelForm.Meta'}
help_texts = {'readonly_field': 'Help text from ModelForm.Meta'}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['name'].label = 'new label'
class SomeChildModelInline(admin.TabularInline):
model = SomeChildModel
form = SomeChildModelForm
readonly_fields = ('readonly_field',)
class StudentInline(admin.StackedInline):
model = Child
extra = 1
fieldsets = [
('Name', {'fields': ('name',), 'classes': ('collapse',)}),
]
class TeacherAdmin(admin.ModelAdmin):
inlines = [StudentInline]
site.register(TitleCollection, inlines=[TitleInline])
# Test bug #12561 and #12778
# only ModelAdmin media
site.register(Holder, HolderAdmin, inlines=[InnerInline])
# ModelAdmin and Inline media
site.register(Holder2, HolderAdmin, inlines=[InnerInline2, InnerInline2Tabular])
# only Inline media
site.register(Holder3, inlines=[InnerInline3])
site.register(Poll, PollAdmin)
site.register(Novel, NovelAdmin)
site.register(NovelReadonlyChapter, NovelReadonlyChapterAdmin)
site.register(Fashionista, inlines=[InlineWeakness])
site.register(Holder4, Holder4Admin)
site.register(Holder5, Holder5Admin)
site.register(Author, AuthorAdmin)
site.register(CapoFamiglia, inlines=[ConsigliereInline, SottoCapoInline, ReadOnlyInlineInline])
site.register(ProfileCollection, inlines=[ProfileInline])
site.register(ParentModelWithCustomPk, inlines=[ChildModel1Inline, ChildModel2Inline])
site.register(BinaryTree, inlines=[BinaryTreeAdmin])
site.register(ExtraTerrestrial, inlines=[SightingInline])
site.register(SomeParentModel, inlines=[SomeChildModelInline])
site.register([Question, Inner4Stacked, Inner4Tabular])
site.register(Teacher, TeacherAdmin)
site.register(Chapter, inlines=[FootNoteNonEditableInlineCustomForm])
site.register(OutfitItem, inlines=[WeaknessInlineCustomForm])
|
3e96d7abd670543165b6a418d6b005fdee9589262c811c705cbe8f2788c01aac |
"""
A series of tests to establish that the command-line management tools work as
advertised - especially with regard to the handling of the
DJANGO_SETTINGS_MODULE and default settings.py files.
"""
import os
import re
import shutil
import socket
import subprocess
import sys
import tempfile
import unittest
from io import StringIO
from unittest import mock
from django import conf, get_version
from django.conf import settings
from django.core.management import (
BaseCommand, CommandError, call_command, color,
)
from django.core.management.commands.loaddata import Command as LoaddataCommand
from django.core.management.commands.runserver import (
Command as RunserverCommand,
)
from django.core.management.commands.testserver import (
Command as TestserverCommand,
)
from django.db import ConnectionHandler, connection
from django.db.migrations.recorder import MigrationRecorder
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates')
SYSTEM_CHECK_MSG = 'System check identified no issues'
class AdminScriptTestCase(SimpleTestCase):
def setUp(self):
tmpdir = tempfile.TemporaryDirectory()
self.addCleanup(tmpdir.cleanup)
# os.path.realpath() is required for temporary directories on macOS,
# where `/var` is a symlink to `/private/var`.
self.test_dir = os.path.realpath(os.path.join(tmpdir.name, 'test_project'))
os.mkdir(self.test_dir)
def write_settings(self, filename, apps=None, is_dir=False, sdict=None, extra=None):
if is_dir:
settings_dir = os.path.join(self.test_dir, filename)
os.mkdir(settings_dir)
settings_file_path = os.path.join(settings_dir, '__init__.py')
else:
settings_file_path = os.path.join(self.test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
if extra:
settings_file.write("%s\n" % extra)
exports = [
'DATABASES',
'ROOT_URLCONF',
'SECRET_KEY',
]
for s in exports:
if hasattr(settings, s):
o = getattr(settings, s)
if not isinstance(o, (dict, tuple, list)):
o = "'%s'" % o
settings_file.write("%s = %s\n" % (s, o))
if apps is None:
apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts']
settings_file.write("INSTALLED_APPS = %s\n" % apps)
if sdict:
for k, v in sdict.items():
settings_file.write("%s = %s\n" % (k, v))
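    # The generated settings module therefore contains SECRET_KEY, DATABASES
    # and ROOT_URLCONF copied from the running test settings plus, by default,
    # INSTALLED_APPS = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'].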
def _ext_backend_paths(self):
"""
Returns the paths for any external backend packages.
"""
paths = []
for backend in settings.DATABASES.values():
package = backend['ENGINE'].split('.')[0]
if package != 'django':
backend_pkg = __import__(package)
backend_dir = os.path.dirname(backend_pkg.__file__)
paths.append(os.path.dirname(backend_dir))
return paths
def run_test(self, args, settings_file=None, apps=None):
base_dir = os.path.dirname(self.test_dir)
# The base dir for Django's tests is one level up.
tests_dir = os.path.dirname(os.path.dirname(__file__))
# The base dir for Django is one level above the test dir. We don't use
# `import django` to figure that out, so we don't pick up a Django
# from site-packages or similar.
django_dir = os.path.dirname(tests_dir)
ext_backend_base_dirs = self._ext_backend_paths()
# Define a temporary environment for the subprocess
test_environ = os.environ.copy()
# Set the test environment
if settings_file:
test_environ['DJANGO_SETTINGS_MODULE'] = settings_file
elif 'DJANGO_SETTINGS_MODULE' in test_environ:
del test_environ['DJANGO_SETTINGS_MODULE']
python_path = [base_dir, django_dir, tests_dir]
python_path.extend(ext_backend_base_dirs)
test_environ['PYTHONPATH'] = os.pathsep.join(python_path)
test_environ['PYTHONWARNINGS'] = ''
p = subprocess.run(
[sys.executable, *args],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=self.test_dir,
env=test_environ, universal_newlines=True,
)
return p.stdout, p.stderr
def run_django_admin(self, args, settings_file=None):
return self.run_test(['-m', 'django', *args], settings_file)
def run_manage(self, args, settings_file=None, manage_py=None):
template_manage_py = (
os.path.join(os.path.dirname(__file__), manage_py)
if manage_py else
os.path.join(os.path.dirname(conf.__file__), 'project_template', 'manage.py-tpl')
)
test_manage_py = os.path.join(self.test_dir, 'manage.py')
shutil.copyfile(template_manage_py, test_manage_py)
with open(test_manage_py) as fp:
manage_py_contents = fp.read()
manage_py_contents = manage_py_contents.replace(
"{{ project_name }}", "test_project")
with open(test_manage_py, 'w') as fp:
fp.write(manage_py_contents)
return self.run_test(['./manage.py', *args], settings_file)
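    # The copied manage.py is Django's project template, so after the
    # {{ project_name }} substitution it defaults DJANGO_SETTINGS_MODULE to
    # 'test_project.settings' via os.environ.setdefault(); the "default
    # settings" test classes below rely on that fallback.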
def assertNoOutput(self, stream):
"Utility assertion: assert that the given stream is empty"
self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream)
def assertOutput(self, stream, msg, regex=False):
"Utility assertion: assert that the given message exists in the output"
if regex:
self.assertIsNotNone(
re.search(msg, stream),
"'%s' does not match actual output text '%s'" % (msg, stream)
)
else:
self.assertIn(msg, stream, "'%s' does not match actual output text '%s'" % (msg, stream))
def assertNotInOutput(self, stream, msg):
"Utility assertion: assert that the given message doesn't exist in the output"
self.assertNotIn(msg, stream, "'%s' matches actual output text '%s'" % (msg, stream))
##########################################################################
# DJANGO ADMIN TESTS
# This first series of test classes checks the environment processing
# of the django-admin.py script
##########################################################################
class DjangoAdminNoSettings(AdminScriptTestCase):
"A series of tests for django-admin.py when there is no settings.py file."
def test_builtin_command(self):
"no settings: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_commands_with_invalid_settings(self):
        """
Commands that don't require settings succeed if the settings file
doesn't exist.
"""
args = ['startproject']
out, err = self.run_django_admin(args, settings_file='bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "You must provide a project name", regex=True)
class DjangoAdminDefaultSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
contains the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_builtin_command(self):
"default: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"default: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"default: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"default: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"default: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
    def test_custom_command(self):
        "default: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"default: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"default: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes',
'admin_scripts', 'admin_scripts.complex_app'])
def test_builtin_command(self):
"fulldefault: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"fulldefault: django-admin builtin commands succeed if a settings file is provided"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"fulldefault: django-admin builtin commands succeed if the environment contains settings"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"fulldefault: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"fulldefault: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"fulldefault: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMinimalSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def test_builtin_command(self):
"minimal: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"minimal: django-admin builtin commands fail if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_environment(self):
"minimal: django-admin builtin commands fail if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_bad_settings(self):
"minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"minimal: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: django-admin can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: django-admin can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class DjangoAdminAlternateSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when using a settings file
with a name other than 'settings.py'.
"""
def setUp(self):
super().setUp()
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminMultipleSettings(AdminScriptTestCase):
"""A series of tests for django-admin.py when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_settings(self):
"alternate: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"alternate: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"alternate: django-admin can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.alternate_settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"alternate: django-admin can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_django_admin(args, 'test_project.alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class DjangoAdminSettingsDirectory(AdminScriptTestCase):
"""
A series of tests for django-admin.py when the settings file is in a
    directory (see #9751).
"""
def setUp(self):
super().setUp()
self.write_settings('settings', is_dir=True)
def test_setup_environ(self):
"directory: startapp creates the correct directory"
args = ['startapp', 'settings_test']
app_path = os.path.join(self.test_dir, 'settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
with open(os.path.join(app_path, 'apps.py')) as f:
content = f.read()
self.assertIn("class SettingsTestConfig(AppConfig)", content)
self.assertIn("name = 'settings_test'", content)
def test_setup_environ_custom_template(self):
"directory: startapp creates the correct directory with a custom template"
template_path = os.path.join(custom_templates_dir, 'app_template')
args = ['startapp', '--template', template_path, 'custom_settings_test']
app_path = os.path.join(self.test_dir, 'custom_settings_test')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py')))
def test_startapp_unicode_name(self):
"""startapp creates the correct directory with Unicode characters."""
args = ['startapp', 'こんにちは']
app_path = os.path.join(self.test_dir, 'こんにちは')
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertTrue(os.path.exists(app_path))
with open(os.path.join(app_path, 'apps.py'), encoding='utf8') as f:
content = f.read()
self.assertIn("class こんにちはConfig(AppConfig)", content)
self.assertIn("name = 'こんにちは'", content)
def test_builtin_command(self):
"directory: django-admin builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, 'settings are not configured')
def test_builtin_with_bad_settings(self):
"directory: django-admin builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"directory: django-admin builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"directory: django-admin can't execute user commands unless settings are provided"
args = ['noargs_command']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "No Django settings specified")
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_builtin_with_settings(self):
"directory: django-admin builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"directory: django-admin builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
##########################################################################
# MANAGE.PY TESTS
# This next series of test classes checks the environment processing
# of the generated manage.py script
##########################################################################
class ManageManuallyConfiguredSettings(AdminScriptTestCase):
"""Customized manage.py calling settings.configure()."""
def test_non_existent_command_output(self):
out, err = self.run_manage(['invalid_command'], manage_py='configured_settings_manage.py')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'invalid_command'")
self.assertNotInOutput(err, 'No Django settings specified')
class ManageNoSettings(AdminScriptTestCase):
"A series of tests for manage.py when there is no settings.py file."
def test_builtin_command(self):
"no settings: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_builtin_with_bad_settings(self):
"no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
class ManageDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_builtin_command(self):
"default: manage.py builtin commands succeed when default settings are appropriate"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_settings(self):
"default: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"default: manage.py builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "default: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"default: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"default: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_settings(self):
"default: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"default: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageFullPathDefaultSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
contains the test application specified using a full path.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'])
def test_builtin_command(self):
"fulldefault: manage.py builtin commands succeed when default settings are appropriate"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_settings(self):
"fulldefault: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"fulldefault: manage.py builtin commands succeed if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
    def test_builtin_with_bad_settings(self):
        "fulldefault: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"fulldefault: manage.py can execute user commands when default settings are appropriate"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_settings(self):
"fulldefault: manage.py can execute user commands when settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"fulldefault: manage.py can execute user commands when settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageMinimalSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings.py file that
doesn't contain the test application.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
def test_builtin_command(self):
"minimal: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_settings(self):
"minimal: manage.py builtin commands fail if settings are provided as argument"
args = ['check', '--settings=test_project.settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_environment(self):
"minimal: manage.py builtin commands fail if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_bad_settings(self):
"minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"minimal: manage.py can't execute user commands without appropriate settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"minimal: manage.py can't execute user commands, even if settings are provided as argument"
args = ['noargs_command', '--settings=test_project.settings']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_environment(self):
"minimal: manage.py can't execute user commands, even if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'test_project.settings')
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
class ManageAlternateSettings(AdminScriptTestCase):
"""A series of tests for manage.py when using a settings file
with a name other than 'settings.py'.
"""
def setUp(self):
super().setUp()
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"alternate: manage.py builtin commands fail with an error when no default settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_builtin_with_settings(self):
"alternate: manage.py builtin commands work with settings provided as argument"
args = ['check', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertOutput(out, SYSTEM_CHECK_MSG)
self.assertNoOutput(err)
def test_builtin_with_environment(self):
"alternate: manage.py builtin commands work if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
self.assertOutput(out, SYSTEM_CHECK_MSG)
self.assertNoOutput(err)
def test_builtin_with_bad_settings(self):
"alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"alternate: manage.py can't execute user commands without settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)
def test_custom_command_with_settings(self):
"alternate: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', "
"'alternate_settings'), ('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
def test_custom_command_with_environment(self):
"alternate: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', None), "
"('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
def test_custom_command_output_color(self):
"alternate: manage.py output syntax color can be deactivated with the `--no-color` option"
args = ['noargs_command', '--no-color', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', True), ('pythonpath', None), ('settings', "
"'alternate_settings'), ('traceback', False), ('verbosity', 1)]"
)
self.assertNoOutput(err)
class ManageMultipleSettings(AdminScriptTestCase):
"""A series of tests for manage.py when multiple settings files
(including the default 'settings.py') are available. The default settings
file is insufficient for performing the operations described, so the
alternate settings must be used by the running script.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_builtin_command(self):
"multiple: manage.py builtin commands fail with an error when no settings provided"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No installed app with label 'admin_scripts'.")
def test_builtin_with_settings(self):
"multiple: manage.py builtin commands succeed if settings are provided as argument"
args = ['check', '--settings=alternate_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_environment(self):
"multiple: manage.py can execute builtin commands if settings are provided in the environment"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, SYSTEM_CHECK_MSG)
def test_builtin_with_bad_settings(self):
"multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist"
args = ['check', '--settings=bad_settings', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_builtin_with_bad_environment(self):
"multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist"
args = ['check', 'admin_scripts']
out, err = self.run_manage(args, 'bad_settings')
self.assertNoOutput(out)
self.assertOutput(err, "No module named '?bad_settings'?", regex=True)
def test_custom_command(self):
"multiple: manage.py can't execute user commands using default settings"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'noargs_command'")
def test_custom_command_with_settings(self):
"multiple: manage.py can execute user commands if settings are provided as argument"
args = ['noargs_command', '--settings=alternate_settings']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
def test_custom_command_with_environment(self):
"multiple: manage.py can execute user commands if settings are provided in environment"
args = ['noargs_command']
out, err = self.run_manage(args, 'alternate_settings')
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE: noargs_command")
class ManageSettingsWithSettingsErrors(AdminScriptTestCase):
"""
Tests for manage.py when using the default settings.py file containing
runtime errors.
"""
def write_settings_with_import_error(self, filename):
settings_file_path = os.path.join(self.test_dir, filename)
with open(settings_file_path, 'w') as settings_file:
settings_file.write('# Settings file automatically generated by admin_scripts test case\n')
settings_file.write('# The next line will cause an import error:\nimport foo42bar\n')
def test_import_error(self):
"""
        import error: manage.py builtin commands show useful diagnostic info
        when a settings module with an import error is provided (#14130).
"""
self.write_settings_with_import_error('settings.py')
args = ['check', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "No module named")
self.assertOutput(err, "foo42bar")
def test_attribute_error(self):
"""
manage.py builtin commands does not swallow attribute error due to bad
settings (#18845).
"""
self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "AttributeError: 'list' object has no attribute 'crash'")
def test_key_error(self):
self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'})
args = ['collectstatic', 'admin_scripts']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "KeyError: 'blah'")
def test_help(self):
"""
        When settings can't be loaded, the 'help' output notes that only
        Django core commands are listed.
"""
self.write_settings(
'settings.py',
extra='from django.core.exceptions import ImproperlyConfigured\n'
'raise ImproperlyConfigured()',
)
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, 'only Django core commands are listed')
self.assertNoOutput(err)
class ManageCheck(AdminScriptTestCase):
def test_nonexistent_app(self):
"""check reports an error on a nonexistent app in INSTALLED_APPS."""
self.write_settings(
'settings.py',
apps=['admin_scriptz.broken_app'],
sdict={'USE_I18N': False},
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'ModuleNotFoundError')
self.assertOutput(err, 'No module named')
self.assertOutput(err, 'admin_scriptz')
def test_broken_app(self):
""" manage.py check reports an ImportError if an app's models.py
raises one on import """
self.write_settings('settings.py', apps=['admin_scripts.broken_app'])
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, 'ImportError')
def test_complex_app(self):
""" manage.py check does not raise an ImportError validating a
complex app with nested calls to load_app """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.complex_app',
'admin_scripts.simple_app',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
],
sdict={
'DEBUG': True,
'MIDDLEWARE': [
'django.contrib.messages.middleware.MessageMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
],
'TEMPLATES': [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
],
}
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
def test_app_with_import(self):
""" manage.py check does not raise errors when an app imports a base
class that itself has an abstract base. """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_with_import',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sites',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertEqual(out, 'System check identified no issues (0 silenced).\n')
def test_output_format(self):
""" All errors/warnings should be sorted by level and by message. """
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_raising_messages',
'django.contrib.auth',
'django.contrib.contenttypes',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
expected_err = (
"SystemCheckError: System check identified some issues:\n"
"\n"
"ERRORS:\n"
"?: An error\n"
"\tHINT: Error hint\n"
"\n"
"WARNINGS:\n"
"a: Second warning\n"
"obj: First warning\n"
"\tHINT: Hint\n"
"\n"
"System check identified 3 issues (0 silenced).\n"
)
self.assertEqual(err, expected_err)
self.assertNoOutput(out)
def test_warning_does_not_halt(self):
"""
        When there are only warnings or less serious messages, Django
        shouldn't prevent the user from launching their project, so the
        `check` command shouldn't raise a `CommandError` exception.
        This test also verifies the output format.
"""
self.write_settings(
'settings.py',
apps=[
'admin_scripts.app_raising_warning',
'django.contrib.auth',
'django.contrib.contenttypes',
],
sdict={'DEBUG': True},
)
args = ['check']
out, err = self.run_manage(args)
expected_err = (
"System check identified some issues:\n" # No "CommandError: " part
"\n"
"WARNINGS:\n"
"?: A warning\n"
"\n"
"System check identified 1 issue (0 silenced).\n"
)
self.assertEqual(err, expected_err)
self.assertNoOutput(out)
class ManageRunserver(SimpleTestCase):
def setUp(self):
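        # Stub out run() so call_command() only exercises option parsing.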
def monkey_run(*args, **options):
return
self.output = StringIO()
self.cmd = RunserverCommand(stdout=self.output)
self.cmd.run = monkey_run
def assertServerSettings(self, addr, port, ipv6=False, raw_ipv6=False):
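        # Assert the address, port, and IPv6 flags that runserver parsed.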
self.assertEqual(self.cmd.addr, addr)
self.assertEqual(self.cmd.port, port)
self.assertEqual(self.cmd.use_ipv6, ipv6)
self.assertEqual(self.cmd._raw_ipv6, raw_ipv6)
def test_runserver_addrport(self):
call_command(self.cmd)
self.assertServerSettings('127.0.0.1', '8000')
call_command(self.cmd, addrport="1.2.3.4:8000")
self.assertServerSettings('1.2.3.4', '8000')
call_command(self.cmd, addrport="7000")
self.assertServerSettings('127.0.0.1', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_addrport_ipv6(self):
call_command(self.cmd, addrport="", use_ipv6=True)
self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True)
call_command(self.cmd, addrport="7000", use_ipv6=True)
self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True)
call_command(self.cmd, addrport="[2001:0db8:1234:5678::9]:7000")
self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True)
def test_runner_hostname(self):
call_command(self.cmd, addrport="localhost:8000")
self.assertServerSettings('localhost', '8000')
call_command(self.cmd, addrport="test.domain.local:7000")
self.assertServerSettings('test.domain.local', '7000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_hostname_ipv6(self):
call_command(self.cmd, addrport="test.domain.local:7000", use_ipv6=True)
self.assertServerSettings('test.domain.local', '7000', ipv6=True)
def test_runner_custom_defaults(self):
self.cmd.default_addr = '0.0.0.0'
self.cmd.default_port = '5000'
call_command(self.cmd)
self.assertServerSettings('0.0.0.0', '5000')
@unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6")
def test_runner_custom_defaults_ipv6(self):
self.cmd.default_addr_ipv6 = '::'
call_command(self.cmd, use_ipv6=True)
self.assertServerSettings('::', '8000', ipv6=True, raw_ipv6=True)
def test_runner_ambiguous(self):
# Only 4 characters, all of which could be in an ipv6 address
call_command(self.cmd, addrport="beef:7654")
self.assertServerSettings('beef', '7654')
# Uses only characters that could be in an ipv6 address
call_command(self.cmd, addrport="deadbeef:7654")
self.assertServerSettings('deadbeef', '7654')
def test_no_database(self):
"""
Ensure runserver.check_migrations doesn't choke on empty DATABASES.
"""
tested_connections = ConnectionHandler({})
with mock.patch('django.core.management.base.connections', new=tested_connections):
self.cmd.check_migrations()
def test_readonly_database(self):
"""
runserver.check_migrations() doesn't choke when a database is read-only.
"""
with mock.patch.object(MigrationRecorder, 'has_table', return_value=False):
self.cmd.check_migrations()
        # The output includes the "You have # unapplied migration(s)" warning.
self.assertIn('unapplied migration(s)', self.output.getvalue())
class ManageRunserverMigrationWarning(TestCase):
def setUp(self):
self.stdout = StringIO()
self.runserver_command = RunserverCommand(stdout=self.stdout)
@override_settings(INSTALLED_APPS=["admin_scripts.app_waiting_migration"])
def test_migration_warning_one_app(self):
self.runserver_command.check_migrations()
output = self.stdout.getvalue()
self.assertIn('You have 1 unapplied migration(s)', output)
self.assertIn('apply the migrations for app(s): app_waiting_migration.', output)
@override_settings(
INSTALLED_APPS=[
"admin_scripts.app_waiting_migration",
"admin_scripts.another_app_waiting_migration",
],
)
def test_migration_warning_multiple_apps(self):
self.runserver_command.check_migrations()
output = self.stdout.getvalue()
self.assertIn('You have 2 unapplied migration(s)', output)
self.assertIn(
'apply the migrations for app(s): another_app_waiting_migration, '
'app_waiting_migration.', output
)
class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase):
def setUp(self):
super().setUp()
self.write_settings('settings.py', sdict={
'ALLOWED_HOSTS': [],
'DEBUG': False,
})
def test_empty_allowed_hosts_error(self):
out, err = self.run_manage(['runserver'])
self.assertNoOutput(out)
self.assertOutput(err, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.')
class ManageTestserver(SimpleTestCase):
@mock.patch.object(TestserverCommand, 'handle', return_value='')
def test_testserver_handle_params(self, mock_handle):
out = StringIO()
call_command('testserver', 'blah.json', stdout=out)
mock_handle.assert_called_with(
'blah.json',
stdout=out, settings=None, pythonpath=None, verbosity=1,
traceback=False, addrport='', no_color=False, use_ipv6=False,
skip_checks=True, interactive=True, force_color=False,
)
@mock.patch('django.db.connection.creation.create_test_db', return_value='test_db')
@mock.patch.object(LoaddataCommand, 'handle', return_value='')
@mock.patch.object(RunserverCommand, 'handle', return_value='')
def test_params_to_runserver(self, mock_runserver_handle, mock_loaddata_handle, mock_create_test_db):
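        # testserver creates the test database, loads the fixture, then
        # delegates to runserver with auto-reloading disabled.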
out = StringIO()
call_command('testserver', 'blah.json', stdout=out)
mock_runserver_handle.assert_called_with(
addrport='',
force_color=False,
insecure_serving=False,
no_color=False,
pythonpath=None,
settings=None,
shutdown_message=(
"\nServer stopped.\nNote that the test database, 'test_db', "
"has not been deleted. You can explore it on your own."
),
skip_checks=True,
traceback=False,
use_ipv6=False,
use_reloader=False,
use_static_handler=True,
use_threading=connection.features.test_db_allows_multiple_connections,
verbosity=1,
)
##########################################################################
# COMMAND PROCESSING TESTS
# Check that user-space commands are correctly handled - in particular, that
# arguments to the commands are correctly parsed and processed.
##########################################################################
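# A minimal command that writes styled output to both stdout and stderr,
# used below to exercise color handling.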
class ColorCommand(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stdout.write('Hello, world!', self.style.ERROR)
self.stderr.write('Hello, world!', self.style.ERROR)
class CommandTypes(AdminScriptTestCase):
"Tests for the various types of base command types that can be defined."
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_version(self):
"version is handled as a special case"
args = ['version']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, get_version())
def test_version_alternative(self):
"--version is equivalent to version"
args1, args2 = ['version'], ['--version']
# It's possible one outputs on stderr and the other on stdout, hence the set
self.assertEqual(set(self.run_manage(args1)), set(self.run_manage(args2)))
def test_help(self):
"help is handled as a special case"
args = ['help']
out, err = self.run_manage(args)
self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.")
self.assertOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
def test_help_commands(self):
"help --commands shows the list of all available commands"
args = ['help', '--commands']
out, err = self.run_manage(args)
self.assertNotInOutput(out, 'usage:')
self.assertNotInOutput(out, 'Options:')
self.assertNotInOutput(out, '[django]')
self.assertOutput(out, 'startapp')
self.assertOutput(out, 'startproject')
self.assertNotInOutput(out, '\n\n')
def test_help_alternative(self):
"--help is equivalent to help"
args1, args2 = ['help'], ['--help']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
    def test_help_short_alternative(self):
"-h is handled as a short form of --help"
args1, args2 = ['--help'], ['-h']
self.assertEqual(self.run_manage(args1), self.run_manage(args2))
def test_specific_help(self):
"--help can be used on a specific command"
args = ['check', '--help']
out, err = self.run_manage(args)
self.assertNoOutput(err)
# Command-specific options like --tag appear before options common to
# all commands like --version.
tag_location = out.find('--tag')
version_location = out.find('--version')
self.assertNotEqual(tag_location, -1)
self.assertNotEqual(version_location, -1)
self.assertLess(tag_location, version_location)
self.assertOutput(out, "Checks the entire Django project for potential problems.")
def test_color_style(self):
style = color.no_style()
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('nocolor')
self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')
style = color.make_style('dark')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
# Default palette has color.
style = color.make_style('')
self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')
def test_command_color(self):
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err)
call_command(command)
if color.supports_color():
self.assertIn('Hello, world!\n', out.getvalue())
self.assertIn('Hello, world!\n', err.getvalue())
self.assertNotEqual(out.getvalue(), 'Hello, world!\n')
self.assertNotEqual(err.getvalue(), 'Hello, world!\n')
else:
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_command_no_color(self):
"--no-color prevent colorization of the output"
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err, no_color=True)
call_command(command)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
out = StringIO()
err = StringIO()
command = ColorCommand(stdout=out, stderr=err)
call_command(command, no_color=True)
self.assertEqual(out.getvalue(), 'Hello, world!\n')
self.assertEqual(err.getvalue(), 'Hello, world!\n')
def test_force_color_execute(self):
out = StringIO()
err = StringIO()
with mock.patch.object(sys.stdout, 'isatty', lambda: False):
command = ColorCommand(stdout=out, stderr=err)
call_command(command, force_color=True)
self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
def test_force_color_command_init(self):
out = StringIO()
err = StringIO()
with mock.patch.object(sys.stdout, 'isatty', lambda: False):
command = ColorCommand(stdout=out, stderr=err, force_color=True)
call_command(command)
self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
def test_no_color_force_color_mutually_exclusive_execute(self):
msg = "The --no-color and --force-color options can't be used together."
with self.assertRaisesMessage(CommandError, msg):
call_command(BaseCommand(), no_color=True, force_color=True)
def test_no_color_force_color_mutually_exclusive_command_init(self):
msg = "'no_color' and 'force_color' can't be used together."
with self.assertRaisesMessage(CommandError, msg):
call_command(BaseCommand(no_color=True, force_color=True))
def test_custom_stdout(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stdout.write("Hello, World!")
out = StringIO()
command = Command(stdout=out)
call_command(command)
self.assertEqual(out.getvalue(), "Hello, World!\n")
out.truncate(0)
new_out = StringIO()
call_command(command, stdout=new_out)
self.assertEqual(out.getvalue(), "")
self.assertEqual(new_out.getvalue(), "Hello, World!\n")
def test_custom_stderr(self):
class Command(BaseCommand):
requires_system_checks = False
def handle(self, *args, **options):
self.stderr.write("Hello, World!")
err = StringIO()
command = Command(stderr=err)
call_command(command)
self.assertEqual(err.getvalue(), "Hello, World!\n")
err.truncate(0)
new_err = StringIO()
call_command(command, stderr=new_err)
self.assertEqual(err.getvalue(), "")
self.assertEqual(new_err.getvalue(), "Hello, World!\n")
def test_base_command(self):
"User BaseCommands can execute when a label is provided"
args = ['base_command', 'testlabel']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels)
def test_base_command_no_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command']
expected_labels = "()"
self._test_base_command(args, expected_labels)
def test_base_command_multiple_label(self):
"User BaseCommands can execute when no labels are provided"
args = ['base_command', 'testlabel', 'anotherlabel']
expected_labels = "('testlabel', 'anotherlabel')"
self._test_base_command(args, expected_labels)
def test_base_command_with_option(self):
"User BaseCommands can execute with options when a label is provided"
args = ['base_command', 'testlabel', '--option_a=x']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'")
def test_base_command_with_options(self):
"User BaseCommands can execute with multiple options when a label is provided"
args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
expected_labels = "('testlabel',)"
self._test_base_command(args, expected_labels, option_a="'x'", option_b="'y'")
def test_base_command_with_wrong_option(self):
"User BaseCommands outputs command usage when wrong option is specified"
args = ['base_command', '--invalid']
out, err = self.run_manage(args)
self.assertNoOutput(out)
self.assertOutput(err, "usage: manage.py base_command")
self.assertOutput(err, "error: unrecognized arguments: --invalid")
def _test_base_command(self, args, labels, option_a="'1'", option_b="'2'"):
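        # Shared helper: run manage.py with ``args`` and check the full
        # EXECUTE line, including default option values.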
out, err = self.run_manage(args)
expected_out = (
"EXECUTE:BaseCommand labels=%s, "
"options=[('force_color', False), ('no_color', False), "
"('option_a', %s), ('option_b', %s), ('option_c', '3'), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]") % (labels, option_a, option_b)
self.assertNoOutput(err)
self.assertOutput(out, expected_out)
def test_base_run_from_argv(self):
"""
Test run_from_argv properly terminates even with custom execute() (#19665)
Also test proper traceback display.
"""
err = StringIO()
command = BaseCommand(stderr=err)
def raise_command_error(*args, **kwargs):
raise CommandError("Custom error")
command.execute = lambda args: args # This will trigger TypeError
# If the Exception is not CommandError it should always
# raise the original exception.
with self.assertRaises(TypeError):
command.run_from_argv(['', ''])
# If the Exception is CommandError and --traceback is not present
# this command should raise a SystemExit and don't print any
# traceback to the stderr.
command.execute = raise_command_error
err.truncate(0)
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
err_message = err.getvalue()
self.assertNotIn("Traceback", err_message)
self.assertIn("CommandError", err_message)
# If the Exception is CommandError and --traceback is present
# this command should raise the original CommandError as if it
# were not a CommandError.
err.truncate(0)
with self.assertRaises(CommandError):
command.run_from_argv(['', '', '--traceback'])
def test_run_from_argv_non_ascii_error(self):
"""
Non-ASCII message of CommandError does not raise any
UnicodeDecodeError in run_from_argv.
"""
def raise_command_error(*args, **kwargs):
raise CommandError("Erreur personnalisée")
command = BaseCommand(stderr=StringIO())
command.execute = raise_command_error
with self.assertRaises(SystemExit):
command.run_from_argv(['', ''])
def test_run_from_argv_closes_connections(self):
"""
A command called from the command line should close connections after
being executed (#21255).
"""
command = BaseCommand(stderr=StringIO())
command.check = lambda: []
command.handle = lambda *args, **kwargs: args
with mock.patch('django.core.management.base.connections') as mock_connections:
command.run_from_argv(['', ''])
# Test connections have been closed
self.assertTrue(mock_connections.close_all.called)
def test_noargs(self):
"NoArg Commands can be executed"
args = ['noargs_command']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE: noargs_command options=[('force_color', False), "
"('no_color', False), ('pythonpath', None), ('settings', None), "
"('traceback', False), ('verbosity', 1)]"
)
def test_noargs_with_args(self):
"NoArg Commands raise an error if an argument is provided"
args = ['noargs_command', 'argument']
out, err = self.run_manage(args)
self.assertOutput(err, "error: unrecognized arguments: argument")
def test_app_command(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'auth']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
def test_app_command_no_apps(self):
"User AppCommands raise an error when no app name is provided"
args = ['app_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'error: Enter at least one application label.')
def test_app_command_multiple_apps(self):
"User AppCommands raise an error when multiple app names are provided"
args = ['app_command', 'auth', 'contenttypes']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.contenttypes, options=")
self.assertOutput(
out,
", options=[('force_color', False), ('no_color', False), "
"('pythonpath', None), ('settings', None), ('traceback', False), "
"('verbosity', 1)]"
)
def test_app_command_invalid_app_label(self):
"User AppCommands can execute when a single app name is provided"
args = ['app_command', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_app_command_some_invalid_app_labels(self):
"User AppCommands can execute when some of the provided app names are invalid"
args = ['app_command', 'auth', 'NOT_AN_APP']
out, err = self.run_manage(args)
self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.")
def test_label_command(self):
"User LabelCommands can execute when a label is provided"
args = ['label_command', 'testlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=testlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), ('settings', "
"None), ('traceback', False), ('verbosity', 1)]"
)
def test_label_command_no_label(self):
"User LabelCommands raise an error if no label is provided"
args = ['label_command']
out, err = self.run_manage(args)
self.assertOutput(err, 'Enter at least one label')
def test_label_command_multiple_label(self):
"User LabelCommands are executed multiple times if multiple labels are provided"
args = ['label_command', 'testlabel', 'anotherlabel']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=testlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
self.assertOutput(
out,
"EXECUTE:LabelCommand label=anotherlabel, options=[('force_color', "
"False), ('no_color', False), ('pythonpath', None), "
"('settings', None), ('traceback', False), ('verbosity', 1)]"
)
class Discovery(SimpleTestCase):
def test_precedence(self):
"""
Apps listed first in INSTALLED_APPS have precedence.
"""
with self.settings(INSTALLED_APPS=['admin_scripts.complex_app',
'admin_scripts.simple_app',
'django.contrib.auth',
'django.contrib.contenttypes']):
out = StringIO()
call_command('duplicate', stdout=out)
self.assertEqual(out.getvalue().strip(), 'complex_app')
with self.settings(INSTALLED_APPS=['admin_scripts.simple_app',
'admin_scripts.complex_app',
'django.contrib.auth',
'django.contrib.contenttypes']):
out = StringIO()
call_command('duplicate', stdout=out)
self.assertEqual(out.getvalue().strip(), 'simple_app')
class ArgumentOrder(AdminScriptTestCase):
"""Tests for 2-stage argument parsing scheme.
    django-admin command arguments are parsed in two stages: the core
    arguments (--settings, --traceback and --pythonpath) are parsed first,
    using a basic parser that ignores any unknown options. The full argument
    list is then passed to the command's own parser, which extracts the
    options of interest to the individual command.
"""
def setUp(self):
super().setUp()
self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes'])
self.write_settings('alternate_settings.py')
def test_setting_then_option(self):
""" Options passed after settings are correctly handled. """
args = ['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x']
self._test(args)
def test_setting_then_short_option(self):
""" Short options passed after settings are correctly handled. """
args = ['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x']
self._test(args)
def test_option_then_setting(self):
""" Options passed before settings are correctly handled. """
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings']
self._test(args)
def test_short_option_then_setting(self):
""" Short options passed before settings are correctly handled. """
args = ['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings']
self._test(args)
def test_option_then_setting_then_option(self):
""" Options are correctly handled when they are passed before and after
a setting. """
args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y']
self._test(args, option_b="'y'")
def _test(self, args, option_b="'2'"):
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(
out,
"EXECUTE:BaseCommand labels=('testlabel',), options=["
"('force_color', False), ('no_color', False), ('option_a', 'x'), "
"('option_b', %s), ('option_c', '3'), ('pythonpath', None), "
"('settings', 'alternate_settings'), ('traceback', False), "
"('verbosity', 1)]" % option_b
)
@override_settings(ROOT_URLCONF='admin_scripts.urls')
class StartProject(LiveServerTestCase, AdminScriptTestCase):
available_apps = [
'admin_scripts',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
def test_wrong_args(self):
"Make sure passing the wrong kinds of arguments outputs an error and prints usage"
out, err = self.run_django_admin(['startproject'])
self.assertNoOutput(out)
self.assertOutput(err, "usage:")
self.assertOutput(err, "You must provide a project name.")
def test_simple_project(self):
"Make sure the startproject management command creates a project"
args = ['startproject', 'testproject']
testproject_dir = os.path.join(self.test_dir, 'testproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
        # Running again fails: the project name now clashes with the module
        # just created.
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(
err,
"CommandError: 'testproject' conflicts with the name of an "
"existing Python module and cannot be used as a project name. "
"Please try another name.",
)
def test_invalid_project_name(self):
"Make sure the startproject management command validates a project name"
for bad_name in ('7testproject', '../testproject'):
with self.subTest(project_name=bad_name):
args = ['startproject', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"Error: '%s' is not a valid project name. Please make "
"sure the name is a valid identifier." % bad_name
)
self.assertFalse(os.path.exists(testproject_dir))
def test_importable_project_name(self):
"""
startproject validates that project name doesn't clash with existing
Python modules.
"""
bad_name = 'os'
args = ['startproject', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing "
"Python module and cannot be used as a project name. Please try "
"another name."
)
self.assertFalse(os.path.exists(testproject_dir))
def test_simple_project_different_directory(self):
"Make sure the startproject management command creates a project in a specific directory"
args = ['startproject', 'testproject', 'othertestproject']
testproject_dir = os.path.join(self.test_dir, 'othertestproject')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py')))
        # Running again fails: conflicting files in the existing directory
        # won't be replaced.
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(
err,
"already exists. Overlaying a project into an existing directory "
"won't replace conflicting files."
)
def test_custom_project_template(self):
"Make sure the startproject management command is able to use a different project template"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_template_dir_with_trailing_slash(self):
"Ticket 17475: Template dir passed has a trailing path separator"
template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep)
args = ['startproject', '--template', template_path, 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
def test_custom_project_template_from_tarball_by_path(self):
"Make sure the startproject management command is able to use a different project template from a tarball"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject']
testproject_dir = os.path.join(self.test_dir, 'tarballtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_to_alternative_location(self):
"Startproject can use a project template from a tarball and create it in a specified location"
template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation']
testproject_dir = os.path.join(self.test_dir, 'altlocation')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_custom_project_template_from_tarball_by_url(self):
"""
The startproject management command is able to use a different project
template from a tarball via a URL.
"""
template_url = '%s/custom_templates/project_template.tgz' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(self.test_dir, 'urltestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_project_template_tarball_url(self):
"Startproject management command handles project template tar/zip balls from non-canonical urls"
template_url = '%s/custom_templates/project_template.tgz/' % self.live_server_url
args = ['startproject', '--template', template_url, 'urltestproject']
testproject_dir = os.path.join(self.test_dir, 'urltestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))
def test_file_without_extension(self):
"Make sure the startproject management command is able to render custom files"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))
base_path = os.path.join(testproject_dir, 'additional_dir')
for f in ('Procfile', 'additional_file.py', 'requirements.txt'):
self.assertTrue(os.path.exists(os.path.join(base_path, f)))
with open(os.path.join(base_path, f)) as fh:
self.assertEqual(fh.read().strip(), '# some file for customtestproject test project')
def test_custom_project_template_context_variables(self):
"Make sure template context variables are rendered with proper values"
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'another_project', 'project_dir']
testproject_dir = os.path.join(self.test_dir, 'project_dir')
os.mkdir(testproject_dir)
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'manage.py')
with open(test_manage_py) as fp:
content = fp.read()
self.assertIn("project_name = 'another_project'", content)
self.assertIn("project_directory = '%s'" % testproject_dir, content)
def test_no_escaping_of_project_variables(self):
"Make sure template context variables are not html escaped"
# We're using a custom command so we need the alternate settings
self.write_settings('alternate_settings.py')
template_path = os.path.join(custom_templates_dir, 'project_template')
args = [
'custom_startproject', '--template', template_path,
'another_project', 'project_dir', '--extra', '<&>',
'--settings=alternate_settings',
]
testproject_dir = os.path.join(self.test_dir, 'project_dir')
os.mkdir(testproject_dir)
out, err = self.run_manage(args)
self.assertNoOutput(err)
test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py')
with open(test_manage_py) as fp:
content = fp.read()
self.assertIn("<&>", content)
def test_custom_project_destination_missing(self):
"""
Make sure an exception is raised when the provided
destination directory doesn't exist
"""
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2']
testproject_dir = os.path.join(self.test_dir, 'project_dir2')
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir)
self.assertFalse(os.path.exists(testproject_dir))
def test_custom_project_template_with_non_ascii_templates(self):
"""
The startproject management command is able to render templates with
non-ASCII content.
"""
template_path = os.path.join(custom_templates_dir, 'project_template')
args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject']
testproject_dir = os.path.join(self.test_dir, 'customtestproject')
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertTrue(os.path.isdir(testproject_dir))
path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt')
with open(path, encoding='utf-8') as f:
self.assertEqual(f.read().splitlines(False), [
'Some non-ASCII text for testing ticket #18091:',
'üäö €'])
class StartApp(AdminScriptTestCase):
def test_invalid_name(self):
"""startapp validates that app name is a valid Python identifier."""
for bad_name in ('7testproject', '../testproject'):
with self.subTest(app_name=bad_name):
args = ['startapp', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: '{}' is not a valid app name. Please make "
"sure the name is a valid identifier.".format(bad_name)
)
self.assertFalse(os.path.exists(testproject_dir))
def test_importable_name(self):
"""
startapp validates that app name doesn't clash with existing Python
modules.
"""
bad_name = 'os'
args = ['startapp', bad_name]
testproject_dir = os.path.join(self.test_dir, bad_name)
out, err = self.run_django_admin(args)
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing "
"Python module and cannot be used as an app name. Please try "
"another name."
)
self.assertFalse(os.path.exists(testproject_dir))
def test_invalid_target_name(self):
for bad_target in ('invalid.dir_name', '7invalid_dir_name', '.invalid_dir_name'):
with self.subTest(bad_target):
_, err = self.run_django_admin(['startapp', 'app', bad_target])
self.assertOutput(
err,
"CommandError: '%s' is not a valid app directory. Please "
"make sure the directory is a valid identifier." % bad_target
)
def test_importable_target_name(self):
_, err = self.run_django_admin(['startapp', 'app', 'os'])
self.assertOutput(
err,
"CommandError: 'os' conflicts with the name of an existing Python "
"module and cannot be used as an app directory. Please try "
"another directory."
)
def test_overlaying_app(self):
# Use a subdirectory so it is outside the PYTHONPATH.
os.makedirs(os.path.join(self.test_dir, 'apps/app1'))
self.run_django_admin(['startapp', 'app1', 'apps/app1'])
out, err = self.run_django_admin(['startapp', 'app2', 'apps/app1'])
self.assertOutput(
err,
"already exists. Overlaying an app into an existing directory "
"won't replace conflicting files."
)
class DiffSettings(AdminScriptTestCase):
"""Tests for diffsettings management command."""
def test_basic(self):
"""Runs without error and emits settings diff."""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "FOO = 'bar' ###")
# Attributes from django.conf.Settings don't appear.
self.assertNotInOutput(out, 'is_overridden = ')
def test_settings_configured(self):
out, err = self.run_manage(['diffsettings'], manage_py='configured_settings_manage.py')
self.assertNoOutput(err)
self.assertOutput(out, 'CUSTOM = 1 ###\nDEBUG = True')
# Attributes from django.conf.UserSettingsHolder don't appear.
self.assertNotInOutput(out, 'default_settings = ')
def test_dynamic_settings_configured(self):
# Custom default settings appear.
out, err = self.run_manage(['diffsettings'], manage_py='configured_dynamic_settings_manage.py')
self.assertNoOutput(err)
self.assertOutput(out, "FOO = 'bar' ###")
def test_all(self):
"""The all option also shows settings with the default value."""
self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'})
args = ['diffsettings', '--settings=settings_to_diff', '--all']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "### STATIC_URL = None")
def test_custom_default(self):
"""
The --default option specifies an alternate settings module for
comparison.
"""
self.write_settings('settings_default.py', sdict={'FOO': '"foo"', 'BAR': '"bar1"'})
self.write_settings('settings_to_diff.py', sdict={'FOO': '"foo"', 'BAR': '"bar2"'})
out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--default=settings_default'])
self.assertNoOutput(err)
self.assertNotInOutput(out, "FOO")
self.assertOutput(out, "BAR = 'bar2'")
def test_unified(self):
"""--output=unified emits settings diff in unified mode."""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff', '--output=unified']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, "+ FOO = 'bar'")
self.assertOutput(out, "- SECRET_KEY = ''")
self.assertOutput(out, "+ SECRET_KEY = 'django_tests_secret_key'")
self.assertNotInOutput(out, " APPEND_SLASH = True")
def test_unified_all(self):
"""
--output=unified --all emits settings diff in unified mode and includes
settings with the default value.
"""
self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'})
args = ['diffsettings', '--settings=settings_to_diff', '--output=unified', '--all']
out, err = self.run_manage(args)
self.assertNoOutput(err)
self.assertOutput(out, " APPEND_SLASH = True")
self.assertOutput(out, "+ FOO = 'bar'")
self.assertOutput(out, "- SECRET_KEY = ''")
class Dumpdata(AdminScriptTestCase):
"""Tests for dumpdata management command."""
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_pks_parsing(self):
"""Regression for #20509
Test would raise an exception rather than printing an error message.
"""
args = ['dumpdata', '--pks=1']
out, err = self.run_manage(args)
self.assertOutput(err, "You can only use --pks option with one model")
self.assertNoOutput(out)
class MainModule(AdminScriptTestCase):
"""python -m django works like django-admin."""
def test_program_name_in_help(self):
out, err = self.run_test(['-m', 'django', 'help'])
self.assertOutput(out, "Type 'python -m django help <subcommand>' for help on a specific subcommand.")
class DjangoAdminSuggestions(AdminScriptTestCase):
def setUp(self):
super().setUp()
self.write_settings('settings.py')
def test_suggestions(self):
args = ['rnserver', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertOutput(err, "Unknown command: 'rnserver'. Did you mean runserver?")
def test_no_suggestions(self):
args = ['abcdef', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(out)
self.assertNotInOutput(err, 'Did you mean')
import unittest
from django.core.checks import Error, Warning
from django.core.checks.model_checks import _check_lazy_references
from django.db import connection, connections, models
from django.db.models.functions import Lower
from django.db.models.signals import post_init
from django.test import SimpleTestCase, TestCase
from django.test.utils import isolate_apps, override_settings, register_lookup
class EmptyRouter:
pass
def get_max_column_name_length():
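    """
    Return the smallest max_name_length among the configured databases that
    don't truncate names, together with the alias that imposes the limit, or
    (None, None) if no backend enforces a limit.
    """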
allowed_len = None
db_alias = None
for db in ('default', 'other'):
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is not None and not connection.features.truncates_names:
if allowed_len is None or max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
return (allowed_len, db_alias)
@isolate_apps('invalid_models_tests')
class IndexTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = 42
self.assertEqual(Model.check(), [
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id='models.E008',
),
])
def test_non_list(self):
class Model(models.Model):
class Meta:
index_together = 'not-a-list'
self.assertEqual(Model.check(), [
Error(
"'index_together' must be a list or tuple.",
obj=Model,
id='models.E008',
),
])
def test_list_containing_non_iterable(self):
class Model(models.Model):
class Meta:
index_together = [('a', 'b'), 42]
self.assertEqual(Model.check(), [
Error(
"All 'index_together' elements must be lists or tuples.",
obj=Model,
id='models.E009',
),
])
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
index_together = [['missing_field']]
self.assertEqual(Model.check(), [
Error(
"'index_together' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
index_together = [['field2', 'field1']]
self.assertEqual(Bar.check(), [
Error(
"'index_together' refers to field 'field1' which is not "
"local to model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
index_together = [['m2m']]
self.assertEqual(Model.check(), [
Error(
"'index_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'index_together'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
index_together = [['foo_1_id', 'foo_2']]
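        # Both the attname ('foo_1_id') and the field name ('foo_2') are
        # accepted.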
self.assertEqual(Bar.check(), [])
# unique_together tests are very similar to index_together tests.
@isolate_apps('invalid_models_tests')
class UniqueTogetherTests(SimpleTestCase):
def test_non_iterable(self):
class Model(models.Model):
class Meta:
unique_together = 42
self.assertEqual(Model.check(), [
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id='models.E010',
),
])
def test_list_containing_non_iterable(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
unique_together = [('a', 'b'), 42]
self.assertEqual(Model.check(), [
Error(
"All 'unique_together' elements must be lists or tuples.",
obj=Model,
id='models.E011',
),
])
def test_non_list(self):
class Model(models.Model):
class Meta:
unique_together = 'not-a-list'
self.assertEqual(Model.check(), [
Error(
"'unique_together' must be a list or tuple.",
obj=Model,
id='models.E010',
),
])
def test_valid_model(self):
class Model(models.Model):
one = models.IntegerField()
two = models.IntegerField()
class Meta:
# unique_together can be a simple tuple
unique_together = ('one', 'two')
self.assertEqual(Model.check(), [])
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
unique_together = [['missing_field']]
self.assertEqual(Model.check(), [
Error(
"'unique_together' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_m2m(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
unique_together = [['m2m']]
self.assertEqual(Model.check(), [
Error(
"'unique_together' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'unique_together'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
unique_together = [['foo_1_id', 'foo_2']]
self.assertEqual(Bar.check(), [])
@isolate_apps('invalid_models_tests')
class IndexesTests(TestCase):
def test_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=['missing_field'], name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
])
def test_pointing_to_m2m_field(self):
class Model(models.Model):
m2m = models.ManyToManyField('self')
class Meta:
indexes = [models.Index(fields=['m2m'], name='name')]
self.assertEqual(Model.check(), [
Error(
"'indexes' refers to a ManyToManyField 'm2m', but "
"ManyToManyFields are not permitted in 'indexes'.",
obj=Model,
id='models.E013',
),
])
def test_pointing_to_non_local_field(self):
class Foo(models.Model):
field1 = models.IntegerField()
class Bar(Foo):
field2 = models.IntegerField()
class Meta:
indexes = [models.Index(fields=['field2', 'field1'], name='name')]
self.assertEqual(Bar.check(), [
Error(
"'indexes' refers to field 'field1' which is not local to "
"model 'Bar'.",
hint='This issue may be caused by multi-table inheritance.',
obj=Bar,
id='models.E016',
),
])
def test_pointing_to_fk(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foo_1 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_1')
foo_2 = models.ForeignKey(Foo, on_delete=models.CASCADE, related_name='bar_2')
class Meta:
indexes = [models.Index(fields=['foo_1_id', 'foo_2'], name='index_name')]
self.assertEqual(Bar.check(), [])
def test_name_constraints(self):
class Model(models.Model):
class Meta:
indexes = [
models.Index(fields=['id'], name='_index_name'),
models.Index(fields=['id'], name='5index_name'),
]
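        # Both names are invalid: one starts with an underscore, the other
        # with a digit.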
self.assertEqual(Model.check(), [
Error(
"The index name '%sindex_name' cannot start with an "
"underscore or a number." % prefix,
obj=Model,
id='models.E033',
) for prefix in ('_', '5')
])
def test_max_name_length(self):
index_name = 'x' * 31
class Model(models.Model):
class Meta:
indexes = [models.Index(fields=['id'], name=index_name)]
self.assertEqual(Model.check(), [
Error(
"The index name '%s' cannot be longer than 30 characters."
% index_name,
obj=Model,
id='models.E034',
),
])
def test_index_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
indexes = [
models.Index(
fields=['age'],
name='index_age_gte_10',
condition=models.Q(age__gte=10),
),
]
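        # models.W037 is only expected on backends without partial index
        # support.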
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_partial_indexes else [
Warning(
'%s does not support indexes with conditions.'
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning if you "
"don't care about it."
),
obj=Model,
id='models.W037',
)
]
self.assertEqual(errors, expected)
def test_index_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_partial_indexes'}
indexes = [
models.Index(
fields=['age'],
name='index_age_gte_10',
condition=models.Q(age__gte=10),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
@isolate_apps('invalid_models_tests')
class FieldNamesTests(TestCase):
databases = {'default', 'other'}
def test_ending_with_underscore(self):
class Model(models.Model):
field_ = models.CharField(max_length=10)
m2m_ = models.ManyToManyField('self')
self.assertEqual(Model.check(), [
Error(
'Field names must not end with an underscore.',
obj=Model._meta.get_field('field_'),
id='fields.E001',
),
Error(
'Field names must not end with an underscore.',
obj=Model._meta.get_field('m2m_'),
id='fields.E001',
),
])
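    # Computed at class-definition time so the skipIf decorators below can
    # reference the limit.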
max_column_name_length, column_limit_db_alias = get_max_column_name_length()
@unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.")
def test_M2M_long_column_name(self):
"""
#13711 -- Model check for long M2M column names when database has
column name length limits.
"""
        # A model with a very long name, which the relations below will point to.
class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz(models.Model):
title = models.CharField(max_length=11)
# Main model for which checks will be performed.
class ModelWithLongField(models.Model):
m2m_field = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn1',
)
m2m_field2 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn2', through='m2msimple',
)
m2m_field3 = models.ManyToManyField(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
related_name='rn3',
through='m2mcomplex',
)
fk = models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
related_name='rn4',
)
# Models used for setting `through` in M2M field.
class m2msimple(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
class m2mcomplex(models.Model):
id2 = models.ForeignKey(ModelWithLongField, models.CASCADE)
long_field_name = 'a' * (self.max_column_name_length + 1)
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
).contribute_to_class(m2msimple, long_field_name)
models.ForeignKey(
VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz,
models.CASCADE,
db_column=long_field_name
).contribute_to_class(m2mcomplex, long_field_name)
errors = ModelWithLongField.check(databases=('default', 'other'))
        # First error, caused by the M2M field set on the model with the long name.
m2m_long_name = "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id"
if self.max_column_name_length > len(m2m_long_name):
            # Some databases allow names longer than the one generated for this test.
expected = []
else:
expected = [
Error(
'Autogenerated column name too long for M2M field "%s". '
'Maximum length is "%s" for database "%s".'
% (m2m_long_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id='models.E019',
)
]
        # Second error, because the FK added to the `through` model `m2msimple`
        # has an auto-generated column name longer than allowed. The other M2M
        # produces no check errors because it specifies db_column for the FK in
        # its `through` model, even though the actual name exceeds the
        # database's limit.
expected.append(
Error(
'Autogenerated column name too long for M2M field "%s_id". '
'Maximum length is "%s" for database "%s".'
% (long_field_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'.",
obj=ModelWithLongField,
id='models.E019',
)
)
self.assertEqual(errors, expected)
# Check for long column names is called only for specified database
# aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
@unittest.skipIf(max_column_name_length is None, "The database doesn't have a column name length limit.")
def test_local_field_long_column_name(self):
"""
#13711 -- Model check for long column names
when database does not support long names.
"""
class ModelWithLongField(models.Model):
title = models.CharField(max_length=11)
long_field_name = 'a' * (self.max_column_name_length + 1)
long_field_name2 = 'b' * (self.max_column_name_length + 1)
models.CharField(max_length=11).contribute_to_class(ModelWithLongField, long_field_name)
models.CharField(max_length=11, db_column='vlmn').contribute_to_class(ModelWithLongField, long_field_name2)
self.assertEqual(ModelWithLongField.check(databases=('default', 'other')), [
Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (long_field_name, self.max_column_name_length, self.column_limit_db_alias),
hint="Set the column name manually using 'db_column'.",
obj=ModelWithLongField,
id='models.E018',
)
])
# Check for long column names is called only for specified database
# aliases.
self.assertEqual(ModelWithLongField.check(databases=None), [])
def test_including_separator(self):
class Model(models.Model):
some__field = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
'Field names must not contain "__".',
obj=Model._meta.get_field('some__field'),
id='fields.E002',
)
])
def test_pk(self):
class Model(models.Model):
pk = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=Model._meta.get_field('pk'),
id='fields.E003',
)
])
def test_db_column_clash(self):
class Model(models.Model):
foo = models.IntegerField()
bar = models.IntegerField(db_column='foo')
self.assertEqual(Model.check(), [
Error(
"Field 'bar' has column name 'foo' that is used by "
"another field.",
hint="Specify a 'db_column' for the field.",
obj=Model,
id='models.E007',
)
])
@isolate_apps('invalid_models_tests')
class ShadowingFieldsTests(SimpleTestCase):
def test_field_name_clash_with_child_accessor(self):
class Parent(models.Model):
pass
class Child(Parent):
child = models.CharField(max_length=100)
self.assertEqual(Child.check(), [
Error(
"The field 'child' clashes with the field "
"'child' from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field('child'),
id='models.E006',
)
])
def test_multiinheritance_clash(self):
class Mother(models.Model):
clash = models.IntegerField()
class Father(models.Model):
clash = models.IntegerField()
class Child(Mother, Father):
            # Here we have two clashes: 'id' (the automatic primary key) and
            # 'clash', because both parents define these fields.
pass
self.assertEqual(Child.check(), [
Error(
"The field 'id' from parent model "
"'invalid_models_tests.mother' clashes with the field 'id' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id='models.E005',
),
Error(
"The field 'clash' from parent model "
"'invalid_models_tests.mother' clashes with the field 'clash' "
"from parent model 'invalid_models_tests.father'.",
obj=Child,
id='models.E005',
)
])
def test_inheritance_clash(self):
class Parent(models.Model):
f_id = models.IntegerField()
class Target(models.Model):
# This field doesn't result in a clash.
f_id = models.IntegerField()
class Child(Parent):
# This field clashes with parent "f_id" field.
f = models.ForeignKey(Target, models.CASCADE)
self.assertEqual(Child.check(), [
Error(
"The field 'f' clashes with the field 'f_id' "
"from model 'invalid_models_tests.parent'.",
obj=Child._meta.get_field('f'),
id='models.E006',
)
])
def test_multigeneration_inheritance(self):
class GrandParent(models.Model):
clash = models.IntegerField()
class Parent(GrandParent):
pass
class Child(Parent):
pass
class GrandChild(Child):
clash = models.IntegerField()
self.assertEqual(GrandChild.check(), [
Error(
"The field 'clash' clashes with the field 'clash' "
"from model 'invalid_models_tests.grandparent'.",
obj=GrandChild._meta.get_field('clash'),
id='models.E006',
)
])
def test_id_clash(self):
class Target(models.Model):
pass
class Model(models.Model):
fk = models.ForeignKey(Target, models.CASCADE)
fk_id = models.IntegerField()
self.assertEqual(Model.check(), [
Error(
"The field 'fk_id' clashes with the field 'fk' from model "
"'invalid_models_tests.model'.",
obj=Model._meta.get_field('fk_id'),
id='models.E006',
)
])
@isolate_apps('invalid_models_tests')
class OtherModelTests(SimpleTestCase):
def test_unique_primary_key(self):
invalid_id = models.IntegerField(primary_key=False)
class Model(models.Model):
id = invalid_id
self.assertEqual(Model.check(), [
Error(
"'id' can only be used as a field name if the field also sets "
"'primary_key=True'.",
obj=Model,
id='models.E004',
),
])
def test_ordering_non_iterable(self):
class Model(models.Model):
class Meta:
ordering = 'missing_field'
self.assertEqual(Model.check(), [
Error(
"'ordering' must be a tuple or list "
"(even if you want to order by only one field).",
obj=Model,
id='models.E014',
),
])
def test_just_ordering_no_errors(self):
class Model(models.Model):
order = models.PositiveIntegerField()
class Meta:
ordering = ['order']
self.assertEqual(Model.check(), [])
def test_just_order_with_respect_to_no_errors(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
class Meta:
order_with_respect_to = 'question'
self.assertEqual(Answer.check(), [])
def test_ordering_with_order_with_respect_to(self):
class Question(models.Model):
pass
class Answer(models.Model):
question = models.ForeignKey(Question, models.CASCADE)
order = models.IntegerField()
class Meta:
order_with_respect_to = 'question'
ordering = ['order']
self.assertEqual(Answer.check(), [
Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=Answer,
id='models.E021',
),
])
def test_non_valid(self):
class RelationModel(models.Model):
pass
class Model(models.Model):
relation = models.ManyToManyField(RelationModel)
class Meta:
ordering = ['relation']
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'relation'.",
obj=Model,
id='models.E015',
),
])
def test_ordering_pointing_to_missing_field(self):
class Model(models.Model):
class Meta:
ordering = ('missing_field',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_field'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_foreignkey_field(self):
class Model(models.Model):
missing_fk_field = models.IntegerField()
class Meta:
ordering = ('missing_fk_field_id',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_fk_field_id'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_related_field(self):
class Model(models.Model):
test = models.IntegerField()
class Meta:
ordering = ('missing_related__id',)
self.assertEqual(Model.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'missing_related__id'.",
obj=Model,
id='models.E015',
)
])
def test_ordering_pointing_to_missing_related_model_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_to_non_related_field(self):
class Child(models.Model):
parent = models.IntegerField()
class Meta:
ordering = ('parent__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_to_two_related_model_field(self):
class Parent2(models.Model):
pass
class Parent1(models.Model):
parent2 = models.ForeignKey(Parent2, models.CASCADE)
class Child(models.Model):
parent1 = models.ForeignKey(Parent1, models.CASCADE)
class Meta:
ordering = ('parent1__parent2__missing_field',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent1__parent2__missing_field'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_pointing_multiple_times_to_model_fields(self):
class Parent(models.Model):
field1 = models.CharField(max_length=100)
field2 = models.CharField(max_length=100)
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__field1__field2',)
self.assertEqual(Child.check(), [
Error(
"'ordering' refers to the nonexistent field, related field, "
"or lookup 'parent__field1__field2'.",
obj=Child,
id='models.E015',
)
])
def test_ordering_allows_registered_lookups(self):
class Model(models.Model):
test = models.CharField(max_length=100)
class Meta:
ordering = ('test__lower',)
with register_lookup(models.CharField, Lower):
self.assertEqual(Model.check(), [])
def test_ordering_pointing_to_related_model_pk(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent__pk',)
self.assertEqual(Child.check(), [])
def test_ordering_pointing_to_foreignkey_field(self):
class Parent(models.Model):
pass
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE)
class Meta:
ordering = ('parent_id',)
self.assertFalse(Child.check())
def test_name_beginning_with_underscore(self):
class _Model(models.Model):
pass
self.assertEqual(_Model.check(), [
Error(
"The model name '_Model' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=_Model,
id='models.E023',
)
])
def test_name_ending_with_underscore(self):
class Model_(models.Model):
pass
self.assertEqual(Model_.check(), [
Error(
"The model name 'Model_' cannot start or end with an underscore "
"as it collides with the query lookup syntax.",
obj=Model_,
id='models.E023',
)
])
def test_name_contains_double_underscores(self):
class Test__Model(models.Model):
pass
self.assertEqual(Test__Model.check(), [
Error(
"The model name 'Test__Model' cannot contain double underscores "
"as it collides with the query lookup syntax.",
obj=Test__Model,
id='models.E024',
)
])
def test_property_and_related_field_accessor_clash(self):
class Model(models.Model):
fk = models.ForeignKey('self', models.CASCADE)
@property
def fk_id(self):
pass
self.assertEqual(Model.check(), [
Error(
"The property 'fk_id' clashes with a related field accessor.",
obj=Model,
id='models.E025',
)
])
def test_single_primary_key(self):
class Model(models.Model):
foo = models.IntegerField(primary_key=True)
bar = models.IntegerField(primary_key=True)
self.assertEqual(Model.check(), [
Error(
"The model cannot have more than one field with 'primary_key=True'.",
obj=Model,
id='models.E026',
)
])
@override_settings(TEST_SWAPPED_MODEL_BAD_VALUE='not-a-model')
def test_swappable_missing_app_name(self):
class Model(models.Model):
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_VALUE'
self.assertEqual(Model.check(), [
Error(
"'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form 'app_label.app_name'.",
id='models.E001',
),
])
@override_settings(TEST_SWAPPED_MODEL_BAD_MODEL='not_an_app.Target')
def test_swappable_missing_app(self):
class Model(models.Model):
class Meta:
swappable = 'TEST_SWAPPED_MODEL_BAD_MODEL'
self.assertEqual(Model.check(), [
Error(
"'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', "
'which has not been installed, or is abstract.',
id='models.E002',
),
])
def test_two_m2m_through_same_relationship(self):
class Person(models.Model):
pass
class Group(models.Model):
primary = models.ManyToManyField(Person, through='Membership', related_name='primary')
secondary = models.ManyToManyField(Person, through='Membership', related_name='secondary')
class Membership(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
group = models.ForeignKey(Group, models.CASCADE)
self.assertEqual(Group.check(), [
Error(
"The model has two identical many-to-many relations through "
"the intermediate model 'invalid_models_tests.Membership'.",
obj=Group,
id='models.E003',
)
])
def test_two_m2m_through_same_model_with_different_through_fields(self):
class Country(models.Model):
pass
class ShippingMethod(models.Model):
to_countries = models.ManyToManyField(
Country, through='ShippingMethodPrice',
through_fields=('method', 'to_country'),
)
from_countries = models.ManyToManyField(
Country, through='ShippingMethodPrice',
through_fields=('method', 'from_country'),
related_name='+',
)
class ShippingMethodPrice(models.Model):
method = models.ForeignKey(ShippingMethod, models.CASCADE)
to_country = models.ForeignKey(Country, models.CASCADE)
from_country = models.ForeignKey(Country, models.CASCADE)
self.assertEqual(ShippingMethod.check(), [])
def test_onetoone_with_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking')
self.assertEqual(ParkingLot.check(), [])
def test_onetoone_with_explicit_parent_link_parent_model(self):
class Place(models.Model):
pass
class ParkingLot(Place):
place = models.OneToOneField(Place, models.CASCADE, parent_link=True, primary_key=True)
other_place = models.OneToOneField(Place, models.CASCADE, related_name='other_parking')
self.assertEqual(ParkingLot.check(), [])
def test_m2m_table_name_clash(self):
class Foo(models.Model):
bar = models.ManyToManyField('Bar', db_table='myapp_bar')
class Meta:
db_table = 'myapp_foo'
class Bar(models.Model):
class Meta:
db_table = 'myapp_bar'
self.assertEqual(Foo.check(), [
Error(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field('bar'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
bar = models.ManyToManyField('Bar', db_table='myapp_bar')
class Meta:
db_table = 'myapp_foo'
class Bar(models.Model):
class Meta:
db_table = 'myapp_bar'
self.assertEqual(Foo.check(), [
Warning(
"The field's intermediary table 'myapp_bar' clashes with the "
"table name of 'invalid_models_tests.Bar'.",
obj=Foo._meta.get_field('bar'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Bar' is "
"correctly routed to a separate database."
),
id='fields.W344',
),
])
def test_m2m_field_table_name_clash(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
self.assertEqual(Bar.check() + Baz.check(), [
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Baz.foos'.",
obj=Bar._meta.get_field('foos'),
id='fields.E340',
),
Error(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.Bar.foos'.",
obj=Baz._meta.get_field('foos'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_field_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
pass
class Bar(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
class Baz(models.Model):
foos = models.ManyToManyField(Foo, db_table='clash')
self.assertEqual(Bar.check() + Baz.check(), [
Warning(
"The field's intermediary table 'clash' clashes with the "
"table name of 'invalid_models_tests.%s.foos'."
% clashing_model,
obj=model_cls._meta.get_field('foos'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.%s.foos' is "
"correctly routed to a separate database." % clashing_model
),
id='fields.W344',
) for model_cls, clashing_model in [(Bar, 'Baz'), (Baz, 'Bar')]
])
def test_m2m_autogenerated_table_name_clash(self):
class Foo(models.Model):
class Meta:
db_table = 'bar_foos'
class Bar(models.Model):
# The autogenerated `db_table` will be bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = 'bar'
self.assertEqual(Bar.check(), [
Error(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field('foos'),
id='fields.E340',
)
])
@override_settings(DATABASE_ROUTERS=['invalid_models_tests.test_models.EmptyRouter'])
def test_m2m_autogenerated_table_name_clash_database_routers_installed(self):
class Foo(models.Model):
class Meta:
db_table = 'bar_foos'
class Bar(models.Model):
# The autogenerated db_table is bar_foos.
foos = models.ManyToManyField(Foo)
class Meta:
db_table = 'bar'
self.assertEqual(Bar.check(), [
Warning(
"The field's intermediary table 'bar_foos' clashes with the "
"table name of 'invalid_models_tests.Foo'.",
obj=Bar._meta.get_field('foos'),
hint=(
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of 'invalid_models_tests.Foo' is "
"correctly routed to a separate database."
),
id='fields.W344',
),
])
def test_m2m_unmanaged_shadow_models_not_checked(self):
class A1(models.Model):
pass
class C1(models.Model):
mm_a = models.ManyToManyField(A1, db_table='d1')
# Unmanaged models that shadow the above models. Reused table names
# shouldn't be flagged by any checks.
class A2(models.Model):
class Meta:
managed = False
class C2(models.Model):
mm_a = models.ManyToManyField(A2, through='Intermediate')
class Meta:
managed = False
class Intermediate(models.Model):
a2 = models.ForeignKey(A2, models.CASCADE, db_column='a1_id')
c2 = models.ForeignKey(C2, models.CASCADE, db_column='c1_id')
class Meta:
db_table = 'd1'
managed = False
self.assertEqual(C1.check(), [])
self.assertEqual(C2.check(), [])
def test_m2m_to_concrete_and_proxy_allowed(self):
class A(models.Model):
pass
class Through(models.Model):
a = models.ForeignKey('A', models.CASCADE)
c = models.ForeignKey('C', models.CASCADE)
class ThroughProxy(Through):
class Meta:
proxy = True
class C(models.Model):
mm_a = models.ManyToManyField(A, through=Through)
mm_aproxy = models.ManyToManyField(A, through=ThroughProxy, related_name='proxied_m2m')
self.assertEqual(C.check(), [])
@isolate_apps('django.contrib.auth', kwarg_name='apps')
def test_lazy_reference_checks(self, apps):
class DummyModel(models.Model):
author = models.ForeignKey('Author', models.CASCADE)
class Meta:
app_label = 'invalid_models_tests'
class DummyClass:
def __call__(self, **kwargs):
pass
def dummy_method(self):
pass
def dummy_function(*args, **kwargs):
pass
apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel'))
apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel'))
post_init.connect(dummy_function, sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps)
self.assertEqual(_check_lazy_references(apps), [
Error(
"%r contains a lazy reference to auth.imaginarymodel, "
"but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"%r contains a lazy reference to fanciful_app.imaginarymodel, "
"but app 'fanciful_app' isn't installed." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"An instance of class 'DummyClass' was connected to "
"the 'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
Error(
"Bound method 'DummyClass.dummy_method' was connected to the "
"'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
Error(
"The field invalid_models_tests.DummyModel.author was declared "
"with a lazy reference to 'invalid_models_tests.author', but app "
"'invalid_models_tests' isn't installed.",
hint=None,
obj=DummyModel.author.field,
id='fields.E307',
),
Error(
"The function 'dummy_function' was connected to the 'post_init' "
"signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='invalid_models_tests.test_models',
id='signals.E001',
),
])
@isolate_apps('invalid_models_tests')
class ConstraintsTests(TestCase):
def test_check_constraints(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')]
errors = Model.check(databases=self.databases)
warn = Warning(
'%s does not support check constraints.' % connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if you "
"don't care about it."
),
obj=Model,
id='models.W027',
)
expected = [] if connection.features.supports_table_check_constraints else [warn]
self.assertCountEqual(errors, expected)
def test_check_constraints_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_table_check_constraints'}
constraints = [models.CheckConstraint(check=models.Q(age__gte=18), name='is_adult')]
self.assertEqual(Model.check(databases=self.databases), [])
def test_unique_constraint_with_condition(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_gte_100',
condition=models.Q(age__gte=100),
),
]
errors = Model.check(databases=self.databases)
expected = [] if connection.features.supports_partial_indexes else [
Warning(
'%s does not support unique constraints with conditions.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this warning if "
"you don't care about it."
),
obj=Model,
id='models.W036',
),
]
self.assertEqual(errors, expected)
def test_unique_constraint_with_condition_required_db_features(self):
class Model(models.Model):
age = models.IntegerField()
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
fields=['age'],
name='unique_age_gte_100',
condition=models.Q(age__gte=100),
),
]
self.assertEqual(Model.check(databases=self.databases), [])
|
b297239b8de2587f1f6fc7725cf22f96dd7886ea574d50569640a11cc258e4c7 | from unittest import mock
from django.core.exceptions import ValidationError
from django.db import IntegrityError, connection, models
from django.db.models.constraints import BaseConstraint
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from .models import (
ChildModel, Product, UniqueConstraintConditionProduct,
UniqueConstraintProduct,
)
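# Test helper: introspect the database (through the connection's introspection
# API) and return the constraints that actually exist on the given table, so
# the tests below can verify that declared constraints were really created.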
def get_constraints(table):
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
class BaseConstraintTests(SimpleTestCase):
def test_constraint_sql(self):
c = BaseConstraint('name')
msg = 'This method must be implemented by a subclass.'
with self.assertRaisesMessage(NotImplementedError, msg):
c.constraint_sql(None, None)
def test_create_sql(self):
c = BaseConstraint('name')
msg = 'This method must be implemented by a subclass.'
with self.assertRaisesMessage(NotImplementedError, msg):
c.create_sql(None, None)
def test_remove_sql(self):
c = BaseConstraint('name')
msg = 'This method must be implemented by a subclass.'
with self.assertRaisesMessage(NotImplementedError, msg):
c.remove_sql(None, None)
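    # BaseConstraint is only an interface: constraint_sql(), create_sql(), and
    # remove_sql() raise NotImplementedError until a concrete subclass such as
    # CheckConstraint or UniqueConstraint (exercised below) supplies the SQL.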
class CheckConstraintTests(TestCase):
def test_eq(self):
check1 = models.Q(price__gt=models.F('discounted_price'))
check2 = models.Q(price__lt=models.F('discounted_price'))
self.assertEqual(
models.CheckConstraint(check=check1, name='price'),
models.CheckConstraint(check=check1, name='price'),
)
self.assertEqual(models.CheckConstraint(check=check1, name='price'), mock.ANY)
self.assertNotEqual(
models.CheckConstraint(check=check1, name='price'),
models.CheckConstraint(check=check1, name='price2'),
)
self.assertNotEqual(
models.CheckConstraint(check=check1, name='price'),
models.CheckConstraint(check=check2, name='price'),
)
self.assertNotEqual(models.CheckConstraint(check=check1, name='price'), 1)
def test_repr(self):
check = models.Q(price__gt=models.F('discounted_price'))
name = 'price_gt_discounted_price'
constraint = models.CheckConstraint(check=check, name=name)
self.assertEqual(
repr(constraint),
"<CheckConstraint: check='{}' name='{}'>".format(check, name),
)
def test_invalid_check_types(self):
msg = (
'CheckConstraint.check must be a Q instance or boolean expression.'
)
with self.assertRaisesMessage(TypeError, msg):
models.CheckConstraint(check=models.F('discounted_price'), name='check')
def test_deconstruction(self):
check = models.Q(price__gt=models.F('discounted_price'))
name = 'price_gt_discounted_price'
constraint = models.CheckConstraint(check=check, name=name)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.db.models.CheckConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'check': check, 'name': name})
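    # The (path, args, kwargs) triple checked above is what the migrations
    # framework serializes for a constraint, so an equal object can be
    # reconstructed when migrations are loaded.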
@skipUnlessDBFeature('supports_table_check_constraints')
def test_database_constraint(self):
Product.objects.create(price=10, discounted_price=5)
with self.assertRaises(IntegrityError):
Product.objects.create(price=10, discounted_price=20)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_database_constraint_expression(self):
Product.objects.create(price=999, discounted_price=5)
with self.assertRaises(IntegrityError):
Product.objects.create(price=1000, discounted_price=5)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_database_constraint_expressionwrapper(self):
Product.objects.create(price=499, discounted_price=5)
with self.assertRaises(IntegrityError):
Product.objects.create(price=500, discounted_price=5)
@skipUnlessDBFeature('supports_table_check_constraints', 'can_introspect_check_constraints')
def test_name(self):
constraints = get_constraints(Product._meta.db_table)
for expected_name in (
'price_gt_discounted_price',
'constraints_price_lt_1000_raw',
'constraints_price_neq_500_wrap',
'constraints_product_price_gt_0',
):
with self.subTest(expected_name):
self.assertIn(expected_name, constraints)
@skipUnlessDBFeature('supports_table_check_constraints', 'can_introspect_check_constraints')
def test_abstract_name(self):
constraints = get_constraints(ChildModel._meta.db_table)
self.assertIn('constraints_childmodel_adult', constraints)
class UniqueConstraintTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.p1, cls.p2 = UniqueConstraintProduct.objects.bulk_create([
UniqueConstraintProduct(name='p1', color='red'),
UniqueConstraintProduct(name='p2'),
])
def test_eq(self):
self.assertEqual(
models.UniqueConstraint(fields=['foo', 'bar'], name='unique'),
models.UniqueConstraint(fields=['foo', 'bar'], name='unique'),
)
self.assertEqual(
models.UniqueConstraint(fields=['foo', 'bar'], name='unique'),
mock.ANY,
)
self.assertNotEqual(
models.UniqueConstraint(fields=['foo', 'bar'], name='unique'),
models.UniqueConstraint(fields=['foo', 'bar'], name='unique2'),
)
self.assertNotEqual(
models.UniqueConstraint(fields=['foo', 'bar'], name='unique'),
models.UniqueConstraint(fields=['foo', 'baz'], name='unique'),
)
self.assertNotEqual(models.UniqueConstraint(fields=['foo', 'bar'], name='unique'), 1)
def test_eq_with_condition(self):
self.assertEqual(
models.UniqueConstraint(
fields=['foo', 'bar'], name='unique',
condition=models.Q(foo=models.F('bar'))
),
models.UniqueConstraint(
fields=['foo', 'bar'], name='unique',
condition=models.Q(foo=models.F('bar'))),
)
self.assertNotEqual(
models.UniqueConstraint(
fields=['foo', 'bar'],
name='unique',
condition=models.Q(foo=models.F('bar'))
),
models.UniqueConstraint(
fields=['foo', 'bar'],
name='unique',
condition=models.Q(foo=models.F('baz'))
),
)
def test_repr(self):
fields = ['foo', 'bar']
name = 'unique_fields'
constraint = models.UniqueConstraint(fields=fields, name=name)
self.assertEqual(
repr(constraint),
"<UniqueConstraint: fields=('foo', 'bar') name='unique_fields'>",
)
def test_repr_with_condition(self):
constraint = models.UniqueConstraint(
fields=['foo', 'bar'],
name='unique_fields',
condition=models.Q(foo=models.F('bar')),
)
self.assertEqual(
repr(constraint),
"<UniqueConstraint: fields=('foo', 'bar') name='unique_fields' "
"condition=(AND: ('foo', F(bar)))>",
)
def test_deconstruction(self):
fields = ['foo', 'bar']
name = 'unique_fields'
constraint = models.UniqueConstraint(fields=fields, name=name)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.db.models.UniqueConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': tuple(fields), 'name': name})
def test_deconstruction_with_condition(self):
fields = ['foo', 'bar']
name = 'unique_fields'
condition = models.Q(foo=models.F('bar'))
constraint = models.UniqueConstraint(fields=fields, name=name, condition=condition)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(path, 'django.db.models.UniqueConstraint')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': tuple(fields), 'name': name, 'condition': condition})
def test_database_constraint(self):
with self.assertRaises(IntegrityError):
UniqueConstraintProduct.objects.create(name=self.p1.name, color=self.p1.color)
@skipUnlessDBFeature('supports_partial_indexes')
def test_database_constraint_with_condition(self):
UniqueConstraintConditionProduct.objects.create(name='p1')
UniqueConstraintConditionProduct.objects.create(name='p2')
with self.assertRaises(IntegrityError):
UniqueConstraintConditionProduct.objects.create(name='p1')
def test_model_validation(self):
msg = 'Unique constraint product with this Name and Color already exists.'
with self.assertRaisesMessage(ValidationError, msg):
UniqueConstraintProduct(name=self.p1.name, color=self.p1.color).validate_unique()
@skipUnlessDBFeature('supports_partial_indexes')
def test_model_validation_with_condition(self):
"""Partial unique constraints are ignored by Model.validate_unique()."""
obj1 = UniqueConstraintConditionProduct.objects.create(name='p1', color='red')
obj2 = UniqueConstraintConditionProduct.objects.create(name='p2')
UniqueConstraintConditionProduct(name=obj1.name, color='blue').validate_unique()
UniqueConstraintConditionProduct(name=obj2.name).validate_unique()
def test_name(self):
constraints = get_constraints(UniqueConstraintProduct._meta.db_table)
expected_name = 'name_color_uniq'
self.assertIn(expected_name, constraints)
def test_condition_must_be_q(self):
with self.assertRaisesMessage(ValueError, 'UniqueConstraint.condition must be a Q instance.'):
models.UniqueConstraint(name='uniq', fields=['name'], condition='invalid')
|
3b4d789892e5e5cd70abfdf5ac79345324b5ffe89155934c03fbeb8d9fb1ad92 | from django.db import models
class Product(models.Model):
price = models.IntegerField(null=True)
discounted_price = models.IntegerField(null=True)
class Meta:
required_db_features = {
'supports_table_check_constraints',
}
constraints = [
models.CheckConstraint(
check=models.Q(price__gt=models.F('discounted_price')),
name='price_gt_discounted_price',
),
models.CheckConstraint(
check=models.Q(price__gt=0),
name='%(app_label)s_%(class)s_price_gt_0',
),
models.CheckConstraint(
check=models.expressions.RawSQL(
'price < %s', (1000,), output_field=models.BooleanField()
),
name='%(app_label)s_price_lt_1000_raw',
),
models.CheckConstraint(
check=models.expressions.ExpressionWrapper(
models.Q(price__gt=500) | models.Q(price__lt=500),
output_field=models.BooleanField()
),
name='%(app_label)s_price_neq_500_wrap',
),
]
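# The '%(app_label)s' and '%(class)s' placeholders in constraint names are
# interpolated per concrete model, so the names above end up as e.g.
# 'constraints_product_price_gt_0' and 'constraints_price_lt_1000_raw' --
# exactly the names the introspection-based tests look for.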
class UniqueConstraintProduct(models.Model):
name = models.CharField(max_length=255)
color = models.CharField(max_length=32, null=True)
class Meta:
constraints = [
models.UniqueConstraint(fields=['name', 'color'], name='name_color_uniq'),
]
class UniqueConstraintConditionProduct(models.Model):
name = models.CharField(max_length=255)
color = models.CharField(max_length=32, null=True)
class Meta:
required_db_features = {'supports_partial_indexes'}
constraints = [
models.UniqueConstraint(
fields=['name'],
name='name_without_color_uniq',
condition=models.Q(color__isnull=True),
),
]
class AbstractModel(models.Model):
age = models.IntegerField()
class Meta:
abstract = True
required_db_features = {
'supports_table_check_constraints',
}
constraints = [
models.CheckConstraint(
check=models.Q(age__gte=18),
name='%(app_label)s_%(class)s_adult',
),
]
class ChildModel(AbstractModel):
pass
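# For abstract bases the interpolation happens on each concrete subclass, so
# the constraint declared on AbstractModel is created on ChildModel's table
# under the name 'constraints_childmodel_adult'.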
|
845d1138df321ec54dc6d0a7e6115e5fc0d6d6961edbd64e5ac8c4d333cb696e | """
Tests for F() query expression syntax.
"""
import uuid
from django.db import models
class Manager(models.Model):
name = models.CharField(max_length=50)
class Employee(models.Model):
firstname = models.CharField(max_length=50)
lastname = models.CharField(max_length=50)
salary = models.IntegerField(blank=True, null=True)
manager = models.ForeignKey(Manager, models.CASCADE, null=True)
def __str__(self):
return '%s %s' % (self.firstname, self.lastname)
class RemoteEmployee(Employee):
adjusted_salary = models.IntegerField()
class Company(models.Model):
name = models.CharField(max_length=100)
num_employees = models.PositiveIntegerField()
num_chairs = models.PositiveIntegerField()
ceo = models.ForeignKey(
Employee,
models.CASCADE,
related_name='company_ceo_set',
)
point_of_contact = models.ForeignKey(
Employee,
models.SET_NULL,
related_name='company_point_of_contact_set',
null=True,
)
based_in_eu = models.BooleanField(default=False)
def __str__(self):
return self.name
class Number(models.Model):
integer = models.BigIntegerField(db_column='the_integer')
float = models.FloatField(null=True, db_column='the_float')
def __str__(self):
return '%i, %.3f' % (self.integer, self.float)
class Experiment(models.Model):
name = models.CharField(max_length=24)
assigned = models.DateField()
completed = models.DateField()
estimated_time = models.DurationField()
start = models.DateTimeField()
end = models.DateTimeField()
class Meta:
db_table = 'expressions_ExPeRiMeNt'
ordering = ('name',)
def duration(self):
return self.end - self.start
class Result(models.Model):
experiment = models.ForeignKey(Experiment, models.CASCADE)
result_time = models.DateTimeField()
def __str__(self):
return "Result at %s" % self.result_time
class Time(models.Model):
time = models.TimeField(null=True)
def __str__(self):
return "%s" % self.time
class SimulationRun(models.Model):
start = models.ForeignKey(Time, models.CASCADE, null=True, related_name='+')
end = models.ForeignKey(Time, models.CASCADE, null=True, related_name='+')
midpoint = models.TimeField()
def __str__(self):
return "%s (%s to %s)" % (self.midpoint, self.start, self.end)
class UUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
class UUID(models.Model):
uuid = models.UUIDField(null=True)
uuid_fk = models.ForeignKey(UUIDPK, models.CASCADE, null=True)
def __str__(self):
return "%s" % self.uuid
|
02b62020deefff05bf5c0354843dc31a8d0f0821501b2924f68c4b9f65a14637 | """
The lookup API
This demonstrates features of the database API.
"""
from django.db import models
from django.db.models.lookups import IsNull
class Alarm(models.Model):
desc = models.CharField(max_length=100)
time = models.TimeField()
def __str__(self):
return '%s (%s)' % (self.time, self.desc)
class Author(models.Model):
name = models.CharField(max_length=100)
alias = models.CharField(max_length=50, null=True, blank=True)
class Meta:
ordering = ('name',)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True)
slug = models.SlugField(unique=True, blank=True, null=True)
class Meta:
ordering = ('-pub_date', 'headline')
def __str__(self):
return self.headline
class Tag(models.Model):
articles = models.ManyToManyField(Article)
name = models.CharField(max_length=100)
class Meta:
ordering = ('name',)
class NulledTextField(models.TextField):
def get_prep_value(self, value):
return None if value == '' else value
@NulledTextField.register_lookup
class NulledTransform(models.Transform):
lookup_name = 'nulled'
template = 'NULL'
@NulledTextField.register_lookup
class IsNullWithNoneAsRHS(IsNull):
lookup_name = 'isnull_none_rhs'
can_use_none_as_rhs = True
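# The registrations above attach custom lookups to NulledTextField. As a rough
# illustration (the exact queries live in the lookup tests), a filter such as
#   Season.objects.filter(nulled_text_field__nulled__isnull=True)
# renders the 'nulled' transform as a literal NULL in SQL, while
# 'isnull_none_rhs' behaves like 'isnull' but may take None as its right-hand side.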
class Season(models.Model):
year = models.PositiveSmallIntegerField()
gt = models.IntegerField(null=True, blank=True)
nulled_text_field = NulledTextField(null=True)
class Meta:
constraints = [
models.UniqueConstraint(fields=['year'], name='season_year_unique'),
]
def __str__(self):
return str(self.year)
class Game(models.Model):
season = models.ForeignKey(Season, models.CASCADE, related_name='games')
home = models.CharField(max_length=100)
away = models.CharField(max_length=100)
def __str__(self):
return "%s at %s" % (self.away, self.home)
class Player(models.Model):
name = models.CharField(max_length=100)
games = models.ManyToManyField(Game, related_name='players')
def __str__(self):
return self.name
class Product(models.Model):
name = models.CharField(max_length=80)
qty_target = models.DecimalField(max_digits=6, decimal_places=2)
class Stock(models.Model):
product = models.ForeignKey(Product, models.CASCADE)
qty_available = models.DecimalField(max_digits=6, decimal_places=2)
class Freebie(models.Model):
gift_product = models.ForeignKey(Product, models.CASCADE)
stock_id = models.IntegerField(blank=True, null=True)
stock = models.ForeignObject(
Stock,
from_fields=['stock_id', 'gift_product'],
to_fields=['id', 'product'],
on_delete=models.CASCADE,
)
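    # ForeignObject defines a composite relation: each Freebie joins to Stock on
    # (stock_id -> Stock.id) and (gift_product -> Stock.product), so it only
    # matches stock rows for the product the freebie is attached to.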
|
b065e702d07bdb6488b8956b1a3f581544248bd7b7dbda9b9e0ad5494991d66d | from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Review(models.Model):
source = models.CharField(max_length=100)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Meta:
ordering = ('source',)
def __str__(self):
return self.source
class PersonManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name=name)
class Person(models.Model):
name = models.CharField(max_length=100, unique=True)
objects = PersonManager()
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
# This book manager doesn't do anything interesting; it just
# exists to strip out the 'extra_arg' argument to certain
# calls. This argument is used to establish that the BookManager
# is actually getting used when it should be.
class BookManager(models.Manager):
def create(self, *args, extra_arg=None, **kwargs):
return super().create(*args, **kwargs)
def get_or_create(self, *args, extra_arg=None, **kwargs):
return super().get_or_create(*args, **kwargs)
class Book(models.Model):
title = models.CharField(max_length=100)
published = models.DateField()
authors = models.ManyToManyField(Person)
editor = models.ForeignKey(Person, models.SET_NULL, null=True, related_name='edited')
reviews = GenericRelation(Review)
pages = models.IntegerField(default=100)
objects = BookManager()
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
class Pet(models.Model):
name = models.CharField(max_length=100)
owner = models.ForeignKey(Person, models.CASCADE)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class UserProfile(models.Model):
user = models.OneToOneField(User, models.SET_NULL, null=True)
flavor = models.CharField(max_length=100)
class Meta:
ordering = ('flavor',)
|
069641773d105f8a8b36e9162a68cad07b5acc7c1de842bc1c5e0452f3063156 | import gettext
import os
import re
from datetime import datetime, timedelta
from importlib import import_module
import pytz
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.core.files.storage import default_storage
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.models import (
CharField, DateField, DateTimeField, ManyToManyField, UUIDField,
)
from django.test import SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from .models import (
Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Individual,
Inventory, Member, MyFileField, Profile, School, Student,
)
from .widgetadmin import site as widget_admin_site
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email=None)
cls.u2 = User.objects.create_user(username='testser', password='secret')
Car.objects.create(owner=cls.superuser, make='Volkswagen', model='Passat')
Car.objects.create(owner=cls.u2, make='BMW', model='M3')
class AdminFormfieldForDBFieldTests(SimpleTestCase):
"""
Tests for correct behavior of ModelAdmin.formfield_for_dbfield
"""
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):
"""
Helper to call formfield_for_dbfield for a given model and field name
and verify that the returned formfield is appropriate.
"""
# Override any settings on the model admin
class MyModelAdmin(admin.ModelAdmin):
pass
for k in admin_overrides:
setattr(MyModelAdmin, k, admin_overrides[k])
# Construct the admin, and ask it for a formfield
ma = MyModelAdmin(model, admin.site)
ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)
# "unwrap" the widget wrapper, if needed
if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):
widget = ff.widget.widget
else:
widget = ff.widget
self.assertIsInstance(widget, widgetclass)
        # Return the formfield so the calling test can make further assertions on it.
return ff
def test_DateField(self):
self.assertFormfield(Event, 'start_date', widgets.AdminDateWidget)
def test_DateTimeField(self):
self.assertFormfield(Member, 'birthdate', widgets.AdminSplitDateTime)
def test_TimeField(self):
self.assertFormfield(Event, 'start_time', widgets.AdminTimeWidget)
def test_TextField(self):
self.assertFormfield(Event, 'description', widgets.AdminTextareaWidget)
def test_URLField(self):
self.assertFormfield(Event, 'link', widgets.AdminURLFieldWidget)
def test_IntegerField(self):
self.assertFormfield(Event, 'min_age', widgets.AdminIntegerFieldWidget)
def test_CharField(self):
self.assertFormfield(Member, 'name', widgets.AdminTextInputWidget)
def test_EmailField(self):
self.assertFormfield(Member, 'email', widgets.AdminEmailInputWidget)
def test_FileField(self):
self.assertFormfield(Album, 'cover_art', widgets.AdminFileWidget)
def test_ForeignKey(self):
self.assertFormfield(Event, 'main_band', forms.Select)
def test_raw_id_ForeignKey(self):
self.assertFormfield(Event, 'main_band', widgets.ForeignKeyRawIdWidget,
raw_id_fields=['main_band'])
def test_radio_fields_ForeignKey(self):
ff = self.assertFormfield(Event, 'main_band', widgets.AdminRadioSelect,
radio_fields={'main_band': admin.VERTICAL})
self.assertIsNone(ff.empty_label)
def test_many_to_many(self):
self.assertFormfield(Band, 'members', forms.SelectMultiple)
def test_raw_id_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.ManyToManyRawIdWidget,
raw_id_fields=['members'])
def test_filtered_many_to_many(self):
self.assertFormfield(Band, 'members', widgets.FilteredSelectMultiple,
filter_vertical=['members'])
def test_formfield_overrides(self):
self.assertFormfield(Event, 'start_date', forms.TextInput,
formfield_overrides={DateField: {'widget': forms.TextInput}})
def test_formfield_overrides_widget_instances(self):
"""
Widget instances in formfield_overrides are not shared between
different fields. (#19423)
"""
class BandAdmin(admin.ModelAdmin):
formfield_overrides = {
CharField: {'widget': forms.TextInput(attrs={'size': '10'})}
}
ma = BandAdmin(Band, admin.site)
f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None)
f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None)
self.assertNotEqual(f1.widget, f2.widget)
self.assertEqual(f1.widget.attrs['maxlength'], '100')
self.assertEqual(f2.widget.attrs['maxlength'], '20')
self.assertEqual(f2.widget.attrs['size'], '10')
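        # The override supplies a single TextInput instance, but each formfield
        # receives its own copy of the widget (hence the inequality above), while
        # the per-field maxlength still comes from the model field definitions.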
def test_formfield_overrides_m2m_filter_widget(self):
"""
The autocomplete_fields, raw_id_fields, filter_vertical, and
filter_horizontal widgets for ManyToManyFields may be overridden by
specifying a widget in formfield_overrides.
"""
class BandAdmin(admin.ModelAdmin):
filter_vertical = ['members']
formfield_overrides = {
ManyToManyField: {'widget': forms.CheckboxSelectMultiple},
}
ma = BandAdmin(Band, admin.site)
field = ma.formfield_for_dbfield(Band._meta.get_field('members'), request=None)
self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple)
def test_formfield_overrides_for_datetime_field(self):
"""
        Overriding the widget for DateTimeField doesn't override the default
        form_class for that field (#26449).
"""
class MemberAdmin(admin.ModelAdmin):
formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}}
ma = MemberAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None)
self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime)
self.assertIsInstance(f1, forms.SplitDateTimeField)
def test_formfield_overrides_for_custom_field(self):
"""
formfield_overrides works for a custom field class.
"""
class AlbumAdmin(admin.ModelAdmin):
formfield_overrides = {MyFileField: {'widget': forms.TextInput()}}
ma = AlbumAdmin(Member, admin.site)
f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None)
self.assertIsInstance(f1.widget, forms.TextInput)
def test_field_with_choices(self):
self.assertFormfield(Member, 'gender', forms.Select)
def test_choices_with_radio_fields(self):
self.assertFormfield(Member, 'gender', widgets.AdminRadioSelect,
radio_fields={'gender': admin.VERTICAL})
def test_inheritance(self):
self.assertFormfield(Album, 'backside_art', widgets.AdminFileWidget)
def test_m2m_widgets(self):
"""m2m fields help text as it applies to admin app (#9321)."""
class AdvisorAdmin(admin.ModelAdmin):
filter_vertical = ['companies']
self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple,
filter_vertical=['companies'])
ma = AdvisorAdmin(Advisor, admin.site)
f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None)
self.assertEqual(
f.help_text,
'Hold down “Control”, or “Command” on a Mac, to select more than one.'
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
def test_filter_choices_by_request_user(self):
"""
Ensure the user can only see their own cars in the foreign key dropdown.
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
self.assertNotContains(response, "BMW M3")
self.assertContains(response, "Volkswagen Passat")
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_changelist_ForeignKey(self):
response = self.client.get(reverse('admin:admin_widgets_car_changelist'))
self.assertContains(response, '/auth/user/add/')
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_nonexistent_target_id(self):
band = Band.objects.create(name='Bogey Blues')
pk = band.pk
band.delete()
post_data = {
"main_band": '%s' % pk,
}
# Try posting with a nonexistent pk in a raw id field: this
# should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), post_data)
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_invalid_target_id(self):
for test_str in ('Iñtërnâtiônàlizætiøn', "1234'", -1234):
# This should result in an error message, not a server exception.
response = self.client.post(reverse('admin:admin_widgets_event_add'), {"main_band": test_str})
self.assertContains(response, 'Select a valid choice. That choice is not one of the available choices.')
def test_url_params_from_lookup_dict_any_iterable(self):
lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})
lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})
self.assertEqual(lookup1, {'color__in': 'red,blue'})
self.assertEqual(lookup1, lookup2)
def test_url_params_from_lookup_dict_callable(self):
def my_callable():
return 'works'
lookup1 = widgets.url_params_from_lookup_dict({'myfield': my_callable})
lookup2 = widgets.url_params_from_lookup_dict({'myfield': my_callable()})
self.assertEqual(lookup1, lookup2)
def test_label_and_url_for_value_invalid_uuid(self):
field = Bee._meta.get_field('honeycomb')
self.assertIsInstance(field.target_field, UUIDField)
widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site)
self.assertEqual(widget.label_and_url_for_value('invalid-uuid'), ('', ''))
class FilteredSelectMultipleWidgetTest(SimpleTestCase):
def test_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', False)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilter" '
'data-field-name="test\\" data-is-stacked="0">\n</select>'
)
def test_stacked_render(self):
# Backslash in verbose_name to ensure it is JavaScript escaped.
w = widgets.FilteredSelectMultiple('test\\', True)
self.assertHTMLEqual(
w.render('test', 'test'),
'<select multiple name="test" class="selectfilterstacked" '
'data-field-name="test\\" data-is-stacked="1">\n</select>'
)
class AdminDateWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminDateWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="vDateField" name="test" size="10">',
)
# pass attrs to widget
w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="2007-12-01" type="text" class="myDateField" name="test" size="20">',
)
class AdminTimeWidgetTest(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminTimeWidget()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="vTimeField" name="test" size="8">',
)
# pass attrs to widget
w = widgets.AdminTimeWidget(attrs={'size': 20, 'class': 'myTimeField'})
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<input value="09:30:00" type="text" class="myTimeField" name="test" size="20">',
)
class AdminSplitDateTimeWidgetTest(SimpleTestCase):
def test_render(self):
w = widgets.AdminSplitDateTime()
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Date: <input value="2007-12-01" type="text" class="vDateField" '
'name="test_0" size="10"><br>'
'Time: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
def test_localization(self):
w = widgets.AdminSplitDateTime()
with self.settings(USE_L10N=True), translation.override('de-at'):
w.is_localized = True
self.assertHTMLEqual(
w.render('test', datetime(2007, 12, 1, 9, 30)),
'<p class="datetime">'
'Datum: <input value="01.12.2007" type="text" '
'class="vDateField" name="test_0"size="10"><br>'
'Zeit: <input value="09:30:00" type="text" class="vTimeField" '
'name="test_1" size="8"></p>'
)
class AdminURLWidgetTest(SimpleTestCase):
def test_get_context_validates_url(self):
w = widgets.AdminURLFieldWidget()
for invalid in ['', '/not/a/full/url/', 'javascript:alert("Danger XSS!")']:
with self.subTest(url=invalid):
self.assertFalse(w.get_context('name', invalid, {})['url_valid'])
self.assertTrue(w.get_context('name', 'http://example.com', {})['url_valid'])
def test_render(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', ''),
'<input class="vURLField" name="test" type="url">'
)
self.assertHTMLEqual(
w.render('test', 'http://example.com'),
'<p class="url">Currently:<a href="http://example.com">'
'http://example.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example.com"></p>'
)
def test_render_idn(self):
w = widgets.AdminURLFieldWidget()
self.assertHTMLEqual(
w.render('test', 'http://example-äüö.com'),
'<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">'
'http://example-äüö.com</a><br>'
'Change:<input class="vURLField" name="test" type="url" '
'value="http://example-äüö.com"></p>'
)
def test_render_quoting(self):
"""
WARNING: This test doesn't use assertHTMLEqual since it will get rid
of some escapes which are tested here!
"""
HREF_RE = re.compile('href="([^"]+)"')
VALUE_RE = re.compile('value="([^"]+)"')
TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>')
w = widgets.AdminURLFieldWidget()
output = w.render('test', 'http://example.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output).groups()[0],
'http://example.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output).groups()[0],
'http://example.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output).groups()[0],
'http://example.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://example-äüö.com/<sometag>some-text</sometag>')
self.assertEqual(
HREF_RE.search(output).groups()[0],
'http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E',
)
self.assertEqual(
TEXT_RE.search(output).groups()[0],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
self.assertEqual(
VALUE_RE.search(output).groups()[0],
'http://example-äüö.com/<sometag>some-text</sometag>',
)
output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"')
self.assertEqual(
HREF_RE.search(output).groups()[0],
'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22',
)
self.assertEqual(
TEXT_RE.search(output).groups()[0],
'http://www.example.com/%C3%A4"><script>'
'alert("XSS!")</script>"'
)
self.assertEqual(
VALUE_RE.search(output).groups()[0],
'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"',
)
class AdminUUIDWidgetTests(SimpleTestCase):
def test_attrs(self):
w = widgets.AdminUUIDInputWidget()
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="vUUIDField" name="test">',
)
w = widgets.AdminUUIDInputWidget(attrs={'class': 'myUUIDInput'})
self.assertHTMLEqual(
w.render('test', '550e8400-e29b-41d4-a716-446655440000'),
'<input value="550e8400-e29b-41d4-a716-446655440000" type="text" class="myUUIDInput" name="test">',
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminFileWidgetTests(TestDataMixin, TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
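        # Store the cover art with a backslash in the name so the tests can
        # check that the link href uses a URL-style path while the visible
        # text keeps the raw stored name.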
band = Band.objects.create(name='Linkin Park')
cls.album = band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
def test_render(self):
w = widgets.AdminFileWidget()
self.assertHTMLEqual(
w.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> '
'<span class="clearable-file-input">'
'<input type="checkbox" name="test-clear" id="test-clear_id"> '
'<label for="test-clear_id">Clear</label></span><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
self.assertHTMLEqual(
w.render('test', SimpleUploadedFile('test', b'content')),
'<input type="file" name="test">',
)
def test_render_required(self):
widget = widgets.AdminFileWidget()
widget.is_required = True
self.assertHTMLEqual(
widget.render('test', self.album.cover_art),
'<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/'
r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>'
'Change: <input type="file" name="test"></p>' % {
'STORAGE_URL': default_storage.url(''),
},
)
def test_readonly_fields(self):
"""
File widgets should render as a link when they're marked "read only."
"""
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,)))
self.assertContains(
response,
'<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">'
r'albums\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')},
html=True,
)
self.assertNotContains(
response,
'<input type="file" name="cover_art" id="id_cover_art">',
html=True,
)
response = self.client.get(reverse('admin:admin_widgets_album_add'))
self.assertContains(
response,
'<div class="readonly"></div>',
html=True,
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ForeignKeyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
band.album_set.create(
name='Hybrid Theory', cover_art=r'albums\hybrid_theory.jpg'
)
rel = Album._meta.get_field('band').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', band.pk, attrs={}),
'<input type="text" name="test" value="%(bandpk)s" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/band/?_to_field=id" class="related-lookup" '
'id="lookup_id_test" title="Lookup"></a> <strong>'
'<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>'
'</strong>' % {'bandpk': band.pk}
)
def test_relations_to_non_primary_key(self):
# ForeignKeyRawIdWidget works with fields which aren't related to
# the model's primary key.
apple = Inventory.objects.create(barcode=86, name='Apple')
Inventory.objects.create(barcode=22, name='Pear')
core = Inventory.objects.create(
barcode=87, name='Core', parent=apple
)
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', core.parent_id, attrs={}),
'<input type="text" name="test" value="86" '
'class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Apple</a></strong>' % {'pk': apple.pk}
)
def test_fk_related_model_not_in_admin(self):
# FK to a model not registered with admin site. Raw ID widget should
# have no magnifying glass link. See #16542
big_honeycomb = Honeycomb.objects.create(location='Old tree')
big_honeycomb.bee_set.create()
rel = Bee._meta.get_field('honeycomb').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('honeycomb_widget', big_honeycomb.pk, attrs={}),
'<input type="text" name="honeycomb_widget" value="%(hcombpk)s">'
' <strong>%(hcomb)s</strong>'
% {'hcombpk': big_honeycomb.pk, 'hcomb': big_honeycomb}
)
def test_fk_to_self_model_not_in_admin(self):
# FK to self, not registered with admin site. Raw ID widget should have
# no magnifying glass link. See #16542
subject1 = Individual.objects.create(name='Subject #1')
Individual.objects.create(name='Child', parent=subject1)
rel = Individual._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('individual_widget', subject1.pk, attrs={}),
'<input type="text" name="individual_widget" value="%(subj1pk)s">'
' <strong>%(subj1)s</strong>'
% {'subj1pk': subject1.pk, 'subj1': subject1}
)
def test_proper_manager_for_label_lookup(self):
# see #9258
rel = Inventory._meta.get_field('parent').remote_field
w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)
hidden = Inventory.objects.create(
barcode=93, name='Hidden', hidden=True
)
child_of_hidden = Inventory.objects.create(
barcode=94, name='Child of hidden', parent=hidden
)
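        # Even though the parent object is flagged as hidden, the widget
        # should still be able to look it up and render its label.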
self.assertHTMLEqual(
w.render('test', child_of_hidden.parent_id, attrs={}),
'<input type="text" name="test" value="93" class="vForeignKeyRawIdAdminField">'
'<a href="/admin_widgets/inventory/?_to_field=barcode" '
'class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
' <strong><a href="/admin_widgets/inventory/%(pk)s/change/">'
'Hidden</a></strong>' % {'pk': hidden.pk}
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class ManyToManyRawIdWidgetTest(TestCase):
def test_render(self):
band = Band.objects.create(name='Linkin Park')
m1 = Member.objects.create(name='Chester')
m2 = Member.objects.create(name='Mike')
band.members.add(m1, m2)
rel = Band._meta.get_field('members').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('test', [m1.pk, m2.pk], attrs={}), (
'<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk, 'm2pk': m2.pk}
)
self.assertHTMLEqual(
w.render('test', [m1.pk]), (
'<input type="text" name="test" value="%(m1pk)s" class="vManyToManyRawIdAdminField">'
'<a href="/admin_widgets/member/" class="related-lookup" id="lookup_id_test" title="Lookup"></a>'
) % {'m1pk': m1.pk}
)
def test_m2m_related_model_not_in_admin(self):
# M2M relationship with model not registered with admin site. Raw ID
# widget should have no magnifying glass link. See #16542
consultor1 = Advisor.objects.create(name='Rockstar Techie')
c1 = Company.objects.create(name='Doodle')
c2 = Company.objects.create(name='Pear')
consultor1.companies.add(c1, c2)
rel = Advisor._meta.get_field('companies').remote_field
w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site)
self.assertHTMLEqual(
w.render('company_widget1', [c1.pk, c2.pk], attrs={}),
'<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {'c1pk': c1.pk, 'c2pk': c2.pk}
)
self.assertHTMLEqual(
w.render('company_widget2', [c1.pk]),
'<input type="text" name="company_widget2" value="%(c1pk)s">' % {'c1pk': c1.pk}
)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class RelatedFieldWidgetWrapperTests(SimpleTestCase):
def test_no_can_add_related(self):
rel = Individual._meta.get_field('parent').remote_field
w = widgets.AdminRadioSelect()
# Used to fail with a name error.
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site)
self.assertFalse(w.can_add_related)
def test_select_multiple_widget_cant_change_delete_related(self):
rel = Individual._meta.get_field('parent').remote_field
widget = forms.SelectMultiple()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertFalse(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_on_delete_cascade_rel_cant_delete_related(self):
rel = Individual._meta.get_field('soulmate').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
self.assertTrue(wrapper.can_add_related)
self.assertTrue(wrapper.can_change_related)
self.assertFalse(wrapper.can_delete_related)
def test_custom_widget_render(self):
class CustomWidget(forms.Select):
def render(self, *args, **kwargs):
return 'custom render output'
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(
widget, rel, widget_admin_site,
can_add_related=True,
can_change_related=True,
can_delete_related=True,
)
output = wrapper.render('name', 'value')
self.assertIn('custom render output', output)
def test_widget_delegates_value_omitted_from_data(self):
class CustomWidget(forms.Select):
def value_omitted_from_data(self, data, files, name):
return False
rel = Album._meta.get_field('band').remote_field
widget = CustomWidget()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.value_omitted_from_data({}, {}, 'band'), False)
def test_widget_is_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.HiddenInput()
widget.choices = ()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, True)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], True)
output = wrapper.render('name', 'value')
# Related item links are hidden.
self.assertNotIn('<a ', output)
def test_widget_is_not_hidden(self):
rel = Album._meta.get_field('band').remote_field
widget = forms.Select()
wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site)
self.assertIs(wrapper.is_hidden, False)
context = wrapper.get_context('band', None, {})
self.assertIs(context['is_hidden'], False)
output = wrapper.render('name', 'value')
# Related item links are present.
self.assertIn('<a ', output)
@override_settings(ROOT_URLCONF='admin_widgets.urls')
class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase):
available_apps = ['admin_widgets'] + AdminSeleniumTestCase.available_apps
def setUp(self):
        self.u1 = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase):
def test_show_hide_date_time_picker_widgets(self):
"""
Pressing the ESC key or clicking on a widget value closes the date and
time picker widgets.
"""
from selenium.webdriver.common.keys import Keys
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# First, with the date picker widget ---------------------------------
cal_icon = self.selenium.find_element_by_id('calendarlink0')
# The date picker is hidden
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon
cal_icon.click()
# The date picker is visible
self.assertTrue(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The date picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
# Click the calendar icon, then on the 15th of current month
cal_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click()
self.assertFalse(self.selenium.find_element_by_id('calendarbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'),
datetime.today().strftime('%Y-%m-') + '15',
)
# Then, with the time picker widget ----------------------------------
time_icon = self.selenium.find_element_by_id('clocklink0')
# The time picker is hidden
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon
time_icon.click()
# The time picker is visible
self.assertTrue(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
[
x.text for x in
self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")
],
['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']
)
# Press the ESC key
self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE])
# The time picker is hidden again
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
# Click the time icon, then select the 'Noon' value
time_icon.click()
self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click()
self.assertFalse(self.selenium.find_element_by_id('clockbox0').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'),
'12:00:00',
)
def test_calendar_nonday_class(self):
"""
Ensure cells that are not days of the month have the `nonday` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# make sure the first and last 6 cells have class nonday
for td in tds[:6] + tds[-6:]:
self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_selected_class(self):
"""
        Ensure the cell for the day in the input has the `selected` CSS class.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# fill in the birth date.
self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01')
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify the selected cell
selected = tds[6]
self.assertEqual(selected.get_attribute('class'), 'selected')
self.assertEqual(selected.text, '1')
def test_calendar_no_selected_class(self):
"""
Ensure no cells are given the selected class when the field is empty.
Refs #4574.
"""
self.admin_login(username='super', password='secret', login_url='/')
        # Open a page that has date and time picker widgets
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
# Click the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# get all the tds within the calendar
calendar0 = self.selenium.find_element_by_id('calendarin0')
tds = calendar0.find_elements_by_tag_name('td')
# verify there are no cells with the selected class
selected = [td for td in tds if td.get_attribute('class') == 'selected']
self.assertEqual(len(selected), 0)
def test_calendar_show_date_from_input(self):
"""
The calendar shows the date from the input field for every locale
supported by Django.
"""
self.admin_login(username='super', password='secret', login_url='/')
# Enter test data
member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M')
# Get month name translations for every locale
month_string = 'May'
path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale')
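        # For each language, look up the localized month name in the admin's
        # compiled "djangojs" catalog, falling back to English when the
        # string is untranslated.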
for language_code, language_name in settings.LANGUAGES:
try:
catalog = gettext.translation('djangojs', path, [language_code])
except OSError:
continue
if month_string in catalog._catalog:
month_name = catalog._catalog[month_string]
else:
month_name = month_string
# Get the expected caption
may_translation = month_name
expected_caption = '{:s} {:d}'.format(may_translation.upper(), 1984)
# Test with every locale
with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True):
# Open a page that has a date picker widget
url = reverse('admin:admin_widgets_member_change', args=(member.pk,))
self.selenium.get(self.live_server_url + url)
# Click on the calendar icon
self.selenium.find_element_by_id('calendarlink0').click()
# Make sure that the right month and year are displayed
self.wait_for_text('#calendarin0 caption', expected_caption)
@override_settings(TIME_ZONE='Asia/Singapore')
class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase):
def test_date_time_picker_shortcuts(self):
"""
date/time/datetime picker shortcuts work in the current time zone.
Refs #20663.
        This test case is fairly tricky; it relies on selenium still running
        the browser in the default time zone "America/Chicago" despite
        `override_settings` changing the time zone to "Asia/Singapore".
"""
self.admin_login(username='super', password='secret', login_url='/')
error_margin = timedelta(seconds=10)
        # If we are neighbouring a DST switch, add an hour of error margin.
tz = pytz.timezone('America/Chicago')
utc_now = datetime.now(pytz.utc)
tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname()
tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname()
if tz_yesterday != tz_tomorrow:
error_margin += timedelta(hours=1)
now = datetime.now()
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_member_add'))
self.selenium.find_element_by_id('id_name').send_keys('test')
# Click on the "today" and "now" shortcuts.
shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts')
for shortcut in shortcuts:
shortcut.find_element_by_tag_name('a').click()
# There is a time zone mismatch warning.
# Warning: This would effectively fail if the TIME_ZONE defined in the
# settings has the same UTC offset as "Asia/Singapore" because the
# mismatch warning would be rightfully missing from the page.
self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning')
# Submit the form.
with self.wait_page_loaded():
self.selenium.find_element_by_name('_save').click()
# Make sure that "now" in javascript is within 10 seconds
# from "now" on the server side.
member = Member.objects.get(name='test')
self.assertGreater(member.birthdate, now - error_margin)
self.assertLess(member.birthdate, now + error_margin)
# The above tests run with Asia/Singapore, which is on the positive side of
# UTC. Here we test with a time zone on the negative side.
@override_settings(TIME_ZONE='US/Eastern')
class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests):
pass
class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
self.lisa = Student.objects.create(name='Lisa')
self.john = Student.objects.create(name='John')
self.bob = Student.objects.create(name='Bob')
self.peter = Student.objects.create(name='Peter')
self.jenny = Student.objects.create(name='Jenny')
self.jason = Student.objects.create(name='Jason')
self.cliff = Student.objects.create(name='Cliff')
self.arthur = Student.objects.create(name='Arthur')
self.school = School.objects.create(name='School of Awesome')
def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=None, remove_all=None):
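        # Assert which chooser buttons carry the "active" CSS class; the
        # "choose all"/"remove all" links only exist in horizontal mode.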
choose_link = '#id_%s_add_link' % field_name
choose_all_link = '#id_%s_add_all_link' % field_name
remove_link = '#id_%s_remove_link' % field_name
remove_all_link = '#id_%s_remove_all_link' % field_name
self.assertEqual(self.has_css_class(choose_link, 'active'), choose)
self.assertEqual(self.has_css_class(remove_link, 'active'), remove)
if mode == 'horizontal':
self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)
self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)
def execute_basic_operations(self, mode, field_name):
original_url = self.selenium.current_url
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
choose_all_link = 'id_%s_add_all_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
remove_all_link = 'id_%s_remove_all_link' % field_name
# Initial positions ---------------------------------------------------
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)])
self.assertActiveButtons(mode, field_name, False, False, True, True)
# Click 'Choose all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(choose_all_link).click()
elif mode == 'vertical':
            # There's no 'Choose all' button in vertical mode, so individually
# select all options and click 'Choose'.
for option in self.selenium.find_elements_by_css_selector(from_box + ' > option'):
option.click()
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertActiveButtons(mode, field_name, False, False, False, True)
# Click 'Remove all' --------------------------------------------------
if mode == 'horizontal':
self.selenium.find_element_by_id(remove_all_link).click()
elif mode == 'vertical':
            # There's no 'Remove all' button in vertical mode, so individually
# select all options and click 'Remove'.
for option in self.selenium.find_elements_by_css_selector(to_box + ' > option'):
option.click()
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [
str(self.lisa.id), str(self.peter.id),
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
self.assertSelectOptions(to_box, [])
self.assertActiveButtons(mode, field_name, False, False, True, False)
# Choose some options ------------------------------------------------
from_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(from_box, self.lisa.id)
)
        # Check the title attribute is there for tooltips: ticket #20821
self.assertEqual(from_lisa_select_option.get_attribute('title'), from_lisa_select_option.get_attribute('text'))
self.select_option(from_box, str(self.lisa.id))
self.select_option(from_box, str(self.jason.id))
self.select_option(from_box, str(self.bob.id))
self.select_option(from_box, str(self.john.id))
self.assertActiveButtons(mode, field_name, True, False, True, False)
self.selenium.find_element_by_id(choose_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.bob.id),
str(self.jason.id), str(self.john.id),
])
# Check the tooltip is still there after moving: ticket #20821
to_lisa_select_option = self.selenium.find_element_by_css_selector(
'{} > option[value="{}"]'.format(to_box, self.lisa.id)
)
self.assertEqual(to_lisa_select_option.get_attribute('title'), to_lisa_select_option.get_attribute('text'))
# Remove some options -------------------------------------------------
self.select_option(to_box, str(self.lisa.id))
self.select_option(to_box, str(self.bob.id))
self.assertActiveButtons(mode, field_name, False, True, True, True)
self.selenium.find_element_by_id(remove_link).click()
self.assertActiveButtons(mode, field_name, False, False, True, True)
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.arthur.id),
str(self.cliff.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id)
])
self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.arthur.id))
self.select_option(from_box, str(self.cliff.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [
str(self.peter.id), str(self.jenny.id),
str(self.lisa.id), str(self.bob.id),
])
self.assertSelectOptions(to_box, [
str(self.jason.id), str(self.john.id),
str(self.arthur.id), str(self.cliff.id),
])
# Choose some more options --------------------------------------------
self.select_option(from_box, str(self.peter.id))
self.select_option(from_box, str(self.lisa.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(from_box, str(self.peter.id))
self.deselect_option(from_box, str(self.lisa.id))
# Choose some more options --------------------------------------------
self.select_option(to_box, str(self.jason.id))
self.select_option(to_box, str(self.john.id))
# Confirm they're selected after clicking inactive buttons: ticket #26575
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)])
# Unselect the options ------------------------------------------------
self.deselect_option(to_box, str(self.jason.id))
self.deselect_option(to_box, str(self.john.id))
# Pressing buttons shouldn't change the URL.
self.assertEqual(self.selenium.current_url, original_url)
def test_basic(self):
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
self.wait_page_ready()
self.execute_basic_operations('vertical', 'students')
self.execute_basic_operations('horizontal', 'alumni')
# Save and check that everything is properly stored in the database ---
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_ready()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john])
self.assertEqual(list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john])
def test_filter(self):
"""
Typing in the search box filters out options displayed in the 'from'
box.
"""
from selenium.webdriver.common.keys import Keys
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))
for field_name in ['students', 'alumni']:
from_box = '#id_%s_from' % field_name
to_box = '#id_%s_to' % field_name
choose_link = 'id_%s_add_link' % field_name
remove_link = 'id_%s_remove_link' % field_name
input = self.selenium.find_element_by_id('id_%s_input' % field_name)
# Initial values
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# Typing in some characters filters out non-matching options
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys('R')
self.assertSelectOptions(from_box, [str(self.arthur.id)])
# Clearing the text box makes the other options reappear
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE])
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jason.id),
str(self.jenny.id), str(self.john.id),
])
# -----------------------------------------------------------------
# Choosing a filtered option sends it properly to the 'to' box.
input.send_keys('a')
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)])
self.select_option(from_box, str(self.jason.id))
self.selenium.find_element_by_id(choose_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id)])
self.assertSelectOptions(to_box, [
str(self.lisa.id), str(self.peter.id), str(self.jason.id),
])
self.select_option(to_box, str(self.lisa.id))
self.selenium.find_element_by_id(remove_link).click()
self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE]) # Clear text box
self.assertSelectOptions(from_box, [
str(self.arthur.id), str(self.bob.id),
str(self.cliff.id), str(self.jenny.id),
str(self.john.id), str(self.lisa.id),
])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
# -----------------------------------------------------------------
# Pressing enter on a filtered option sends it properly to
# the 'to' box.
self.select_option(to_box, str(self.jason.id))
self.selenium.find_element_by_id(remove_link).click()
input.send_keys('ja')
self.assertSelectOptions(from_box, [str(self.jason.id)])
input.send_keys([Keys.ENTER])
self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)])
input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE])
# Save and check that everything is properly stored in the database ---
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.school = School.objects.get(id=self.school.id) # Reload from database
self.assertEqual(list(self.school.students.all()), [self.jason, self.peter])
self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
def test_back_button_bug(self):
"""
Some browsers had a bug where navigating away from the change page
and then clicking the browser's back button would clear the
filter_horizontal/filter_vertical widgets (#13614).
"""
self.school.students.set([self.lisa, self.peter])
self.school.alumni.set([self.lisa, self.peter])
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
# Navigate away and go back to the change form page.
self.selenium.find_element_by_link_text('Home').click()
self.selenium.back()
expected_unselected_values = [
str(self.arthur.id), str(self.bob.id), str(self.cliff.id),
str(self.jason.id), str(self.jenny.id), str(self.john.id),
]
expected_selected_values = [str(self.lisa.id), str(self.peter.id)]
# Everything is still in place
self.assertSelectOptions('#id_students_from', expected_unselected_values)
self.assertSelectOptions('#id_students_to', expected_selected_values)
self.assertSelectOptions('#id_alumni_from', expected_unselected_values)
self.assertSelectOptions('#id_alumni_to', expected_selected_values)
def test_refresh_page(self):
"""
Horizontal and vertical filter widgets keep selected options on page
reload (#22955).
"""
self.school.students.add(self.arthur, self.jason)
self.school.alumni.add(self.arthur, self.jason)
self.admin_login(username='super', password='secret', login_url='/')
change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,))
self.selenium.get(self.live_server_url + change_url)
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
        # self.selenium.refresh() or send_keys(Keys.F5) does a hard reload and
        # doesn't replicate what happens when a user clicks the browser's
        # 'Refresh' button.
with self.wait_page_loaded():
self.selenium.execute_script("location.reload()")
options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option'))
self.assertEqual(options_len, 2)
class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def setUp(self):
super().setUp()
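        # Use explicit primary keys so the popup links can be matched against
        # /band/42/ and /band/98/ below.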
Band.objects.create(id=42, name='Bogey Blues')
Band.objects.create(id=98, name='Green Potatoes')
def test_ForeignKey(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_main_band').get_attribute('value'), '')
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_main_band').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the other selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_main_band', '98')
def test_many_to_many(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_event_add'))
main_window = self.selenium.current_window_handle
# No value has been selected yet
self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').get_attribute('value'), '')
# Help text for the field is displayed
self.assertEqual(
self.selenium.find_element_by_css_selector('.field-supporting_bands div.help').text,
'Supporting Bands.'
)
# Open the popup window and click on a band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Bogey Blues')
self.assertIn('/band/42/', link.get_attribute('href'))
link.click()
# The field now contains the selected band's id
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42')
# Reopen the popup window and click on another band
self.selenium.find_element_by_id('lookup_id_supporting_bands').click()
self.wait_for_and_switch_to_popup()
link = self.selenium.find_element_by_link_text('Green Potatoes')
self.assertIn('/band/98/', link.get_attribute('href'))
link.click()
# The field now contains the two selected bands' ids
self.selenium.switch_to.window(main_window)
self.wait_for_value('#id_supporting_bands', '42,98')
class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase):
def test_ForeignKey_using_to_field(self):
self.admin_login(username='super', password='secret', login_url='/')
self.selenium.get(self.live_server_url + reverse('admin:admin_widgets_profile_add'))
main_window = self.selenium.current_window_handle
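        # The user foreign key targets a non-primary-key field, so the select
        # option values checked below are usernames rather than primary keys.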
        # Click the Add User button to add a new user.
self.selenium.find_element_by_id('add_id_user').click()
self.wait_for_and_switch_to_popup()
password_field = self.selenium.find_element_by_id('id_password')
password_field.send_keys('password')
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'newuser'
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
# The field now contains the new user
self.selenium.find_element_by_css_selector('#id_user option[value=newuser]')
# Click the Change User button to change it
self.selenium.find_element_by_id('change_id_user').click()
self.wait_for_and_switch_to_popup()
username_field = self.selenium.find_element_by_id('id_username')
username_value = 'changednewuser'
username_field.clear()
username_field.send_keys(username_value)
save_button_css_selector = '.submit-row > input[type=submit]'
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.selenium.switch_to.window(main_window)
self.selenium.find_element_by_css_selector('#id_user option[value=changednewuser]')
# Go ahead and submit the form to make sure it works
self.selenium.find_element_by_css_selector(save_button_css_selector).click()
self.wait_for_text('li.success', 'The profile “changednewuser” was added successfully.')
profiles = Profile.objects.all()
self.assertEqual(len(profiles), 1)
self.assertEqual(profiles[0].user.username, username_value)
|
1d7c318c2841edb13e6ae3c9cefd6a380518c83ddade0a09196ebbcc2c016d33 | import os
import unittest
from django.core.files.uploadedfile import (
SimpleUploadedFile, TemporaryUploadedFile,
)
from django.forms import (
ClearableFileInput, FileInput, ImageField, ValidationError, Widget,
)
from django.test import SimpleTestCase
from . import FormFieldAssertionsMixin
try:
from PIL import Image
except ImportError:
Image = None
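# Resolve test image fixtures relative to the tests/ directory two levels up
# from this module.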
def get_img_path(path):
return os.path.join(os.path.abspath(os.path.join(__file__, '..', '..')), 'tests', path)
@unittest.skipUnless(Image, "Pillow is required to test ImageField")
class ImageFieldTest(FormFieldAssertionsMixin, SimpleTestCase):
def test_imagefield_annotate_with_image_after_clean(self):
f = ImageField()
img_path = get_img_path('filepath_test_files/1x1.png')
with open(img_path, 'rb') as img_file:
img_data = img_file.read()
img_file = SimpleUploadedFile('1x1.png', img_data)
img_file.content_type = 'text/plain'
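        # clean() should replace the bogus content type with the one Pillow
        # detects from the image data.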
uploaded_file = f.clean(img_file)
self.assertEqual('PNG', uploaded_file.image.format)
self.assertEqual('image/png', uploaded_file.content_type)
def test_imagefield_annotate_with_bitmap_image_after_clean(self):
"""
This also tests the situation when Pillow doesn't detect the MIME type
of the image (#24948).
"""
from PIL.BmpImagePlugin import BmpImageFile
try:
Image.register_mime(BmpImageFile.format, None)
f = ImageField()
img_path = get_img_path('filepath_test_files/1x1.bmp')
with open(img_path, 'rb') as img_file:
img_data = img_file.read()
img_file = SimpleUploadedFile('1x1.bmp', img_data)
img_file.content_type = 'text/plain'
uploaded_file = f.clean(img_file)
self.assertEqual('BMP', uploaded_file.image.format)
self.assertIsNone(uploaded_file.content_type)
finally:
Image.register_mime(BmpImageFile.format, 'image/bmp')
def test_file_extension_validation(self):
f = ImageField()
img_path = get_img_path('filepath_test_files/1x1.png')
with open(img_path, 'rb') as img_file:
img_data = img_file.read()
img_file = SimpleUploadedFile('1x1.txt', img_data)
with self.assertRaisesMessage(ValidationError, 'File extension “txt” is not allowed.'):
f.clean(img_file)
def test_corrupted_image(self):
f = ImageField()
img_file = SimpleUploadedFile('not_an_image.jpg', b'not an image')
msg = (
'Upload a valid image. The file you uploaded was either not an '
'image or a corrupted image.'
)
with self.assertRaisesMessage(ValidationError, msg):
f.clean(img_file)
with TemporaryUploadedFile('not_an_image_tmp.png', 'text/plain', 1, 'utf-8') as tmp_file:
with self.assertRaisesMessage(ValidationError, msg):
f.clean(tmp_file)
def test_widget_attrs_default_accept(self):
f = ImageField()
# Nothing added for non-FileInput widgets.
self.assertEqual(f.widget_attrs(Widget()), {})
self.assertEqual(f.widget_attrs(FileInput()), {'accept': 'image/*'})
self.assertEqual(f.widget_attrs(ClearableFileInput()), {'accept': 'image/*'})
self.assertWidgetRendersTo(f, '<input type="file" name="f" accept="image/*" required id="id_f" />')
def test_widget_attrs_accept_specified(self):
f = ImageField(widget=FileInput(attrs={'accept': 'image/png'}))
self.assertEqual(f.widget_attrs(f.widget), {})
self.assertWidgetRendersTo(f, '<input type="file" name="f" accept="image/png" required id="id_f" />')
def test_widget_attrs_accept_false(self):
f = ImageField(widget=FileInput(attrs={'accept': False}))
self.assertEqual(f.widget_attrs(f.widget), {})
self.assertWidgetRendersTo(f, '<input type="file" name="f" required id="id_f" />')
|
535cbc0609bdeea412b7bbe95c11a7fd2b97120806e4198b1a1855cf3c8eb564 | import re
from django.forms import RegexField, ValidationError
from django.test import SimpleTestCase
class RegexFieldTest(SimpleTestCase):
def test_regexfield_1(self):
f = RegexField('^[0-9][A-F][0-9]$')
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('3G3')
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean(' 2A2')
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('2A2 ')
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean('')
def test_regexfield_2(self):
f = RegexField('^[0-9][A-F][0-9]$', required=False)
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('3G3')
self.assertEqual('', f.clean(''))
def test_regexfield_3(self):
f = RegexField(re.compile('^[0-9][A-F][0-9]$'))
self.assertEqual('2A2', f.clean('2A2'))
self.assertEqual('3F3', f.clean('3F3'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('3G3')
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean(' 2A2')
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('2A2 ')
def test_regexfield_4(self):
f = RegexField('^[0-9]+$', min_length=5, max_length=10)
with self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 5 characters (it has 3).'"):
f.clean('123')
with self.assertRaisesMessage(
ValidationError,
"'Ensure this value has at least 5 characters (it has 3).', "
"'Enter a valid value.'",
):
f.clean('abc')
self.assertEqual('12345', f.clean('12345'))
self.assertEqual('1234567890', f.clean('1234567890'))
with self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 10 characters (it has 11).'"):
f.clean('12345678901')
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('12345a')
def test_regexfield_unicode_characters(self):
f = RegexField(r'^\w+$')
self.assertEqual('éèøçÎÎ你好', f.clean('éèøçÎÎ你好'))
def test_change_regex_after_init(self):
f = RegexField('^[a-z]+$')
f.regex = '^[0-9]+$'
self.assertEqual('1234', f.clean('1234'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid value.'"):
f.clean('abcd')
def test_get_regex(self):
f = RegexField('^[a-z]+$')
self.assertEqual(f.regex, re.compile('^[a-z]+$'))
def test_regexfield_strip(self):
f = RegexField('^[a-z]+$', strip=True)
self.assertEqual(f.clean(' a'), 'a')
self.assertEqual(f.clean('a '), 'a')
|
4886b997fa964d50354a5dfd12fe8ba8c57f82dbab4681ce82da777961e87b5d | import copy
import datetime
import json
import uuid
from django.core.exceptions import NON_FIELD_ERRORS
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import MaxValueValidator, RegexValidator
from django.forms import (
BooleanField, CharField, CheckboxSelectMultiple, ChoiceField, DateField,
DateTimeField, EmailField, FileField, FileInput, FloatField, Form,
HiddenInput, ImageField, IntegerField, MultipleChoiceField,
MultipleHiddenInput, MultiValueField, NullBooleanField, PasswordInput,
RadioSelect, Select, SplitDateTimeField, SplitHiddenDateTimeWidget,
Textarea, TextInput, TimeField, ValidationError, forms,
)
from django.forms.renderers import DjangoTemplates, get_default_renderer
from django.forms.utils import ErrorList
from django.http import QueryDict
from django.template import Context, Template
from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.safestring import mark_safe
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
class PersonNew(Form):
first_name = CharField(widget=TextInput(attrs={'id': 'first_name_id'}))
last_name = CharField()
birthday = DateField()
class MultiValueDictLike(dict):
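    # Minimal stand-in for MultiValueDict: getlist() wraps the single stored
    # value in a list.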
def getlist(self, key):
return [self[key]]
class FormsTestCase(SimpleTestCase):
# A Form is a collection of Fields. It knows how to validate a set of data and it
# knows how to render itself in a couple of default ways (e.g., an HTML table).
# You can pass it data in __init__(), as a dictionary.
def test_form(self):
# Pass a dictionary to a Form's __init__().
p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'})
self.assertTrue(p.is_bound)
self.assertEqual(p.errors, {})
self.assertTrue(p.is_valid())
self.assertHTMLEqual(p.errors.as_ul(), '')
self.assertEqual(p.errors.as_text(), '')
self.assertEqual(p.cleaned_data["first_name"], 'John')
self.assertEqual(p.cleaned_data["last_name"], 'Lennon')
self.assertEqual(p.cleaned_data["birthday"], datetime.date(1940, 10, 9))
self.assertHTMLEqual(
str(p['first_name']),
'<input type="text" name="first_name" value="John" id="id_first_name" required>'
)
self.assertHTMLEqual(
str(p['last_name']),
'<input type="text" name="last_name" value="Lennon" id="id_last_name" required>'
)
self.assertHTMLEqual(
str(p['birthday']),
'<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required>'
)
msg = "Key 'nonexistentfield' not found in 'Person'. Choices are: birthday, first_name, last_name."
with self.assertRaisesMessage(KeyError, msg):
p['nonexistentfield']
form_output = []
for boundfield in p:
form_output.append(str(boundfield))
self.assertHTMLEqual(
'\n'.join(form_output),
"""<input type="text" name="first_name" value="John" id="id_first_name" required>
<input type="text" name="last_name" value="Lennon" id="id_last_name" required>
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required>"""
)
form_output = []
for boundfield in p:
form_output.append([boundfield.label, boundfield.data])
self.assertEqual(form_output, [
['First name', 'John'],
['Last name', 'Lennon'],
['Birthday', '1940-10-9']
])
self.assertHTMLEqual(
str(p),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" value="John" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" value="Lennon" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required></td></tr>"""
)
def test_empty_dict(self):
# Empty dictionaries are valid, too.
p = Person({})
self.assertTrue(p.is_bound)
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['last_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
self.assertFalse(p.is_valid())
self.assertEqual(p.cleaned_data, {})
self.assertHTMLEqual(
str(p),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="first_name" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="last_name" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td>
<ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="birthday" id="id_birthday" required></td></tr>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="first_name" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="last_name" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="birthday" id="id_birthday" required></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
<label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
<label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
<label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required></p>"""
)
def test_empty_querydict_args(self):
data = QueryDict()
files = QueryDict()
p = Person(data, files)
self.assertIs(p.data, data)
self.assertIs(p.files, files)
def test_unbound_form(self):
# If you don't pass any values to the Form's __init__(), or if you pass None,
# the Form will be considered unbound and won't do any validation. Form.errors
# will be an empty dictionary *but* Form.is_valid() will return False.
p = Person()
self.assertFalse(p.is_bound)
self.assertEqual(p.errors, {})
self.assertFalse(p.is_valid())
with self.assertRaises(AttributeError):
p.cleaned_data
self.assertHTMLEqual(
str(p),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" id="id_birthday" required></td></tr>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" id="id_birthday" required></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></li>
<li><label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></li>
<li><label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<p><label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></p>
<p><label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></p>
<p><label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required></p>"""
)
def test_unicode_values(self):
# Unicode values are handled properly.
p = Person({
'first_name': 'John',
'last_name': '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111',
'birthday': '1940-10-9'
})
self.assertHTMLEqual(
p.as_table(),
'<tr><th><label for="id_first_name">First name:</label></th><td>'
'<input type="text" name="first_name" value="John" id="id_first_name" required></td></tr>\n'
'<tr><th><label for="id_last_name">Last name:</label>'
'</th><td><input type="text" name="last_name" '
'value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111"'
'id="id_last_name" required></td></tr>\n'
'<tr><th><label for="id_birthday">Birthday:</label></th><td>'
'<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required></td></tr>'
)
self.assertHTMLEqual(
p.as_ul(),
'<li><label for="id_first_name">First name:</label> '
'<input type="text" name="first_name" value="John" id="id_first_name" required></li>\n'
'<li><label for="id_last_name">Last name:</label> '
'<input type="text" name="last_name" '
'value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" required></li>\n'
'<li><label for="id_birthday">Birthday:</label> '
'<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required></li>'
)
self.assertHTMLEqual(
p.as_p(),
'<p><label for="id_first_name">First name:</label> '
'<input type="text" name="first_name" value="John" id="id_first_name" required></p>\n'
'<p><label for="id_last_name">Last name:</label> '
'<input type="text" name="last_name" '
'value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" id="id_last_name" required></p>\n'
'<p><label for="id_birthday">Birthday:</label> '
'<input type="text" name="birthday" value="1940-10-9" id="id_birthday" required></p>'
)
p = Person({'last_name': 'Lennon'})
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
self.assertFalse(p.is_valid())
self.assertEqual(
p.errors,
{'birthday': ['This field is required.'], 'first_name': ['This field is required.']}
)
self.assertEqual(p.cleaned_data, {'last_name': 'Lennon'})
self.assertEqual(p['first_name'].errors, ['This field is required.'])
self.assertHTMLEqual(
p['first_name'].errors.as_ul(),
'<ul class="errorlist"><li>This field is required.</li></ul>'
)
self.assertEqual(p['first_name'].errors.as_text(), '* This field is required.')
p = Person()
self.assertHTMLEqual(
str(p['first_name']),
'<input type="text" name="first_name" id="id_first_name" required>',
)
self.assertHTMLEqual(str(p['last_name']), '<input type="text" name="last_name" id="id_last_name" required>')
self.assertHTMLEqual(str(p['birthday']), '<input type="text" name="birthday" id="id_birthday" required>')
def test_cleaned_data_only_fields(self):
# cleaned_data will always *only* contain a key for fields defined in the
# Form, even if you pass extra data when you define the Form. In this
# example, we pass a bunch of extra fields to the form constructor,
# but cleaned_data contains only the form's fields.
data = {
'first_name': 'John',
'last_name': 'Lennon',
'birthday': '1940-10-9',
'extra1': 'hello',
'extra2': 'hello',
}
p = Person(data)
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], 'John')
self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
def test_optional_data(self):
# cleaned_data will include a key and value for *all* fields defined in the Form,
# even if the Form's data didn't include a value for fields that are not
# required. In this example, the data dictionary doesn't include a value for the
# "nick_name" field, but cleaned_data includes it. For CharFields, it's set to the
# empty string.
class OptionalPersonForm(Form):
first_name = CharField()
last_name = CharField()
nick_name = CharField(required=False)
data = {'first_name': 'John', 'last_name': 'Lennon'}
f = OptionalPersonForm(data)
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['nick_name'], '')
self.assertEqual(f.cleaned_data['first_name'], 'John')
self.assertEqual(f.cleaned_data['last_name'], 'Lennon')
# For DateFields, it's set to None.
class OptionalPersonForm(Form):
first_name = CharField()
last_name = CharField()
birth_date = DateField(required=False)
data = {'first_name': 'John', 'last_name': 'Lennon'}
f = OptionalPersonForm(data)
self.assertTrue(f.is_valid())
self.assertIsNone(f.cleaned_data['birth_date'])
self.assertEqual(f.cleaned_data['first_name'], 'John')
self.assertEqual(f.cleaned_data['last_name'], 'Lennon')
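        # Illustrative sketch, not part of the original test: when the optional
        # field *is* supplied, it's cleaned like any other field.
        f = OptionalPersonForm({'first_name': 'John', 'last_name': 'Lennon', 'birth_date': '1940-10-9'})
        self.assertTrue(f.is_valid())
        self.assertEqual(f.cleaned_data['birth_date'], datetime.date(1940, 10, 9))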
def test_auto_id(self):
# "auto_id" tells the Form to add an "id" attribute to each form element.
# If it's a string that contains '%s', Django will use that as a format string
# into which the field's name will be inserted. It will also put a <label> around
# the human-readable labels for a field.
p = Person(auto_id='%s_id')
self.assertHTMLEqual(
p.as_table(),
"""<tr><th><label for="first_name_id">First name:</label></th><td>
<input type="text" name="first_name" id="first_name_id" required></td></tr>
<tr><th><label for="last_name_id">Last name:</label></th><td>
<input type="text" name="last_name" id="last_name_id" required></td></tr>
<tr><th><label for="birthday_id">Birthday:</label></th><td>
<input type="text" name="birthday" id="birthday_id" required></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="first_name_id">First name:</label>
<input type="text" name="first_name" id="first_name_id" required></li>
<li><label for="last_name_id">Last name:</label>
<input type="text" name="last_name" id="last_name_id" required></li>
<li><label for="birthday_id">Birthday:</label>
<input type="text" name="birthday" id="birthday_id" required></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<p><label for="first_name_id">First name:</label>
<input type="text" name="first_name" id="first_name_id" required></p>
<p><label for="last_name_id">Last name:</label>
<input type="text" name="last_name" id="last_name_id" required></p>
<p><label for="birthday_id">Birthday:</label>
<input type="text" name="birthday" id="birthday_id" required></p>"""
)
def test_auto_id_true(self):
# If auto_id is any True value whose str() does not contain '%s', the "id"
# attribute will be the name of the field.
p = Person(auto_id=True)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="first_name">First name:</label>
<input type="text" name="first_name" id="first_name" required></li>
<li><label for="last_name">Last name:</label>
<input type="text" name="last_name" id="last_name" required></li>
<li><label for="birthday">Birthday:</label>
<input type="text" name="birthday" id="birthday" required></li>"""
)
def test_auto_id_false(self):
# If auto_id is any False value, an "id" attribute won't be output unless it
# was manually entered.
p = Person(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>First name: <input type="text" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required></li>"""
)
def test_id_on_field(self):
# In this example, auto_id is False, but the "id" attribute for the "first_name"
# field is given. Also note that field gets a <label>, while the others don't.
p = PersonNew(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="first_name_id">First name:</label>
<input type="text" id="first_name_id" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required></li>"""
)
def test_auto_id_on_form_and_field(self):
# If the "id" attribute is specified in the Form and auto_id is True, the "id"
# attribute in the Form gets precedence.
p = PersonNew(auto_id=True)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="first_name_id">First name:</label>
<input type="text" id="first_name_id" name="first_name" required></li>
<li><label for="last_name">Last name:</label>
<input type="text" name="last_name" id="last_name" required></li>
<li><label for="birthday">Birthday:</label>
<input type="text" name="birthday" id="birthday" required></li>"""
)
def test_various_boolean_values(self):
class SignupForm(Form):
email = EmailField()
get_spam = BooleanField()
f = SignupForm(auto_id=False)
self.assertHTMLEqual(str(f['email']), '<input type="email" name="email" required>')
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" required>')
f = SignupForm({'email': '[email protected]', 'get_spam': True}, auto_id=False)
self.assertHTMLEqual(str(f['email']), '<input type="email" name="email" value="[email protected]" required>')
self.assertHTMLEqual(
str(f['get_spam']),
'<input checked type="checkbox" name="get_spam" required>',
)
# 'True' or 'true' should be rendered without a value attribute
f = SignupForm({'email': '[email protected]', 'get_spam': 'True'}, auto_id=False)
self.assertHTMLEqual(
str(f['get_spam']),
'<input checked type="checkbox" name="get_spam" required>',
)
f = SignupForm({'email': '[email protected]', 'get_spam': 'true'}, auto_id=False)
self.assertHTMLEqual(
str(f['get_spam']), '<input checked type="checkbox" name="get_spam" required>')
# A value of 'False' or 'false' should be rendered unchecked
f = SignupForm({'email': '[email protected]', 'get_spam': 'False'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" required>')
f = SignupForm({'email': '[email protected]', 'get_spam': 'false'}, auto_id=False)
self.assertHTMLEqual(str(f['get_spam']), '<input type="checkbox" name="get_spam" required>')
# A value of '0' should be interpreted as a True value (#16820)
f = SignupForm({'email': '[email protected]', 'get_spam': '0'})
self.assertTrue(f.is_valid())
self.assertTrue(f.cleaned_data.get('get_spam'))
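        # Illustrative sketch, not part of the original test: omitting a required
        # BooleanField from the data should trigger the usual "required" error.
        f = SignupForm({'email': '[email protected]'}, auto_id=False)
        self.assertFalse(f.is_valid())
        self.assertEqual(f.errors['get_spam'], ['This field is required.'])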
def test_widget_output(self):
# Any Field can have a Widget class passed to its constructor:
class ContactForm(Form):
subject = CharField()
message = CharField(widget=Textarea)
f = ContactForm(auto_id=False)
self.assertHTMLEqual(str(f['subject']), '<input type="text" name="subject" required>')
self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="10" cols="40" required></textarea>')
# as_textarea(), as_text() and as_hidden() are shortcuts for changing the output
# widget type:
self.assertHTMLEqual(
f['subject'].as_textarea(),
'<textarea name="subject" rows="10" cols="40" required></textarea>',
)
self.assertHTMLEqual(f['message'].as_text(), '<input type="text" name="message" required>')
self.assertHTMLEqual(f['message'].as_hidden(), '<input type="hidden" name="message">')
# The 'widget' parameter to a Field can also be an instance:
class ContactForm(Form):
subject = CharField()
message = CharField(widget=Textarea(attrs={'rows': 80, 'cols': 20}))
f = ContactForm(auto_id=False)
self.assertHTMLEqual(str(f['message']), '<textarea name="message" rows="80" cols="20" required></textarea>')
# Instance-level attrs are *not* carried over to as_textarea(), as_text() and
# as_hidden():
self.assertHTMLEqual(f['message'].as_text(), '<input type="text" name="message" required>')
f = ContactForm({'subject': 'Hello', 'message': 'I love you.'}, auto_id=False)
self.assertHTMLEqual(
f['subject'].as_textarea(),
'<textarea rows="10" cols="40" name="subject" required>Hello</textarea>'
)
self.assertHTMLEqual(
f['message'].as_text(),
'<input type="text" name="message" value="I love you." required>',
)
self.assertHTMLEqual(f['message'].as_hidden(), '<input type="hidden" name="message" value="I love you.">')
def test_forms_with_choices(self):
# For a form with a <select>, use ChoiceField:
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P" selected>Python</option>
<option value="J">Java</option>
</select>""")
# A subtlety: If one of the choices' value is the empty string and the form is
# unbound, then the <option> for the empty-string choice will get selected.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('', '------'), ('P', 'Python'), ('J', 'Java')])
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language" required>
<option value="" selected>------</option>
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
# You can specify widget attributes in the Widget constructor.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=Select(attrs={'class': 'foo'}))
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected>Python</option>
<option value="J">Java</option>
</select>""")
# When passing a custom widget instance to ChoiceField, note that setting
# 'choices' on the widget is meaningless. The widget will use the choices
# defined on the Field, not the ones defined on the Widget.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(
choices=[('P', 'Python'), ('J', 'Java')],
widget=Select(choices=[('R', 'Ruby'), ('P', 'Perl')], attrs={'class': 'foo'}),
)
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
f = FrameworkForm({'name': 'Django', 'language': 'P'}, auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select class="foo" name="language">
<option value="P" selected>Python</option>
<option value="J">Java</option>
</select>""")
# You can set a ChoiceField's choices after the fact.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField()
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<select name="language">
</select>""")
f.fields['language'].choices = [('P', 'Python'), ('J', 'Java')]
self.assertHTMLEqual(str(f['language']), """<select name="language">
<option value="P">Python</option>
<option value="J">Java</option>
</select>""")
def test_forms_with_radio(self):
# Add widget=RadioSelect to use that widget with a ChoiceField.
class FrameworkForm(Form):
name = CharField()
language = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')], widget=RadioSelect)
f = FrameworkForm(auto_id=False)
self.assertHTMLEqual(str(f['language']), """<ul>
<li><label><input type="radio" name="language" value="P" required> Python</label></li>
<li><label><input type="radio" name="language" value="J" required> Java</label></li>
</ul>""")
self.assertHTMLEqual(f.as_table(), """<tr><th>Name:</th><td><input type="text" name="name" required></td></tr>
<tr><th>Language:</th><td><ul>
<li><label><input type="radio" name="language" value="P" required> Python</label></li>
<li><label><input type="radio" name="language" value="J" required> Java</label></li>
</ul></td></tr>""")
self.assertHTMLEqual(f.as_ul(), """<li>Name: <input type="text" name="name" required></li>
<li>Language: <ul>
<li><label><input type="radio" name="language" value="P" required> Python</label></li>
<li><label><input type="radio" name="language" value="J" required> Java</label></li>
</ul></li>""")
# Regarding auto_id and <label>, RadioSelect is a special case. Each radio button
# gets a distinct ID, formed by appending an underscore plus the button's
# zero-based index.
f = FrameworkForm(auto_id='id_%s')
self.assertHTMLEqual(
str(f['language']),
"""<ul id="id_language">
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" required>
Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" required>
Java</label></li>
</ul>"""
)
# When RadioSelect is used with auto_id, and the whole form is printed using
# either as_table() or as_ul(), the label for the RadioSelect will point to the
# ID of the *first* radio button.
self.assertHTMLEqual(
f.as_table(),
"""<tr><th><label for="id_name">Name:</label></th><td><input type="text" name="name" id="id_name" required></td></tr>
<tr><th><label for="id_language_0">Language:</label></th><td><ul id="id_language">
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" required>
Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" required>
Java</label></li>
</ul></td></tr>"""
)
self.assertHTMLEqual(
f.as_ul(),
"""<li><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" required></li>
<li><label for="id_language_0">Language:</label> <ul id="id_language">
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" required>
Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" required>
Java</label></li>
</ul></li>"""
)
self.assertHTMLEqual(
f.as_p(),
"""<p><label for="id_name">Name:</label> <input type="text" name="name" id="id_name" required></p>
<p><label for="id_language_0">Language:</label> <ul id="id_language">
<li><label for="id_language_0"><input type="radio" id="id_language_0" value="P" name="language" required>
Python</label></li>
<li><label for="id_language_1"><input type="radio" id="id_language_1" value="J" name="language" required>
Java</label></li>
</ul></p>"""
)
# Test iterating on individual radios in a template
t = Template('{% for radio in form.language %}<div class="myradio">{{ radio }}</div>{% endfor %}')
self.assertHTMLEqual(
t.render(Context({'form': f})),
"""<div class="myradio"><label for="id_language_0">
<input id="id_language_0" name="language" type="radio" value="P" required> Python</label></div>
<div class="myradio"><label for="id_language_1">
<input id="id_language_1" name="language" type="radio" value="J" required> Java</label></div>"""
)
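        # Illustrative sketch, not part of the original test: each subwidget should
        # expose the same "_<index>" id via its id_for_label attribute.
        self.assertEqual(
            [radio.id_for_label for radio in f['language']],
            ['id_language_0', 'id_language_1'],
        )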
def test_form_with_iterable_boundfield(self):
class BeatleForm(Form):
name = ChoiceField(
choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')],
widget=RadioSelect,
)
f = BeatleForm(auto_id=False)
self.assertHTMLEqual(
'\n'.join(str(bf) for bf in f['name']),
"""<label><input type="radio" name="name" value="john" required> John</label>
<label><input type="radio" name="name" value="paul" required> Paul</label>
<label><input type="radio" name="name" value="george" required> George</label>
<label><input type="radio" name="name" value="ringo" required> Ringo</label>"""
)
self.assertHTMLEqual(
'\n'.join('<div>%s</div>' % bf for bf in f['name']),
"""<div><label><input type="radio" name="name" value="john" required> John</label></div>
<div><label><input type="radio" name="name" value="paul" required> Paul</label></div>
<div><label><input type="radio" name="name" value="george" required> George</label></div>
<div><label><input type="radio" name="name" value="ringo" required> Ringo</label></div>"""
)
def test_form_with_iterable_boundfield_id(self):
class BeatleForm(Form):
name = ChoiceField(
choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')],
widget=RadioSelect,
)
fields = list(BeatleForm()['name'])
self.assertEqual(len(fields), 4)
self.assertEqual(fields[0].id_for_label, 'id_name_0')
self.assertEqual(fields[0].choice_label, 'John')
self.assertHTMLEqual(
fields[0].tag(),
'<input type="radio" name="name" value="john" id="id_name_0" required>'
)
self.assertHTMLEqual(
str(fields[0]),
'<label for="id_name_0"><input type="radio" name="name" '
'value="john" id="id_name_0" required> John</label>'
)
self.assertEqual(fields[1].id_for_label, 'id_name_1')
self.assertEqual(fields[1].choice_label, 'Paul')
self.assertHTMLEqual(
fields[1].tag(),
'<input type="radio" name="name" value="paul" id="id_name_1" required>'
)
self.assertHTMLEqual(
str(fields[1]),
'<label for="id_name_1"><input type="radio" name="name" '
'value="paul" id="id_name_1" required> Paul</label>'
)
def test_iterable_boundfield_select(self):
class BeatleForm(Form):
name = ChoiceField(choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')])
fields = list(BeatleForm(auto_id=False)['name'])
self.assertEqual(len(fields), 4)
self.assertEqual(fields[0].id_for_label, 'id_name_0')
self.assertEqual(fields[0].choice_label, 'John')
self.assertHTMLEqual(fields[0].tag(), '<option value="john">John</option>')
self.assertHTMLEqual(str(fields[0]), '<option value="john">John</option>')
def test_form_with_noniterable_boundfield(self):
# You can iterate over any BoundField, not just those with widget=RadioSelect.
class BeatleForm(Form):
name = CharField()
f = BeatleForm(auto_id=False)
self.assertHTMLEqual('\n'.join(str(bf) for bf in f['name']), '<input type="text" name="name" required>')
def test_boundfield_slice(self):
class BeatleForm(Form):
name = ChoiceField(
choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')],
widget=RadioSelect,
)
f = BeatleForm()
bf = f['name']
self.assertEqual(
[str(item) for item in bf[1:]],
[str(bf[1]), str(bf[2]), str(bf[3])],
)
def test_boundfield_invalid_index(self):
class TestForm(Form):
name = ChoiceField(choices=[])
field = TestForm()['name']
msg = 'BoundField indices must be integers or slices, not str.'
with self.assertRaisesMessage(TypeError, msg):
field['foo']
def test_boundfield_bool(self):
"""BoundField without any choices (subwidgets) evaluates to True."""
class TestForm(Form):
name = ChoiceField(choices=[])
self.assertIs(bool(TestForm()['name']), True)
def test_forms_with_multiple_choice(self):
# MultipleChoiceField is a special case, as its data is required to be a list:
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField()
f = SongForm(auto_id=False)
self.assertHTMLEqual(str(f['composers']), """<select multiple name="composers" required>
</select>""")
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
f = SongForm(auto_id=False)
self.assertHTMLEqual(str(f['composers']), """<select multiple name="composers" required>
<option value="J">John Lennon</option>
<option value="P">Paul McCartney</option>
</select>""")
f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
self.assertHTMLEqual(str(f['name']), '<input type="text" name="name" value="Yesterday" required>')
self.assertHTMLEqual(str(f['composers']), """<select multiple name="composers" required>
<option value="J">John Lennon</option>
<option value="P" selected>Paul McCartney</option>
</select>""")
def test_form_with_disabled_fields(self):
class PersonForm(Form):
name = CharField()
birthday = DateField(disabled=True)
class PersonFormFieldInitial(Form):
name = CharField()
birthday = DateField(disabled=True, initial=datetime.date(1974, 8, 16))
# Disabled fields are generally not transmitted by user agents.
# The value from the form's initial data is used.
f1 = PersonForm({'name': 'John Doe'}, initial={'birthday': datetime.date(1974, 8, 16)})
f2 = PersonFormFieldInitial({'name': 'John Doe'})
for form in (f1, f2):
self.assertTrue(form.is_valid())
self.assertEqual(
form.cleaned_data,
{'birthday': datetime.date(1974, 8, 16), 'name': 'John Doe'}
)
# Values provided in the form's data are ignored.
data = {'name': 'John Doe', 'birthday': '1984-11-10'}
f1 = PersonForm(data, initial={'birthday': datetime.date(1974, 8, 16)})
f2 = PersonFormFieldInitial(data)
for form in (f1, f2):
self.assertTrue(form.is_valid())
self.assertEqual(
form.cleaned_data,
{'birthday': datetime.date(1974, 8, 16), 'name': 'John Doe'}
)
# Initial data remains present on invalid forms.
data = {}
f1 = PersonForm(data, initial={'birthday': datetime.date(1974, 8, 16)})
f2 = PersonFormFieldInitial(data)
for form in (f1, f2):
self.assertFalse(form.is_valid())
self.assertEqual(form['birthday'].value(), datetime.date(1974, 8, 16))
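        # Illustrative sketch, not part of the original test: assuming disabled fields
        # never count as changed, they're also excluded from changed_data.
        f = PersonForm(
            {'name': 'John Doe', 'birthday': '1984-11-10'},
            initial={'birthday': datetime.date(1974, 8, 16)},
        )
        self.assertTrue(f.is_valid())
        self.assertNotIn('birthday', f.changed_data)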
def test_hidden_data(self):
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')])
# MultipleChoiceField rendered as_hidden() is a special case. Because it can
# have multiple values, its as_hidden() renders multiple <input type="hidden">
# tags.
f = SongForm({'name': 'Yesterday', 'composers': ['P']}, auto_id=False)
self.assertHTMLEqual(f['composers'].as_hidden(), '<input type="hidden" name="composers" value="P">')
f = SongForm({'name': 'From Me To You', 'composers': ['P', 'J']}, auto_id=False)
self.assertHTMLEqual(f['composers'].as_hidden(), """<input type="hidden" name="composers" value="P">
<input type="hidden" name="composers" value="J">""")
        # SplitDateTimeField rendered as_hidden() is special too
class MessageForm(Form):
when = SplitDateTimeField()
f = MessageForm({'when_0': '1992-01-01', 'when_1': '01:01'})
self.assertTrue(f.is_valid())
self.assertHTMLEqual(
str(f['when']),
'<input type="text" name="when_0" value="1992-01-01" id="id_when_0" required>'
'<input type="text" name="when_1" value="01:01" id="id_when_1" required>'
)
self.assertHTMLEqual(
f['when'].as_hidden(),
'<input type="hidden" name="when_0" value="1992-01-01" id="id_when_0">'
'<input type="hidden" name="when_1" value="01:01" id="id_when_1">'
)
def test_multiple_choice_checkbox(self):
# MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(
choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')],
widget=CheckboxSelectMultiple,
)
f = SongForm(auto_id=False)
self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input type="checkbox" name="composers" value="J"> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P"> Paul McCartney</label></li>
</ul>""")
f = SongForm({'composers': ['J']}, auto_id=False)
self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked type="checkbox" name="composers" value="J"> John Lennon</label></li>
<li><label><input type="checkbox" name="composers" value="P"> Paul McCartney</label></li>
</ul>""")
f = SongForm({'composers': ['J', 'P']}, auto_id=False)
self.assertHTMLEqual(str(f['composers']), """<ul>
<li><label><input checked type="checkbox" name="composers" value="J"> John Lennon</label></li>
<li><label><input checked type="checkbox" name="composers" value="P"> Paul McCartney</label></li>
</ul>""")
# Test iterating on individual checkboxes in a template
t = Template('{% for checkbox in form.composers %}<div class="mycheckbox">{{ checkbox }}</div>{% endfor %}')
self.assertHTMLEqual(t.render(Context({'form': f})), """<div class="mycheckbox"><label>
<input checked name="composers" type="checkbox" value="J"> John Lennon</label></div>
<div class="mycheckbox"><label>
<input checked name="composers" type="checkbox" value="P"> Paul McCartney</label></div>""")
def test_checkbox_auto_id(self):
# Regarding auto_id, CheckboxSelectMultiple is a special case. Each checkbox
# gets a distinct ID, formed by appending an underscore plus the checkbox's
# zero-based index.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(
choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')],
widget=CheckboxSelectMultiple,
)
f = SongForm(auto_id='%s_id')
self.assertHTMLEqual(
str(f['composers']),
"""<ul id="composers_id">
<li><label for="composers_id_0">
<input type="checkbox" name="composers" value="J" id="composers_id_0"> John Lennon</label></li>
<li><label for="composers_id_1">
<input type="checkbox" name="composers" value="P" id="composers_id_1"> Paul McCartney</label></li>
</ul>"""
)
def test_multiple_choice_list_data(self):
# Data for a MultipleChoiceField should be a list. QueryDict and
# MultiValueDict conveniently work with this.
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(
choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')],
widget=CheckboxSelectMultiple,
)
data = {'name': 'Yesterday', 'composers': ['J', 'P']}
f = SongForm(data)
self.assertEqual(f.errors, {})
data = QueryDict('name=Yesterday&composers=J&composers=P')
f = SongForm(data)
self.assertEqual(f.errors, {})
data = MultiValueDict({'name': ['Yesterday'], 'composers': ['J', 'P']})
f = SongForm(data)
self.assertEqual(f.errors, {})
# SelectMultiple uses ducktyping so that MultiValueDictLike.getlist()
# is called.
f = SongForm(MultiValueDictLike({'name': 'Yesterday', 'composers': 'J'}))
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['composers'], ['J'])
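        # Illustrative sketch, not part of the original test: with a plain dict (no
        # getlist()), a bare string should be rejected because a list is expected.
        f = SongForm({'name': 'Yesterday', 'composers': 'J'})
        self.assertEqual(f.errors['composers'], ['Enter a list of values.'])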
def test_multiple_hidden(self):
class SongForm(Form):
name = CharField()
composers = MultipleChoiceField(
choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')],
widget=CheckboxSelectMultiple,
)
# The MultipleHiddenInput widget renders multiple values as hidden fields.
class SongFormHidden(Form):
name = CharField()
composers = MultipleChoiceField(
choices=[('J', 'John Lennon'), ('P', 'Paul McCartney')],
widget=MultipleHiddenInput,
)
f = SongFormHidden(MultiValueDict({'name': ['Yesterday'], 'composers': ['J', 'P']}), auto_id=False)
self.assertHTMLEqual(
f.as_ul(),
"""<li>Name: <input type="text" name="name" value="Yesterday" required>
<input type="hidden" name="composers" value="J">
<input type="hidden" name="composers" value="P"></li>"""
)
        # When using CheckboxSelectMultiple, the framework expects a list of values
        # as input and returns a list of cleaned values.
f = SongForm({'name': 'Yesterday'}, auto_id=False)
self.assertEqual(f.errors['composers'], ['This field is required.'])
f = SongForm({'name': 'Yesterday', 'composers': ['J']}, auto_id=False)
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['composers'], ['J'])
self.assertEqual(f.cleaned_data['name'], 'Yesterday')
f = SongForm({'name': 'Yesterday', 'composers': ['J', 'P']}, auto_id=False)
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['composers'], ['J', 'P'])
self.assertEqual(f.cleaned_data['name'], 'Yesterday')
# MultipleHiddenInput uses ducktyping so that
# MultiValueDictLike.getlist() is called.
f = SongForm(MultiValueDictLike({'name': 'Yesterday', 'composers': 'J'}))
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['composers'], ['J'])
def test_escaping(self):
# Validation errors are HTML-escaped when output as HTML.
class EscapingForm(Form):
special_name = CharField(label="<em>Special</em> Field")
special_safe_name = CharField(label=mark_safe("<em>Special</em> Field"))
def clean_special_name(self):
raise ValidationError("Something's wrong with '%s'" % self.cleaned_data['special_name'])
def clean_special_safe_name(self):
raise ValidationError(
mark_safe("'<b>%s</b>' is a safe string" % self.cleaned_data['special_safe_name'])
)
f = EscapingForm({
'special_name':
"Nothing to escape",
'special_safe_name': "Nothing to escape",
}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
"""<tr><th><em>Special</em> Field:</th><td>
<ul class="errorlist"><li>Something's wrong with 'Nothing to escape'</li></ul>
<input type="text" name="special_name" value="Nothing to escape" required></td></tr>
<tr><th><em>Special</em> Field:</th><td>
<ul class="errorlist"><li>'<b>Nothing to escape</b>' is a safe string</li></ul>
<input type="text" name="special_safe_name" value="Nothing to escape" required></td></tr>"""
)
f = EscapingForm({
'special_name': "Should escape < & > and <script>alert('xss')</script>",
'special_safe_name': "<i>Do not escape</i>"
}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
"""<tr><th><em>Special</em> Field:</th><td>
<ul class="errorlist"><li>Something's wrong with 'Should escape < & > and
<script>alert('xss')</script>'</li></ul>
<input type="text" name="special_name"
value="Should escape < & > and <script>alert('xss')</script>" required></td></tr>
<tr><th><em>Special</em> Field:</th><td>
<ul class="errorlist"><li>'<b><i>Do not escape</i></b>' is a safe string</li></ul>
<input type="text" name="special_safe_name" value="<i>Do not escape</i>" required></td></tr>"""
)
def test_validating_multiple_fields(self):
# There are a couple of ways to do multiple-field validation. If you want the
# validation message to be associated with a particular field, implement the
# clean_XXX() method on the Form, where XXX is the field name. As in
# Field.clean(), the clean_XXX() method should return the cleaned value. In the
# clean_XXX() method, you have access to self.cleaned_data, which is a dictionary
# of all the data that has been cleaned *so far*, in order by the fields,
# including the current field (e.g., the field XXX if you're in clean_XXX()).
class UserRegistration(Form):
username = CharField(max_length=10)
password1 = CharField(widget=PasswordInput)
password2 = CharField(widget=PasswordInput)
def clean_password2(self):
if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
self.cleaned_data['password1'] != self.cleaned_data['password2']):
raise ValidationError('Please make sure your passwords match.')
return self.cleaned_data['password2']
f = UserRegistration(auto_id=False)
self.assertEqual(f.errors, {})
f = UserRegistration({}, auto_id=False)
self.assertEqual(f.errors['username'], ['This field is required.'])
self.assertEqual(f.errors['password1'], ['This field is required.'])
self.assertEqual(f.errors['password2'], ['This field is required.'])
f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
self.assertEqual(f.errors['password2'], ['Please make sure your passwords match.'])
f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['username'], 'adrian')
self.assertEqual(f.cleaned_data['password1'], 'foo')
self.assertEqual(f.cleaned_data['password2'], 'foo')
# Another way of doing multiple-field validation is by implementing the
# Form's clean() method. Usually ValidationError raised by that method
# will not be associated with a particular field and will have a
# special-case association with the field named '__all__'. It's
        # possible to associate the errors with a particular field using the
# Form.add_error() method or by passing a dictionary that maps each
# field to one or more errors.
#
# Note that in Form.clean(), you have access to self.cleaned_data, a
# dictionary of all the fields/values that have *not* raised a
# ValidationError. Also note Form.clean() is required to return a
# dictionary of all clean data.
class UserRegistration(Form):
username = CharField(max_length=10)
password1 = CharField(widget=PasswordInput)
password2 = CharField(widget=PasswordInput)
def clean(self):
# Test raising a ValidationError as NON_FIELD_ERRORS.
if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
self.cleaned_data['password1'] != self.cleaned_data['password2']):
raise ValidationError('Please make sure your passwords match.')
# Test raising ValidationError that targets multiple fields.
errors = {}
if self.cleaned_data.get('password1') == 'FORBIDDEN_VALUE':
errors['password1'] = 'Forbidden value.'
if self.cleaned_data.get('password2') == 'FORBIDDEN_VALUE':
errors['password2'] = ['Forbidden value.']
if errors:
raise ValidationError(errors)
# Test Form.add_error()
if self.cleaned_data.get('password1') == 'FORBIDDEN_VALUE2':
self.add_error(None, 'Non-field error 1.')
self.add_error('password1', 'Forbidden value 2.')
if self.cleaned_data.get('password2') == 'FORBIDDEN_VALUE2':
self.add_error('password2', 'Forbidden value 2.')
raise ValidationError('Non-field error 2.')
return self.cleaned_data
f = UserRegistration(auto_id=False)
self.assertEqual(f.errors, {})
f = UserRegistration({}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
"""<tr><th>Username:</th><td>
<ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="username" maxlength="10" required></td></tr>
<tr><th>Password1:</th><td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="password" name="password1" required></td></tr>
<tr><th>Password2:</th><td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="password" name="password2" required></td></tr>"""
)
self.assertEqual(f.errors['username'], ['This field is required.'])
self.assertEqual(f.errors['password1'], ['This field is required.'])
self.assertEqual(f.errors['password2'], ['This field is required.'])
f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
self.assertEqual(f.errors['__all__'], ['Please make sure your passwords match.'])
self.assertHTMLEqual(
f.as_table(),
"""<tr><td colspan="2">
<ul class="errorlist nonfield"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><input type="text" name="username" value="adrian" maxlength="10" required></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" required></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" required></td></tr>"""
)
self.assertHTMLEqual(
f.as_ul(),
"""<li><ul class="errorlist nonfield">
<li>Please make sure your passwords match.</li></ul></li>
<li>Username: <input type="text" name="username" value="adrian" maxlength="10" required></li>
<li>Password1: <input type="password" name="password1" required></li>
<li>Password2: <input type="password" name="password2" required></li>"""
)
f = UserRegistration({'username': 'adrian', 'password1': 'foo', 'password2': 'foo'}, auto_id=False)
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['username'], 'adrian')
self.assertEqual(f.cleaned_data['password1'], 'foo')
self.assertEqual(f.cleaned_data['password2'], 'foo')
f = UserRegistration({
'username': 'adrian',
'password1': 'FORBIDDEN_VALUE',
'password2': 'FORBIDDEN_VALUE',
}, auto_id=False)
self.assertEqual(f.errors['password1'], ['Forbidden value.'])
self.assertEqual(f.errors['password2'], ['Forbidden value.'])
f = UserRegistration({
'username': 'adrian',
'password1': 'FORBIDDEN_VALUE2',
'password2': 'FORBIDDEN_VALUE2',
}, auto_id=False)
self.assertEqual(f.errors['__all__'], ['Non-field error 1.', 'Non-field error 2.'])
self.assertEqual(f.errors['password1'], ['Forbidden value 2.'])
self.assertEqual(f.errors['password2'], ['Forbidden value 2.'])
with self.assertRaisesMessage(ValueError, "has no field named"):
f.add_error('missing_field', 'Some error.')
def test_update_error_dict(self):
class CodeForm(Form):
code = CharField(max_length=10)
def clean(self):
try:
raise ValidationError({'code': [ValidationError('Code error 1.')]})
except ValidationError as e:
self._errors = e.update_error_dict(self._errors)
try:
raise ValidationError({'code': [ValidationError('Code error 2.')]})
except ValidationError as e:
self._errors = e.update_error_dict(self._errors)
try:
raise ValidationError({'code': forms.ErrorList(['Code error 3.'])})
except ValidationError as e:
self._errors = e.update_error_dict(self._errors)
try:
raise ValidationError('Non-field error 1.')
except ValidationError as e:
self._errors = e.update_error_dict(self._errors)
try:
raise ValidationError([ValidationError('Non-field error 2.')])
except ValidationError as e:
self._errors = e.update_error_dict(self._errors)
# The newly added list of errors is an instance of ErrorList.
for field, error_list in self._errors.items():
if not isinstance(error_list, self.error_class):
self._errors[field] = self.error_class(error_list)
form = CodeForm({'code': 'hello'})
# Trigger validation.
self.assertFalse(form.is_valid())
# update_error_dict didn't lose track of the ErrorDict type.
self.assertIsInstance(form._errors, forms.ErrorDict)
self.assertEqual(dict(form.errors), {
'code': ['Code error 1.', 'Code error 2.', 'Code error 3.'],
NON_FIELD_ERRORS: ['Non-field error 1.', 'Non-field error 2.'],
})
def test_has_error(self):
class UserRegistration(Form):
username = CharField(max_length=10)
password1 = CharField(widget=PasswordInput, min_length=5)
password2 = CharField(widget=PasswordInput)
def clean(self):
if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
self.cleaned_data['password1'] != self.cleaned_data['password2']):
raise ValidationError(
'Please make sure your passwords match.',
code='password_mismatch',
)
f = UserRegistration(data={})
self.assertTrue(f.has_error('password1'))
self.assertTrue(f.has_error('password1', 'required'))
self.assertFalse(f.has_error('password1', 'anything'))
f = UserRegistration(data={'password1': 'Hi', 'password2': 'Hi'})
self.assertTrue(f.has_error('password1'))
self.assertTrue(f.has_error('password1', 'min_length'))
self.assertFalse(f.has_error('password1', 'anything'))
self.assertFalse(f.has_error('password2'))
self.assertFalse(f.has_error('password2', 'anything'))
f = UserRegistration(data={'password1': 'Bonjour', 'password2': 'Hello'})
self.assertFalse(f.has_error('password1'))
self.assertFalse(f.has_error('password1', 'required'))
self.assertTrue(f.has_error(NON_FIELD_ERRORS))
self.assertTrue(f.has_error(NON_FIELD_ERRORS, 'password_mismatch'))
self.assertFalse(f.has_error(NON_FIELD_ERRORS, 'anything'))
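        # Illustrative sketch, not part of the original test: an unbound form has no
        # errors at all, so has_error() should always be False.
        self.assertFalse(UserRegistration().has_error('password1'))
        self.assertFalse(UserRegistration().has_error(NON_FIELD_ERRORS))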
def test_html_output_with_hidden_input_field_errors(self):
class TestForm(Form):
hidden_input = CharField(widget=HiddenInput)
def clean(self):
self.add_error(None, 'Form error')
f = TestForm(data={})
error_dict = {
'hidden_input': ['This field is required.'],
'__all__': ['Form error'],
}
self.assertEqual(f.errors, error_dict)
f.as_table()
self.assertEqual(f.errors, error_dict)
def test_dynamic_construction(self):
# It's possible to construct a Form dynamically by adding to the self.fields
# dictionary in __init__(). Don't forget to call Form.__init__() within the
# subclass' __init__().
class Person(Form):
first_name = CharField()
last_name = CharField()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['birthday'] = DateField()
p = Person(auto_id=False)
self.assertHTMLEqual(
p.as_table(),
"""<tr><th>First name:</th><td><input type="text" name="first_name" required></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" required></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" required></td></tr>"""
)
# Instances of a dynamic Form do not persist fields from one Form instance to
# the next.
class MyForm(Form):
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(
my_form.as_table(),
"""<tr><th>Field1:</th><td><input type="text" name="field1" required></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" required></td></tr>"""
)
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(
my_form.as_table(),
"""<tr><th>Field3:</th><td><input type="text" name="field3" required></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" required></td></tr>"""
)
class MyForm(Form):
default_field_1 = CharField()
default_field_2 = CharField()
def __init__(self, data=None, auto_id=False, field_list=[]):
Form.__init__(self, data, auto_id=auto_id)
for field in field_list:
self.fields[field[0]] = field[1]
field_list = [('field1', CharField()), ('field2', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(
my_form.as_table(),
"""<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" required></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" required></td></tr>
<tr><th>Field1:</th><td><input type="text" name="field1" required></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" required></td></tr>"""
)
field_list = [('field3', CharField()), ('field4', CharField())]
my_form = MyForm(field_list=field_list)
self.assertHTMLEqual(
my_form.as_table(),
"""<tr><th>Default field 1:</th><td><input type="text" name="default_field_1" required></td></tr>
<tr><th>Default field 2:</th><td><input type="text" name="default_field_2" required></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" required></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" required></td></tr>"""
)
# Similarly, changes to field attributes do not persist from one Form instance
# to the next.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
def __init__(self, names_required=False, *args, **kwargs):
super().__init__(*args, **kwargs)
if names_required:
self.fields['first_name'].required = True
self.fields['first_name'].widget.attrs['class'] = 'required'
self.fields['last_name'].required = True
self.fields['last_name'].widget.attrs['class'] = 'required'
        f = Person(names_required=False)
        self.assertEqual((f['first_name'].field.required, f['last_name'].field.required), (False, False))
        self.assertEqual((f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs), ({}, {}))
        f = Person(names_required=True)
        self.assertEqual((f['first_name'].field.required, f['last_name'].field.required), (True, True))
        self.assertEqual(
            (f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs),
            ({'class': 'required'}, {'class': 'required'}),
        )
        f = Person(names_required=False)
        self.assertEqual((f['first_name'].field.required, f['last_name'].field.required), (False, False))
        self.assertEqual((f['first_name'].field.widget.attrs, f['last_name'].field.widget.attrs), ({}, {}))
class Person(Form):
first_name = CharField(max_length=30)
last_name = CharField(max_length=30)
def __init__(self, name_max_length=None, *args, **kwargs):
super().__init__(*args, **kwargs)
if name_max_length:
self.fields['first_name'].max_length = name_max_length
self.fields['last_name'].max_length = name_max_length
        f = Person(name_max_length=None)
        self.assertEqual((f['first_name'].field.max_length, f['last_name'].field.max_length), (30, 30))
        f = Person(name_max_length=20)
        self.assertEqual((f['first_name'].field.max_length, f['last_name'].field.max_length), (20, 20))
        f = Person(name_max_length=None)
        self.assertEqual((f['first_name'].field.max_length, f['last_name'].field.max_length), (30, 30))
# Similarly, choices do not persist from one Form instance to the next.
# Refs #15127.
class Person(Form):
first_name = CharField(required=False)
last_name = CharField(required=False)
gender = ChoiceField(choices=(('f', 'Female'), ('m', 'Male')))
def __init__(self, allow_unspec_gender=False, *args, **kwargs):
super().__init__(*args, **kwargs)
if allow_unspec_gender:
self.fields['gender'].choices += (('u', 'Unspecified'),)
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
f = Person(allow_unspec_gender=True)
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male'), ('u', 'Unspecified')])
f = Person()
self.assertEqual(f['gender'].field.choices, [('f', 'Female'), ('m', 'Male')])
def test_validators_independence(self):
"""
The list of form field validators can be modified without polluting
other forms.
"""
class MyForm(Form):
myfield = CharField(max_length=25)
f1 = MyForm()
f2 = MyForm()
f1.fields['myfield'].validators[0] = MaxValueValidator(12)
self.assertNotEqual(f1.fields['myfield'].validators[0], f2.fields['myfield'].validators[0])
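        # Illustrative sketch, not part of the original test: the independence comes
        # from each form instance getting its own copy of the validators list.
        self.assertIsNot(f1.fields['myfield'].validators, f2.fields['myfield'].validators)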
def test_hidden_widget(self):
        # HiddenInput widgets are displayed differently in the as_table(), as_ul(),
# and as_p() output of a Form -- their verbose names are not displayed, and a
# separate row is not displayed. They're displayed in the last row of the
# form, directly after that row's form element.
class Person(Form):
first_name = CharField()
last_name = CharField()
hidden_text = CharField(widget=HiddenInput)
birthday = DateField()
p = Person(auto_id=False)
self.assertHTMLEqual(
p.as_table(),
"""<tr><th>First name:</th><td><input type="text" name="first_name" required></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" required></td></tr>
<tr><th>Birthday:</th>
<td><input type="text" name="birthday" required><input type="hidden" name="hidden_text"></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li>First name: <input type="text" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required><input type="hidden" name="hidden_text"></li>"""
)
self.assertHTMLEqual(
p.as_p(), """<p>First name: <input type="text" name="first_name" required></p>
<p>Last name: <input type="text" name="last_name" required></p>
<p>Birthday: <input type="text" name="birthday" required><input type="hidden" name="hidden_text"></p>"""
)
# With auto_id set, a HiddenInput still gets an ID, but it doesn't get a label.
p = Person(auto_id='id_%s')
self.assertHTMLEqual(
p.as_table(),
"""<tr><th><label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" id="id_first_name" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" id="id_last_name" required></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" id="id_birthday" required>
<input type="hidden" name="hidden_text" id="id_hidden_text"></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></li>
<li><label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></li>
<li><label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required>
<input type="hidden" name="hidden_text" id="id_hidden_text"></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<p><label for="id_first_name">First name:</label>
<input type="text" name="first_name" id="id_first_name" required></p>
<p><label for="id_last_name">Last name:</label>
<input type="text" name="last_name" id="id_last_name" required></p>
<p><label for="id_birthday">Birthday:</label>
<input type="text" name="birthday" id="id_birthday" required>
<input type="hidden" name="hidden_text" id="id_hidden_text"></p>"""
)
# If a field with a HiddenInput has errors, the as_table() and as_ul() output
# will include the error message(s) with the text "(Hidden field [fieldname]) "
# prepended. This message is displayed at the top of the output, regardless of
# its field's order in the form.
p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'}, auto_id=False)
self.assertHTMLEqual(
p.as_table(),
"""<tr><td colspan="2">
<ul class="errorlist nonfield"><li>(Hidden field hidden_text) This field is required.</li></ul></td></tr>
<tr><th>First name:</th><td><input type="text" name="first_name" value="John" required></td></tr>
<tr><th>Last name:</th><td><input type="text" name="last_name" value="Lennon" required></td></tr>
<tr><th>Birthday:</th><td><input type="text" name="birthday" value="1940-10-9" required>
<input type="hidden" name="hidden_text"></td></tr>"""
)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist nonfield"><li>(Hidden field hidden_text) This field is required.</li></ul></li>
<li>First name: <input type="text" name="first_name" value="John" required></li>
<li>Last name: <input type="text" name="last_name" value="Lennon" required></li>
<li>Birthday: <input type="text" name="birthday" value="1940-10-9" required>
<input type="hidden" name="hidden_text"></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<ul class="errorlist nonfield"><li>(Hidden field hidden_text) This field is required.</li></ul>
<p>First name: <input type="text" name="first_name" value="John" required></p>
<p>Last name: <input type="text" name="last_name" value="Lennon" required></p>
<p>Birthday: <input type="text" name="birthday" value="1940-10-9" required>
<input type="hidden" name="hidden_text"></p>"""
)
# A corner case: It's possible for a form to have only HiddenInputs.
class TestForm(Form):
foo = CharField(widget=HiddenInput)
bar = CharField(widget=HiddenInput)
p = TestForm(auto_id=False)
self.assertHTMLEqual(p.as_table(), '<input type="hidden" name="foo"><input type="hidden" name="bar">')
self.assertHTMLEqual(p.as_ul(), '<input type="hidden" name="foo"><input type="hidden" name="bar">')
self.assertHTMLEqual(p.as_p(), '<input type="hidden" name="foo"><input type="hidden" name="bar">')
def test_field_order(self):
# A Form's fields are displayed in the same order in which they were defined.
class TestForm(Form):
field1 = CharField()
field2 = CharField()
field3 = CharField()
field4 = CharField()
field5 = CharField()
field6 = CharField()
field7 = CharField()
field8 = CharField()
field9 = CharField()
field10 = CharField()
field11 = CharField()
field12 = CharField()
field13 = CharField()
field14 = CharField()
p = TestForm(auto_id=False)
self.assertHTMLEqual(p.as_table(), """<tr><th>Field1:</th><td><input type="text" name="field1" required></td></tr>
<tr><th>Field2:</th><td><input type="text" name="field2" required></td></tr>
<tr><th>Field3:</th><td><input type="text" name="field3" required></td></tr>
<tr><th>Field4:</th><td><input type="text" name="field4" required></td></tr>
<tr><th>Field5:</th><td><input type="text" name="field5" required></td></tr>
<tr><th>Field6:</th><td><input type="text" name="field6" required></td></tr>
<tr><th>Field7:</th><td><input type="text" name="field7" required></td></tr>
<tr><th>Field8:</th><td><input type="text" name="field8" required></td></tr>
<tr><th>Field9:</th><td><input type="text" name="field9" required></td></tr>
<tr><th>Field10:</th><td><input type="text" name="field10" required></td></tr>
<tr><th>Field11:</th><td><input type="text" name="field11" required></td></tr>
<tr><th>Field12:</th><td><input type="text" name="field12" required></td></tr>
<tr><th>Field13:</th><td><input type="text" name="field13" required></td></tr>
<tr><th>Field14:</th><td><input type="text" name="field14" required></td></tr>""")
def test_explicit_field_order(self):
class TestFormParent(Form):
field1 = CharField()
field2 = CharField()
field4 = CharField()
field5 = CharField()
field6 = CharField()
field_order = ['field6', 'field5', 'field4', 'field2', 'field1']
class TestForm(TestFormParent):
field3 = CharField()
field_order = ['field2', 'field4', 'field3', 'field5', 'field6']
class TestFormRemove(TestForm):
field1 = None
class TestFormMissing(TestForm):
field_order = ['field2', 'field4', 'field3', 'field5', 'field6', 'field1']
field1 = None
class TestFormInit(TestFormParent):
field3 = CharField()
field_order = None
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.order_fields(field_order=TestForm.field_order)
p = TestFormParent()
self.assertEqual(list(p.fields), TestFormParent.field_order)
p = TestFormRemove()
self.assertEqual(list(p.fields), TestForm.field_order)
p = TestFormMissing()
self.assertEqual(list(p.fields), TestForm.field_order)
p = TestForm()
self.assertEqual(list(p.fields), TestFormMissing.field_order)
p = TestFormInit()
order = [*TestForm.field_order, 'field1']
self.assertEqual(list(p.fields), order)
TestForm.field_order = ['unknown']
p = TestForm()
self.assertEqual(list(p.fields), ['field1', 'field2', 'field4', 'field5', 'field6', 'field3'])
def test_form_html_attributes(self):
# Some Field classes have an effect on the HTML attributes of their associated
# Widget. If you set max_length in a CharField and its associated widget is
# either a TextInput or PasswordInput, then the widget's rendered HTML will
# include the "maxlength" attribute.
class UserRegistration(Form):
username = CharField(max_length=10) # uses TextInput by default
password = CharField(max_length=10, widget=PasswordInput)
realname = CharField(max_length=10, widget=TextInput) # redundantly define widget, just to test
address = CharField() # no max_length defined here
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" maxlength="10" required></li>
<li>Password: <input type="password" name="password" maxlength="10" required></li>
<li>Realname: <input type="text" name="realname" maxlength="10" required></li>
<li>Address: <input type="text" name="address" required></li>"""
)
# If you specify a custom "attrs" that includes the "maxlength" attribute,
# the Field's max_length attribute will override whatever "maxlength" you specify
# in "attrs".
class UserRegistration(Form):
username = CharField(max_length=10, widget=TextInput(attrs={'maxlength': 20}))
password = CharField(max_length=10, widget=PasswordInput)
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" maxlength="10" required></li>
<li>Password: <input type="password" name="password" maxlength="10" required></li>"""
)
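        # Illustrative sketch, not part of the original test: assuming min_length is
        # reflected as a "minlength" attribute in the same way (Django 2.1+).
        class UserRegistration(Form):
            username = CharField(min_length=2, max_length=10)
        p = UserRegistration(auto_id=False)
        self.assertHTMLEqual(
            str(p['username']),
            '<input type="text" name="username" maxlength="10" minlength="2" required>',
        )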
def test_specifying_labels(self):
# You can specify the label for a field by using the 'label' argument to a Field
# class. If you don't specify 'label', Django will use the field name with
# underscores converted to spaces, and the initial letter capitalized.
class UserRegistration(Form):
username = CharField(max_length=10, label='Your username')
password1 = CharField(widget=PasswordInput)
password2 = CharField(widget=PasswordInput, label='Contraseña (de nuevo)')
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Your username: <input type="text" name="username" maxlength="10" required></li>
<li>Password1: <input type="password" name="password1" required></li>
<li>Contraseña (de nuevo): <input type="password" name="password2" required></li>"""
)
# Labels for as_* methods will only end in a colon if they don't end in other
# punctuation already.
class Questions(Form):
q1 = CharField(label='The first question')
q2 = CharField(label='What is your name?')
q3 = CharField(label='The answer to life is:')
q4 = CharField(label='Answer this question!')
q5 = CharField(label='The last question. Period.')
self.assertHTMLEqual(
Questions(auto_id=False).as_p(),
"""<p>The first question: <input type="text" name="q1" required></p>
<p>What is your name? <input type="text" name="q2" required></p>
<p>The answer to life is: <input type="text" name="q3" required></p>
<p>Answer this question! <input type="text" name="q4" required></p>
<p>The last question. Period. <input type="text" name="q5" required></p>"""
)
self.assertHTMLEqual(
Questions().as_p(),
"""<p><label for="id_q1">The first question:</label> <input type="text" name="q1" id="id_q1" required></p>
<p><label for="id_q2">What is your name?</label> <input type="text" name="q2" id="id_q2" required></p>
<p><label for="id_q3">The answer to life is:</label> <input type="text" name="q3" id="id_q3" required></p>
<p><label for="id_q4">Answer this question!</label> <input type="text" name="q4" id="id_q4" required></p>
<p><label for="id_q5">The last question. Period.</label> <input type="text" name="q5" id="id_q5" required></p>"""
)
# If a label is set to the empty string for a field, that field won't get a label.
class UserRegistration(Form):
username = CharField(max_length=10, label='')
password = CharField(widget=PasswordInput)
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(p.as_ul(), """<li> <input type="text" name="username" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>""")
p = UserRegistration(auto_id='id_%s')
self.assertHTMLEqual(
p.as_ul(),
"""<li> <input id="id_username" type="text" name="username" maxlength="10" required></li>
<li><label for="id_password">Password:</label>
<input type="password" name="password" id="id_password" required></li>"""
)
# If label is None, Django will auto-create the label from the field name. This
        # is the default behavior.
class UserRegistration(Form):
username = CharField(max_length=10, label=None)
password = CharField(widget=PasswordInput)
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration(auto_id='id_%s')
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="id_username">Username:</label>
<input id="id_username" type="text" name="username" maxlength="10" required></li>
<li><label for="id_password">Password:</label>
<input type="password" name="password" id="id_password" required></li>"""
)
def test_label_suffix(self):
# You can specify the 'label_suffix' argument to a Form class to modify the
# punctuation symbol used at the end of a label. By default, the colon (:) is
# used, and is only appended to the label if the label doesn't already end with a
# punctuation symbol: ., !, ? or :. If you specify a different suffix, it will
# be appended regardless of the last character of the label.
class FavoriteForm(Form):
color = CharField(label='Favorite color?')
animal = CharField(label='Favorite animal')
answer = CharField(label='Secret answer', label_suffix=' =')
f = FavoriteForm(auto_id=False)
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" required></li>
<li>Favorite animal: <input type="text" name="animal" required></li>
<li>Secret answer = <input type="text" name="answer" required></li>""")
f = FavoriteForm(auto_id=False, label_suffix='?')
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" required></li>
<li>Favorite animal? <input type="text" name="animal" required></li>
<li>Secret answer = <input type="text" name="answer" required></li>""")
f = FavoriteForm(auto_id=False, label_suffix='')
self.assertHTMLEqual(f.as_ul(), """<li>Favorite color? <input type="text" name="color" required></li>
<li>Favorite animal <input type="text" name="animal" required></li>
<li>Secret answer = <input type="text" name="answer" required></li>""")
f = FavoriteForm(auto_id=False, label_suffix='\u2192')
self.assertHTMLEqual(
f.as_ul(),
'<li>Favorite color? <input type="text" name="color" required></li>\n'
'<li>Favorite animal\u2192 <input type="text" name="animal" required></li>\n'
'<li>Secret answer = <input type="text" name="answer" required></li>'
)
def test_initial_data(self):
# You can specify initial data for a field by using the 'initial' argument to a
# Field class. This initial data is displayed when a Form is rendered with *no*
# data. It is not displayed when a Form is rendered with any data (including an
# empty dictionary). Also, the initial value is *not* used if data for a
# particular required field isn't provided.
class UserRegistration(Form):
username = CharField(max_length=10, initial='django')
password = CharField(widget=PasswordInput)
# Here, we're not submitting any data, so the initial value will be displayed.
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="django" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>"""
)
# Here, we're submitting data, so the initial value will *not* be displayed.
p = UserRegistration({}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration({'username': ''}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration({'username': 'foo'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="foo" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
# An 'initial' value is *not* used as a fallback if data is not provided. In this
# example, we don't provide a value for 'username', and the form raises a
# validation error rather than using the initial value for 'username'.
p = UserRegistration({'password': 'secret'})
self.assertEqual(p.errors['username'], ['This field is required.'])
self.assertFalse(p.is_valid())
def test_dynamic_initial_data(self):
# The previous technique dealt with "hard-coded" initial data, but it's also
# possible to specify initial data after you've already created the Form class
# (i.e., at runtime). Use the 'initial' parameter to the Form constructor. This
# should be a dictionary containing initial values for one or more fields in the
# form, keyed by field name.
class UserRegistration(Form):
username = CharField(max_length=10)
password = CharField(widget=PasswordInput)
# Here, we're not submitting any data, so the initial value will be displayed.
p = UserRegistration(initial={'username': 'django'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="django" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration(initial={'username': 'stephane'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="stephane" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>"""
)
# The 'initial' parameter is meaningless if you pass data.
p = UserRegistration({}, initial={'username': 'django'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration({'username': ''}, initial={'username': 'django'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
p = UserRegistration({'username': 'foo'}, initial={'username': 'django'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(), """<li>Username: <input type="text" name="username" value="foo" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>"""
)
# A dynamic 'initial' value is *not* used as a fallback if data is not provided.
# In this example, we don't provide a value for 'username', and the form raises a
# validation error rather than using the initial value for 'username'.
p = UserRegistration({'password': 'secret'}, initial={'username': 'django'})
self.assertEqual(p.errors['username'], ['This field is required.'])
self.assertFalse(p.is_valid())
# If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
# then the latter will get precedence.
class UserRegistration(Form):
username = CharField(max_length=10, initial='django')
password = CharField(widget=PasswordInput)
p = UserRegistration(initial={'username': 'babik'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="babik" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>"""
)
def test_callable_initial_data(self):
# The previous technique dealt with raw values as initial data, but it's also
# possible to specify callable data.
class UserRegistration(Form):
username = CharField(max_length=10)
password = CharField(widget=PasswordInput)
options = MultipleChoiceField(choices=[('f', 'foo'), ('b', 'bar'), ('w', 'whiz')])
# We need to define functions that get called later.
def initial_django():
return 'django'
def initial_stephane():
return 'stephane'
def initial_options():
return ['f', 'b']
def initial_other_options():
return ['b', 'w']
# Here, we're not submitting any data, so the initial value will be displayed.
p = UserRegistration(initial={'username': initial_django, 'options': initial_options}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="django" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>
<li>Options: <select multiple name="options" required>
<option value="f" selected>foo</option>
<option value="b" selected>bar</option>
<option value="w">whiz</option>
</select></li>"""
)
# The 'initial' parameter is meaningless if you pass data.
p = UserRegistration({}, initial={'username': initial_django, 'options': initial_options}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Options: <select multiple name="options" required>
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>"""
)
p = UserRegistration({'username': ''}, initial={'username': initial_django}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist"><li>This field is required.</li></ul>
Username: <input type="text" name="username" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Options: <select multiple name="options" required>
<option value="f">foo</option>
<option value="b">bar</option>
<option value="w">whiz</option>
</select></li>"""
)
p = UserRegistration(
{'username': 'foo', 'options': ['f', 'b']}, initial={'username': initial_django}, auto_id=False
)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="foo" maxlength="10" required></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required></li>
<li>Options: <select multiple name="options" required>
<option value="f" selected>foo</option>
<option value="b" selected>bar</option>
<option value="w">whiz</option>
</select></li>"""
)
# A callable 'initial' value is *not* used as a fallback if data is not provided.
# In this example, we don't provide a value for 'username', and the form raises a
# validation error rather than using the initial value for 'username'.
p = UserRegistration({'password': 'secret'}, initial={'username': initial_django, 'options': initial_options})
self.assertEqual(p.errors['username'], ['This field is required.'])
self.assertFalse(p.is_valid())
# If a Form defines 'initial' *and* 'initial' is passed as a parameter to Form(),
# then the latter will get precedence.
class UserRegistration(Form):
username = CharField(max_length=10, initial=initial_django)
password = CharField(widget=PasswordInput)
options = MultipleChoiceField(
choices=[('f', 'foo'), ('b', 'bar'), ('w', 'whiz')],
initial=initial_other_options,
)
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="django" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>
<li>Options: <select multiple name="options" required>
<option value="f">foo</option>
<option value="b" selected>bar</option>
<option value="w" selected>whiz</option>
</select></li>"""
)
p = UserRegistration(initial={'username': initial_stephane, 'options': initial_options}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="stephane" maxlength="10" required></li>
<li>Password: <input type="password" name="password" required></li>
<li>Options: <select multiple name="options" required>
<option value="f" selected>foo</option>
<option value="b" selected>bar</option>
<option value="w">whiz</option>
</select></li>"""
)
def test_get_initial_for_field(self):
class PersonForm(Form):
first_name = CharField(initial='John')
last_name = CharField(initial='Doe')
age = IntegerField()
occupation = CharField(initial=lambda: 'Unknown')
form = PersonForm(initial={'first_name': 'Jane'})
self.assertIsNone(form.get_initial_for_field(form.fields['age'], 'age'))
self.assertEqual(form.get_initial_for_field(form.fields['last_name'], 'last_name'), 'Doe')
# Form.initial overrides Field.initial.
self.assertEqual(form.get_initial_for_field(form.fields['first_name'], 'first_name'), 'Jane')
# Callables are evaluated.
self.assertEqual(form.get_initial_for_field(form.fields['occupation'], 'occupation'), 'Unknown')
def test_changed_data(self):
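# Form.changed_data lists the fields whose submitted values differ from their
# initial values, as the assertions below demonstrate.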
class Person(Form):
first_name = CharField(initial='Hans')
last_name = CharField(initial='Greatel')
birthday = DateField(initial=datetime.date(1974, 8, 16))
p = Person(data={'first_name': 'Hans', 'last_name': 'Scrmbl', 'birthday': '1974-08-16'})
self.assertTrue(p.is_valid())
self.assertNotIn('first_name', p.changed_data)
self.assertIn('last_name', p.changed_data)
self.assertNotIn('birthday', p.changed_data)
# A field raising ValidationError is always in changed_data
class PedanticField(forms.Field):
def to_python(self, value):
raise ValidationError('Whatever')
class Person2(Person):
pedantic = PedanticField(initial='whatever', show_hidden_initial=True)
p = Person2(data={
'first_name': 'Hans', 'last_name': 'Scrmbl', 'birthday': '1974-08-16',
'initial-pedantic': 'whatever',
})
self.assertFalse(p.is_valid())
self.assertIn('pedantic', p.changed_data)
def test_boundfield_values(self):
# It's possible to get to the value which would be used for rendering
# the widget for a field by using the BoundField's value method.
class UserRegistration(Form):
username = CharField(max_length=10, initial='djangonaut')
password = CharField(widget=PasswordInput)
unbound = UserRegistration()
bound = UserRegistration({'password': 'foo'})
self.assertIsNone(bound['username'].value())
self.assertEqual(unbound['username'].value(), 'djangonaut')
self.assertEqual(bound['password'].value(), 'foo')
self.assertIsNone(unbound['password'].value())
def test_boundfield_initial_called_once(self):
"""
Multiple calls to BoundField().value() in an unbound form should return
the same result each time (#24391).
"""
class MyForm(Form):
name = CharField(max_length=10, initial=uuid.uuid4)
form = MyForm()
name = form['name']
self.assertEqual(name.value(), name.value())
# BoundField is also cached
self.assertIs(form['name'], name)
def test_boundfield_value_disabled_callable_initial(self):
class PersonForm(Form):
name = CharField(initial=lambda: 'John Doe', disabled=True)
# Without form data.
form = PersonForm()
self.assertEqual(form['name'].value(), 'John Doe')
# With form data. As the field is disabled, the value should not be
# affected by the form data.
form = PersonForm({})
self.assertEqual(form['name'].value(), 'John Doe')
def test_custom_boundfield(self):
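# Field.get_bound_field() lets a field control the object returned when the
# form is indexed; here it returns a plain (form, name) tuple instead of a
# BoundField.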
class CustomField(CharField):
def get_bound_field(self, form, name):
return (form, name)
class SampleForm(Form):
name = CustomField()
f = SampleForm()
self.assertEqual(f['name'], (f, 'name'))
def test_initial_datetime_values(self):
now = datetime.datetime.now()
# Nix microseconds (since they should be ignored). #22502
now_no_ms = now.replace(microsecond=0)
if now == now_no_ms:
now = now.replace(microsecond=1)
def delayed_now():
return now
def delayed_now_time():
return now.time()
class HiddenInputWithoutMicrosec(HiddenInput):
supports_microseconds = False
class TextInputWithoutMicrosec(TextInput):
supports_microseconds = False
class DateTimeForm(Form):
auto_timestamp = DateTimeField(initial=delayed_now)
auto_time_only = TimeField(initial=delayed_now_time)
supports_microseconds = DateTimeField(initial=delayed_now, widget=TextInput)
hi_default_microsec = DateTimeField(initial=delayed_now, widget=HiddenInput)
hi_without_microsec = DateTimeField(initial=delayed_now, widget=HiddenInputWithoutMicrosec)
ti_without_microsec = DateTimeField(initial=delayed_now, widget=TextInputWithoutMicrosec)
unbound = DateTimeForm()
self.assertEqual(unbound['auto_timestamp'].value(), now_no_ms)
self.assertEqual(unbound['auto_time_only'].value(), now_no_ms.time())
self.assertEqual(unbound['supports_microseconds'].value(), now)
self.assertEqual(unbound['hi_default_microsec'].value(), now)
self.assertEqual(unbound['hi_without_microsec'].value(), now_no_ms)
self.assertEqual(unbound['ti_without_microsec'].value(), now_no_ms)
def test_datetime_clean_initial_callable_disabled(self):
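# A disabled field falls back to its (callable) initial value during
# cleaning, even when no data is submitted for it.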
now = datetime.datetime(2006, 10, 25, 14, 30, 45, 123456)
class DateTimeForm(forms.Form):
dt = DateTimeField(initial=lambda: now, disabled=True)
form = DateTimeForm({})
self.assertEqual(form.errors, {})
self.assertEqual(form.cleaned_data, {'dt': now})
def test_datetime_changed_data_callable_with_microseconds(self):
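# The disabled field isn't reported in changed_data, even though the
# submitted value lacks the microseconds present in the callable initial.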
class DateTimeForm(forms.Form):
dt = DateTimeField(initial=lambda: datetime.datetime(2006, 10, 25, 14, 30, 45, 123456), disabled=True)
form = DateTimeForm({'dt': '2006-10-25 14:30:45'})
self.assertEqual(form.changed_data, [])
def test_help_text(self):
# You can specify descriptive text for a field by using the 'help_text' argument.
class UserRegistration(Form):
username = CharField(max_length=10, help_text='e.g., [email protected]')
password = CharField(widget=PasswordInput, help_text='Wählen Sie mit Bedacht.')
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" maxlength="10" required>
<span class="helptext">e.g., [email protected]</span></li>
<li>Password: <input type="password" name="password" required>
<span class="helptext">Wählen Sie mit Bedacht.</span></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<p>Username: <input type="text" name="username" maxlength="10" required>
<span class="helptext">e.g., [email protected]</span></p>
<p>Password: <input type="password" name="password" required>
<span class="helptext">Wählen Sie mit Bedacht.</span></p>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" required><br>
<span class="helptext">e.g., [email protected]</span></td></tr>
<tr><th>Password:</th><td><input type="password" name="password" required><br>
<span class="helptext">Wählen Sie mit Bedacht.</span></td></tr>"""
)
# The help text is displayed whether or not data is provided for the form.
p = UserRegistration({'username': 'foo'}, auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" value="foo" maxlength="10" required>
<span class="helptext">e.g., [email protected]</span></li>
<li><ul class="errorlist"><li>This field is required.</li></ul>
Password: <input type="password" name="password" required>
<span class="helptext">Wählen Sie mit Bedacht.</span></li>"""
)
# help_text is not displayed for hidden fields. It can be used for documentation
# purposes, though.
class UserRegistration(Form):
username = CharField(max_length=10, help_text='e.g., [email protected]')
password = CharField(widget=PasswordInput)
next = CharField(widget=HiddenInput, initial='/', help_text='Redirect destination')
p = UserRegistration(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>Username: <input type="text" name="username" maxlength="10" required>
<span class="helptext">e.g., [email protected]</span></li>
<li>Password: <input type="password" name="password" required>
<input type="hidden" name="next" value="/"></li>"""
)
def test_subclassing_forms(self):
# You can subclass a Form to add fields. The resulting form subclass will have
# all of the fields of the parent Form, plus whichever fields you define in the
# subclass.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
class Musician(Person):
instrument = CharField()
p = Person(auto_id=False)
self.assertHTMLEqual(
p.as_ul(),
"""<li>First name: <input type="text" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required></li>"""
)
m = Musician(auto_id=False)
self.assertHTMLEqual(
m.as_ul(),
"""<li>First name: <input type="text" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required></li>
<li>Instrument: <input type="text" name="instrument" required></li>"""
)
# Yes, you can subclass multiple forms. The fields are added in the order in
# which the parent classes are listed.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
class Instrument(Form):
instrument = CharField()
class Beatle(Person, Instrument):
haircut_type = CharField()
b = Beatle(auto_id=False)
self.assertHTMLEqual(b.as_ul(), """<li>Instrument: <input type="text" name="instrument" required></li>
<li>First name: <input type="text" name="first_name" required></li>
<li>Last name: <input type="text" name="last_name" required></li>
<li>Birthday: <input type="text" name="birthday" required></li>
<li>Haircut type: <input type="text" name="haircut_type" required></li>""")
def test_forms_with_prefixes(self):
# Sometimes it's necessary to have multiple forms display on the same HTML page,
# or multiple copies of the same form. We can accomplish this with form prefixes.
# Pass the keyword argument 'prefix' to the Form constructor to use this feature.
# This value will be prepended to each HTML form field name. One way to think
# about this is "namespaces for HTML forms". Notice that in the data argument,
# each field's key has the prefix, in this case 'person1', prepended to the
# actual field name.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
data = {
'person1-first_name': 'John',
'person1-last_name': 'Lennon',
'person1-birthday': '1940-10-9'
}
p = Person(data, prefix='person1')
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="id_person1-first_name">First name:</label>
<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" required></li>
<li><label for="id_person1-last_name">Last name:</label>
<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" required></li>
<li><label for="id_person1-birthday">Birthday:</label>
<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" required></li>"""
)
self.assertHTMLEqual(
str(p['first_name']),
'<input type="text" name="person1-first_name" value="John" id="id_person1-first_name" required>'
)
self.assertHTMLEqual(
str(p['last_name']),
'<input type="text" name="person1-last_name" value="Lennon" id="id_person1-last_name" required>'
)
self.assertHTMLEqual(
str(p['birthday']),
'<input type="text" name="person1-birthday" value="1940-10-9" id="id_person1-birthday" required>'
)
self.assertEqual(p.errors, {})
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], 'John')
self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
# Let's try submitting some bad data to make sure form.errors and field.errors
# work as expected.
data = {
'person1-first_name': '',
'person1-last_name': '',
'person1-birthday': ''
}
p = Person(data, prefix='person1')
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['last_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
self.assertEqual(p['first_name'].errors, ['This field is required.'])
# Accessing a nonexistent field.
with self.assertRaises(KeyError):
p['person1-first_name'].errors
# In this example, the data doesn't have a prefix, but the form requires it, so
# the form doesn't "see" the fields.
data = {
'first_name': 'John',
'last_name': 'Lennon',
'birthday': '1940-10-9'
}
p = Person(data, prefix='person1')
self.assertEqual(p.errors['first_name'], ['This field is required.'])
self.assertEqual(p.errors['last_name'], ['This field is required.'])
self.assertEqual(p.errors['birthday'], ['This field is required.'])
# With prefixes, a single data dictionary can hold data for multiple instances
# of the same form.
data = {
'person1-first_name': 'John',
'person1-last_name': 'Lennon',
'person1-birthday': '1940-10-9',
'person2-first_name': 'Jim',
'person2-last_name': 'Morrison',
'person2-birthday': '1943-12-8'
}
p1 = Person(data, prefix='person1')
self.assertTrue(p1.is_valid())
self.assertEqual(p1.cleaned_data['first_name'], 'John')
self.assertEqual(p1.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p1.cleaned_data['birthday'], datetime.date(1940, 10, 9))
p2 = Person(data, prefix='person2')
self.assertTrue(p2.is_valid())
self.assertEqual(p2.cleaned_data['first_name'], 'Jim')
self.assertEqual(p2.cleaned_data['last_name'], 'Morrison')
self.assertEqual(p2.cleaned_data['birthday'], datetime.date(1943, 12, 8))
# By default, forms append a hyphen between the prefix and the field name, but a
# form can alter that behavior by implementing the add_prefix() method. This
# method takes a field name and returns the prefixed field name, according to
# self.prefix.
class Person(Form):
first_name = CharField()
last_name = CharField()
birthday = DateField()
def add_prefix(self, field_name):
return '%s-prefix-%s' % (self.prefix, field_name) if self.prefix else field_name
p = Person(prefix='foo')
self.assertHTMLEqual(
p.as_ul(),
"""<li><label for="id_foo-prefix-first_name">First name:</label>
<input type="text" name="foo-prefix-first_name" id="id_foo-prefix-first_name" required></li>
<li><label for="id_foo-prefix-last_name">Last name:</label>
<input type="text" name="foo-prefix-last_name" id="id_foo-prefix-last_name" required></li>
<li><label for="id_foo-prefix-birthday">Birthday:</label>
<input type="text" name="foo-prefix-birthday" id="id_foo-prefix-birthday" required></li>"""
)
data = {
'foo-prefix-first_name': 'John',
'foo-prefix-last_name': 'Lennon',
'foo-prefix-birthday': '1940-10-9'
}
p = Person(data, prefix='foo')
self.assertTrue(p.is_valid())
self.assertEqual(p.cleaned_data['first_name'], 'John')
self.assertEqual(p.cleaned_data['last_name'], 'Lennon')
self.assertEqual(p.cleaned_data['birthday'], datetime.date(1940, 10, 9))
def test_class_prefix(self):
# Prefix can be also specified at the class level.
class Person(Form):
first_name = CharField()
prefix = 'foo'
p = Person()
self.assertEqual(p.prefix, 'foo')
p = Person(prefix='bar')
self.assertEqual(p.prefix, 'bar')
def test_forms_with_null_boolean(self):
# NullBooleanField is a bit of a special case because its presentation (widget)
# is different from its data. This is handled transparently, though.
class Person(Form):
name = CharField()
is_cool = NullBooleanField()
p = Person({'name': 'Joe'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': '1'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': '2'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': '3'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': True}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': False}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': 'unknown'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': 'true'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>""")
p = Person({'name': 'Joe', 'is_cool': 'false'}, auto_id=False)
self.assertHTMLEqual(str(p['is_cool']), """<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>""")
def test_forms_with_file_fields(self):
# FileFields are a special case because they take their data from request.FILES,
# not request.POST.
class FileForm(Form):
file1 = FileField()
f = FileForm(auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td><input type="file" name="file1" required></td></tr>',
)
f = FileForm(data={}, files={}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td>'
'<ul class="errorlist"><li>This field is required.</li></ul>'
'<input type="file" name="file1" required></td></tr>'
)
f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', b'')}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td>'
'<ul class="errorlist"><li>The submitted file is empty.</li></ul>'
'<input type="file" name="file1" required></td></tr>'
)
f = FileForm(data={}, files={'file1': 'something that is not a file'}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td>'
'<ul class="errorlist"><li>No file was submitted. Check the '
'encoding type on the form.</li></ul>'
'<input type="file" name="file1" required></td></tr>'
)
f = FileForm(data={}, files={'file1': SimpleUploadedFile('name', b'some content')}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td><input type="file" name="file1" required></td></tr>',
)
self.assertTrue(f.is_valid())
file1 = SimpleUploadedFile('我隻氣墊船裝滿晒鱔.txt', 'मेरी मँडराने वाली नाव सर्पमीनों से भरी ह'.encode())
f = FileForm(data={}, files={'file1': file1}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td><input type="file" name="file1" required></td></tr>',
)
# A required file field with initial data should not contain the
# required HTML attribute. The file input is left blank by the user to
# keep the existing, initial value.
f = FileForm(initial={'file1': 'resume.txt'}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td><input type="file" name="file1"></td></tr>',
)
def test_filefield_initial_callable(self):
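# A callable initial on a FileField is evaluated and used as the cleaned
# value when no file is uploaded, as the assertions below show.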
class FileForm(forms.Form):
file1 = forms.FileField(initial=lambda: 'resume.txt')
f = FileForm({})
self.assertEqual(f.errors, {})
self.assertEqual(f.cleaned_data['file1'], 'resume.txt')
def test_filefield_with_fileinput_required(self):
class FileForm(Form):
file1 = forms.FileField(widget=FileInput)
f = FileForm(auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td>'
'<input type="file" name="file1" required></td></tr>',
)
# A required file field with initial data doesn't contain the required
# HTML attribute. The file input is left blank by the user to keep the
# existing, initial value.
f = FileForm(initial={'file1': 'resume.txt'}, auto_id=False)
self.assertHTMLEqual(
f.as_table(),
'<tr><th>File1:</th><td><input type="file" name="file1"></td></tr>',
)
def test_basic_processing_in_view(self):
class UserRegistration(Form):
username = CharField(max_length=10)
password1 = CharField(widget=PasswordInput)
password2 = CharField(widget=PasswordInput)
def clean(self):
if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
self.cleaned_data['password1'] != self.cleaned_data['password2']):
raise ValidationError('Please make sure your passwords match.')
return self.cleaned_data
def my_function(method, post_data):
if method == 'POST':
form = UserRegistration(post_data, auto_id=False)
else:
form = UserRegistration(auto_id=False)
if form.is_valid():
return 'VALID: %r' % sorted(form.cleaned_data.items())
t = Template(
'<form method="post">\n'
'<table>\n{{ form }}\n</table>\n<input type="submit" required>\n</form>'
)
return t.render(Context({'form': form}))
# Case 1: GET (an empty form, with no errors).
self.assertHTMLEqual(my_function('GET', {}), """<form method="post">
<table>
<tr><th>Username:</th><td><input type="text" name="username" maxlength="10" required></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" required></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" required></td></tr>
</table>
<input type="submit" required>
</form>""")
# Case 2: POST with erroneous data (a redisplayed form, with errors).
self.assertHTMLEqual(
my_function('POST', {'username': 'this-is-a-long-username', 'password1': 'foo', 'password2': 'bar'}),
"""<form method="post">
<table>
<tr><td colspan="2"><ul class="errorlist nonfield"><li>Please make sure your passwords match.</li></ul></td></tr>
<tr><th>Username:</th><td><ul class="errorlist">
<li>Ensure this value has at most 10 characters (it has 23).</li></ul>
<input type="text" name="username" value="this-is-a-long-username" maxlength="10" required></td></tr>
<tr><th>Password1:</th><td><input type="password" name="password1" required></td></tr>
<tr><th>Password2:</th><td><input type="password" name="password2" required></td></tr>
</table>
<input type="submit" required>
</form>"""
)
# Case 3: POST with valid data (the success message).
self.assertEqual(
my_function('POST', {'username': 'adrian', 'password1': 'secret', 'password2': 'secret'}),
"VALID: [('password1', 'secret'), ('password2', 'secret'), ('username', 'adrian')]"
)
def test_templates_with_forms(self):
class UserRegistration(Form):
username = CharField(max_length=10, help_text="Good luck picking a username that doesn't already exist.")
password1 = CharField(widget=PasswordInput)
password2 = CharField(widget=PasswordInput)
def clean(self):
if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
self.cleaned_data['password1'] != self.cleaned_data['password2']):
raise ValidationError('Please make sure your passwords match.')
return self.cleaned_data
# You have full flexibility in displaying form fields in a template. Just pass a
# Form instance to the template, and use "dot" access to refer to individual
# fields. Note, however, that this flexibility comes with the responsibility of
# displaying all the errors, including any that might not be associated with a
# particular field.
t = Template('''<form>
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form>
<p><label>Your username: <input type="text" name="username" maxlength="10" required></label></p>
<p><label>Password: <input type="password" name="password1" required></label></p>
<p><label>Password (again): <input type="password" name="password2" required></label></p>
<input type="submit" required>
</form>""")
self.assertHTMLEqual(
t.render(Context({'form': UserRegistration({'username': 'django'}, auto_id=False)})),
"""<form>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" required></label></p>
<ul class="errorlist"><li>This field is required.</li></ul><p>
<label>Password: <input type="password" name="password1" required></label></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p><label>Password (again): <input type="password" name="password2" required></label></p>
<input type="submit" required>
</form>"""
)
# Use form.[field].label to output a field's label. You can specify the label for
# a field by using the 'label' argument to a Field class. If you don't specify
# 'label', Django will use the field name with underscores converted to spaces,
# and the initial letter capitalized.
t = Template('''<form>
<p><label>{{ form.username.label }}: {{ form.username }}</label></p>
<p><label>{{ form.password1.label }}: {{ form.password1 }}</label></p>
<p><label>{{ form.password2.label }}: {{ form.password2 }}</label></p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form>
<p><label>Username: <input type="text" name="username" maxlength="10" required></label></p>
<p><label>Password1: <input type="password" name="password1" required></label></p>
<p><label>Password2: <input type="password" name="password2" required></label></p>
<input type="submit" required>
</form>""")
# Use form.[field].label_tag to output a field's label with a <label> tag
# wrapped around it, but *only* if the given field has an "id" attribute.
# Recall from above that passing the "auto_id" argument to a Form gives each
# field an "id" attribute.
t = Template('''<form>
<p>{{ form.username.label_tag }} {{ form.username }}</p>
<p>{{ form.password1.label_tag }} {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }} {{ form.password2 }}</p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id=False)})), """<form>
<p>Username: <input type="text" name="username" maxlength="10" required></p>
<p>Password1: <input type="password" name="password1" required></p>
<p>Password2: <input type="password" name="password2" required></p>
<input type="submit" required>
</form>""")
self.assertHTMLEqual(t.render(Context({'form': UserRegistration(auto_id='id_%s')})), """<form>
<p><label for="id_username">Username:</label>
<input id="id_username" type="text" name="username" maxlength="10" required></p>
<p><label for="id_password1">Password1:</label>
<input type="password" name="password1" id="id_password1" required></p>
<p><label for="id_password2">Password2:</label>
<input type="password" name="password2" id="id_password2" required></p>
<input type="submit" required>
</form>""")
# Use form.[field].help_text to output a field's help text. If the given field
# does not have help text, nothing will be output.
t = Template('''<form>
<p>{{ form.username.label_tag }} {{ form.username }}<br>{{ form.username.help_text }}</p>
<p>{{ form.password1.label_tag }} {{ form.password1 }}</p>
<p>{{ form.password2.label_tag }} {{ form.password2 }}</p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(
t.render(Context({'form': UserRegistration(auto_id=False)})),
"""<form>
<p>Username: <input type="text" name="username" maxlength="10" required><br>
Good luck picking a username that doesn't already exist.</p>
<p>Password1: <input type="password" name="password1" required></p>
<p>Password2: <input type="password" name="password2" required></p>
<input type="submit" required>
</form>"""
)
self.assertEqual(
Template('{{ form.password1.help_text }}').render(Context({'form': UserRegistration(auto_id=False)})),
''
)
# To display the errors that aren't associated with a particular field -- e.g.,
# the errors caused by Form.clean() -- use {{ form.non_field_errors }} in the
# template. If used on its own, it is displayed as a <ul> (or an empty string, if
# the list of errors is empty). You can also use it in {% if %} statements.
t = Template('''<form>
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(
t.render(Context({
'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
})),
"""<form>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" required></label></p>
<p><label>Password: <input type="password" name="password1" required></label></p>
<p><label>Password (again): <input type="password" name="password2" required></label></p>
<input type="submit" required>
</form>"""
)
t = Template('''<form>
{{ form.non_field_errors }}
{{ form.username.errors.as_ul }}<p><label>Your username: {{ form.username }}</label></p>
{{ form.password1.errors.as_ul }}<p><label>Password: {{ form.password1 }}</label></p>
{{ form.password2.errors.as_ul }}<p><label>Password (again): {{ form.password2 }}</label></p>
<input type="submit" required>
</form>''')
self.assertHTMLEqual(
t.render(Context({
'form': UserRegistration({'username': 'django', 'password1': 'foo', 'password2': 'bar'}, auto_id=False)
})),
"""<form>
<ul class="errorlist nonfield"><li>Please make sure your passwords match.</li></ul>
<p><label>Your username: <input type="text" name="username" value="django" maxlength="10" required></label></p>
<p><label>Password: <input type="password" name="password1" required></label></p>
<p><label>Password (again): <input type="password" name="password2" required></label></p>
<input type="submit" required>
</form>"""
)
def test_empty_permitted(self):
# Sometimes (pretty much in formsets) we want to allow a form to pass validation
# if it is completely empty. We can accomplish this by using the empty_permitted
# argument to a form constructor.
class SongForm(Form):
artist = CharField()
name = CharField()
# First let's show what happens if empty_permitted=False (the default):
data = {'artist': '', 'song': ''}
form = SongForm(data, empty_permitted=False)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'name': ['This field is required.'], 'artist': ['This field is required.']})
self.assertEqual(form.cleaned_data, {})
# Now let's show what happens when empty_permitted=True and the form is empty.
form = SongForm(data, empty_permitted=True, use_required_attribute=False)
self.assertTrue(form.is_valid())
self.assertEqual(form.errors, {})
self.assertEqual(form.cleaned_data, {})
# But if we fill in data for one of the fields, the form is no longer empty and
# the whole thing must pass validation.
data = {'artist': 'The Doors', 'song': ''}
form = SongForm(data, empty_permitted=False)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'name': ['This field is required.']})
self.assertEqual(form.cleaned_data, {'artist': 'The Doors'})
# If a field is not given in the data then None is returned for its data. Let's
# make sure that None is treated accordingly when checking for empty_permitted.
data = {'artist': None, 'song': ''}
form = SongForm(data, empty_permitted=True, use_required_attribute=False)
self.assertTrue(form.is_valid())
# However, we *really* need to be sure we are checking for None, because any
# value in initial that is falsy on a boolean check still needs to be treated
# literally.
class PriceForm(Form):
amount = FloatField()
qty = IntegerField()
data = {'amount': '0.0', 'qty': ''}
form = PriceForm(data, initial={'amount': 0.0}, empty_permitted=True, use_required_attribute=False)
self.assertTrue(form.is_valid())
def test_empty_permitted_and_use_required_attribute(self):
msg = (
'The empty_permitted and use_required_attribute arguments may not '
'both be True.'
)
with self.assertRaisesMessage(ValueError, msg):
Person(empty_permitted=True, use_required_attribute=True)
def test_extracting_hidden_and_visible(self):
class SongForm(Form):
token = CharField(widget=HiddenInput)
artist = CharField()
name = CharField()
form = SongForm()
self.assertEqual([f.name for f in form.hidden_fields()], ['token'])
self.assertEqual([f.name for f in form.visible_fields()], ['artist', 'name'])
def test_hidden_initial_gets_id(self):
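# With show_hidden_initial=True, the extra hidden input rendered alongside
# the field gets its own id, prefixed with 'initial-'.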
class MyForm(Form):
field1 = CharField(max_length=50, show_hidden_initial=True)
self.assertHTMLEqual(
MyForm().as_table(),
'<tr><th><label for="id_field1">Field1:</label></th>'
'<td><input id="id_field1" type="text" name="field1" maxlength="50" required>'
'<input type="hidden" name="initial-field1" id="initial-id_field1"></td></tr>'
)
def test_error_html_required_html_classes(self):
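# error_css_class and required_css_class, set on the form instance here, are
# added to the row and label classes of rows with errors and of required
# fields, respectively.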
class Person(Form):
name = CharField()
is_cool = NullBooleanField()
email = EmailField(required=False)
age = IntegerField()
p = Person({})
p.error_css_class = 'error'
p.required_css_class = 'required'
self.assertHTMLEqual(
p.as_ul(),
"""<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul>
<label class="required" for="id_name">Name:</label> <input type="text" name="name" id="id_name" required></li>
<li class="required"><label class="required" for="id_is_cool">Is cool:</label>
<select name="is_cool" id="id_is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select></li>
<li><label for="id_email">Email:</label> <input type="email" name="email" id="id_email"></li>
<li class="required error"><ul class="errorlist"><li>This field is required.</li></ul>
<label class="required" for="id_age">Age:</label> <input type="number" name="age" id="id_age" required></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label class="required" for="id_name">Name:</label>
<input type="text" name="name" id="id_name" required></p>
<p class="required"><label class="required" for="id_is_cool">Is cool:</label>
<select name="is_cool" id="id_is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select></p>
<p><label for="id_email">Email:</label> <input type="email" name="email" id="id_email"></p>
<ul class="errorlist"><li>This field is required.</li></ul>
<p class="required error"><label class="required" for="id_age">Age:</label>
<input type="number" name="age" id="id_age" required></p>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr class="required error">
<th><label class="required" for="id_name">Name:</label></th>
<td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="text" name="name" id="id_name" required></td></tr>
<tr class="required"><th><label class="required" for="id_is_cool">Is cool:</label></th>
<td><select name="is_cool" id="id_is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select></td></tr>
<tr><th><label for="id_email">Email:</label></th><td>
<input type="email" name="email" id="id_email"></td></tr>
<tr class="required error"><th><label class="required" for="id_age">Age:</label></th>
<td><ul class="errorlist"><li>This field is required.</li></ul>
<input type="number" name="age" id="id_age" required></td></tr>"""
)
def test_label_has_required_css_class(self):
"""
#17922 - required_css_class is added to the label_tag() of required fields.
"""
class SomeForm(Form):
required_css_class = 'required'
field = CharField(max_length=10)
field2 = IntegerField(required=False)
f = SomeForm({'field': 'test'})
self.assertHTMLEqual(f['field'].label_tag(), '<label for="id_field" class="required">Field:</label>')
self.assertHTMLEqual(
f['field'].label_tag(attrs={'class': 'foo'}),
'<label for="id_field" class="foo required">Field:</label>'
)
self.assertHTMLEqual(f['field2'].label_tag(), '<label for="id_field2">Field2:</label>')
def test_label_split_datetime_not_displayed(self):
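# A field rendered with a hidden widget (SplitHiddenDateTimeWidget here) gets
# no visible label row in the rendered output.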
class EventForm(Form):
happened_at = SplitDateTimeField(widget=SplitHiddenDateTimeWidget)
form = EventForm()
self.assertHTMLEqual(
form.as_ul(),
'<input type="hidden" name="happened_at_0" id="id_happened_at_0">'
'<input type="hidden" name="happened_at_1" id="id_happened_at_1">'
)
def test_multivalue_field_validation(self):
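# Validators attached to a MultiValueField run against the compressed value
# returned by compress(), so 'bad value' below comes from joining the parts.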
def bad_names(value):
if value == 'bad value':
raise ValidationError('bad value not allowed')
class NameField(MultiValueField):
def __init__(self, fields=(), *args, **kwargs):
fields = (CharField(label='First name', max_length=10),
CharField(label='Last name', max_length=10))
super().__init__(fields=fields, *args, **kwargs)
def compress(self, data_list):
return ' '.join(data_list)
class NameForm(Form):
name = NameField(validators=[bad_names])
form = NameForm(data={'name': ['bad', 'value']})
form.full_clean()
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'name': ['bad value not allowed']})
form = NameForm(data={'name': ['should be overly', 'long for the field names']})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors, {
'name': [
'Ensure this value has at most 10 characters (it has 16).',
'Ensure this value has at most 10 characters (it has 24).',
],
}
)
form = NameForm(data={'name': ['fname', 'lname']})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'name': 'fname lname'})
def test_multivalue_deep_copy(self):
"""
#19298 -- MultiValueField needs to override the default as it needs
to deep-copy subfields.
"""
class ChoicesField(MultiValueField):
def __init__(self, fields=(), *args, **kwargs):
fields = (
ChoiceField(label='Rank', choices=((1, 1), (2, 2))),
CharField(label='Name', max_length=10),
)
super().__init__(fields=fields, *args, **kwargs)
field = ChoicesField()
field2 = copy.deepcopy(field)
self.assertIsInstance(field2, ChoicesField)
self.assertIsNot(field2.fields, field.fields)
self.assertIsNot(field2.fields[0].choices, field.fields[0].choices)
def test_multivalue_initial_data(self):
"""
#23674 -- invalid initial data should not break form.changed_data()
"""
class DateAgeField(MultiValueField):
def __init__(self, fields=(), *args, **kwargs):
fields = (DateField(label="Date"), IntegerField(label="Age"))
super().__init__(fields=fields, *args, **kwargs)
class DateAgeForm(Form):
date_age = DateAgeField()
data = {"date_age": ["1998-12-06", 16]}
form = DateAgeForm(data, initial={"date_age": ["200-10-10", 14]})
self.assertTrue(form.has_changed())
def test_multivalue_optional_subfields(self):
class PhoneField(MultiValueField):
def __init__(self, *args, **kwargs):
fields = (
CharField(label='Country Code', validators=[
RegexValidator(r'^\+[0-9]{1,2}$', message='Enter a valid country code.')]),
CharField(label='Phone Number'),
CharField(label='Extension', error_messages={'incomplete': 'Enter an extension.'}),
CharField(label='Label', required=False, help_text='E.g. home, work.'),
)
super().__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list:
return '%s.%s ext. %s (label: %s)' % tuple(data_list)
return None
# An empty value for any field will raise a `required` error on a
# required `MultiValueField`.
f = PhoneField()
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean('')
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(None)
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean([])
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(['+61'])
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(['+61', '287654321', '123'])
self.assertEqual('+61.287654321 ext. 123 (label: Home)', f.clean(['+61', '287654321', '123', 'Home']))
with self.assertRaisesMessage(ValidationError, "'Enter a valid country code.'"):
f.clean(['61', '287654321', '123', 'Home'])
# Empty values for fields will NOT raise a `required` error on an
# optional `MultiValueField`
f = PhoneField(required=False)
self.assertIsNone(f.clean(''))
self.assertIsNone(f.clean(None))
self.assertIsNone(f.clean([]))
self.assertEqual('+61. ext. (label: )', f.clean(['+61']))
self.assertEqual('+61.287654321 ext. 123 (label: )', f.clean(['+61', '287654321', '123']))
self.assertEqual('+61.287654321 ext. 123 (label: Home)', f.clean(['+61', '287654321', '123', 'Home']))
with self.assertRaisesMessage(ValidationError, "'Enter a valid country code.'"):
f.clean(['61', '287654321', '123', 'Home'])
# For a required `MultiValueField` with `require_all_fields=False`, a
# `required` error will only be raised if all fields are empty. Fields
# can individually be required or optional. An empty value for any
# required field will raise an `incomplete` error.
f = PhoneField(require_all_fields=False)
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean('')
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(None)
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean([])
with self.assertRaisesMessage(ValidationError, "'Enter a complete value.'"):
f.clean(['+61'])
self.assertEqual('+61.287654321 ext. 123 (label: )', f.clean(['+61', '287654321', '123']))
with self.assertRaisesMessage(ValidationError, "'Enter a complete value.', 'Enter an extension.'"):
f.clean(['', '', '', 'Home'])
with self.assertRaisesMessage(ValidationError, "'Enter a valid country code.'"):
f.clean(['61', '287654321', '123', 'Home'])
# For an optional `MultiValueField` with `require_all_fields=False`, we
# don't get any `required` error but we still get `incomplete` errors.
f = PhoneField(required=False, require_all_fields=False)
self.assertIsNone(f.clean(''))
self.assertIsNone(f.clean(None))
self.assertIsNone(f.clean([]))
with self.assertRaisesMessage(ValidationError, "'Enter a complete value.'"):
f.clean(['+61'])
self.assertEqual('+61.287654321 ext. 123 (label: )', f.clean(['+61', '287654321', '123']))
with self.assertRaisesMessage(ValidationError, "'Enter a complete value.', 'Enter an extension.'"):
f.clean(['', '', '', 'Home'])
with self.assertRaisesMessage(ValidationError, "'Enter a valid country code.'"):
f.clean(['61', '287654321', '123', 'Home'])
def test_custom_empty_values(self):
"""
Form fields can customize what is considered as an empty value
for themselves (#19997).
"""
class CustomJSONField(CharField):
empty_values = [None, '']
def to_python(self, value):
# Fake json.loads
if value == '{}':
return {}
return super().to_python(value)
class JSONForm(forms.Form):
json = CustomJSONField()
form = JSONForm(data={'json': '{}'})
form.full_clean()
self.assertEqual(form.cleaned_data, {'json': {}})
def test_boundfield_label_tag(self):
class SomeForm(Form):
field = CharField()
boundfield = SomeForm()['field']
testcases = [ # (args, kwargs, expected)
# without anything: just print the <label>
((), {}, '<label for="id_field">Field:</label>'),
# passing just one argument: overrides the field's label
(('custom',), {}, '<label for="id_field">custom:</label>'),
# the overridden label is escaped
(('custom&',), {}, '<label for="id_field">custom&:</label>'),
((mark_safe('custom&'),), {}, '<label for="id_field">custom&:</label>'),
# Passing attrs to add extra attributes on the <label>
((), {'attrs': {'class': 'pretty'}}, '<label for="id_field" class="pretty">Field:</label>')
]
for args, kwargs, expected in testcases:
with self.subTest(args=args, kwargs=kwargs):
self.assertHTMLEqual(boundfield.label_tag(*args, **kwargs), expected)
def test_boundfield_label_tag_no_id(self):
"""
If a widget has no id, label_tag just returns the text with no
surrounding <label>.
"""
class SomeForm(Form):
field = CharField()
boundfield = SomeForm(auto_id='')['field']
self.assertHTMLEqual(boundfield.label_tag(), 'Field:')
self.assertHTMLEqual(boundfield.label_tag('Custom&'), 'Custom&:')
def test_boundfield_label_tag_custom_widget_id_for_label(self):
class CustomIdForLabelTextInput(TextInput):
def id_for_label(self, id):
return 'custom_' + id
class EmptyIdForLabelTextInput(TextInput):
def id_for_label(self, id):
return None
class SomeForm(Form):
custom = CharField(widget=CustomIdForLabelTextInput)
empty = CharField(widget=EmptyIdForLabelTextInput)
form = SomeForm()
self.assertHTMLEqual(form['custom'].label_tag(), '<label for="custom_id_custom">Custom:</label>')
self.assertHTMLEqual(form['empty'].label_tag(), '<label>Empty:</label>')
def test_boundfield_empty_label(self):
class SomeForm(Form):
field = CharField(label='')
boundfield = SomeForm()['field']
self.assertHTMLEqual(boundfield.label_tag(), '<label for="id_field"></label>')
def test_boundfield_id_for_label(self):
class SomeForm(Form):
field = CharField(label='')
self.assertEqual(SomeForm()['field'].id_for_label, 'id_field')
def test_boundfield_id_for_label_override_by_attrs(self):
"""
If an id is provided in `Widget.attrs`, it overrides the generated ID,
unless it is `None`.
"""
class SomeForm(Form):
field = CharField(widget=TextInput(attrs={'id': 'myCustomID'}))
field_none = CharField(widget=TextInput(attrs={'id': None}))
form = SomeForm()
self.assertEqual(form['field'].id_for_label, 'myCustomID')
self.assertEqual(form['field_none'].id_for_label, 'id_field_none')
def test_boundfield_widget_type(self):
class SomeForm(Form):
first_name = CharField()
birthday = SplitDateTimeField(widget=SplitHiddenDateTimeWidget)
f = SomeForm()
self.assertEqual(f['first_name'].widget_type, 'text')
self.assertEqual(f['birthday'].widget_type, 'splithiddendatetime')
def test_boundfield_css_classes(self):
form = Person()
field = form['first_name']
self.assertEqual(field.css_classes(), '')
self.assertEqual(field.css_classes(extra_classes=''), '')
self.assertEqual(field.css_classes(extra_classes='test'), 'test')
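        # Duplicate extra classes are de-duplicated (illustrated by the next assertion).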
self.assertEqual(field.css_classes(extra_classes='test test'), 'test')
def test_label_tag_override(self):
"""
BoundField label_suffix (if provided) overrides Form label_suffix
"""
class SomeForm(Form):
field = CharField()
boundfield = SomeForm(label_suffix='!')['field']
self.assertHTMLEqual(boundfield.label_tag(label_suffix='$'), '<label for="id_field">Field$</label>')
def test_field_name(self):
"""#5749 - `field_name` may be used as a key in _html_output()."""
class SomeForm(Form):
some_field = CharField()
def as_p(self):
return self._html_output(
normal_row='<p id="p_%(field_name)s"></p>',
error_row='%s',
row_ender='</p>',
help_text_html=' %s',
errors_on_separate_row=True,
)
form = SomeForm()
self.assertHTMLEqual(form.as_p(), '<p id="p_some_field"></p>')
def test_field_without_css_classes(self):
"""
`css_classes` may be used as a key in _html_output() (empty classes).
"""
class SomeForm(Form):
some_field = CharField()
def as_p(self):
return self._html_output(
normal_row='<p class="%(css_classes)s"></p>',
error_row='%s',
row_ender='</p>',
help_text_html=' %s',
errors_on_separate_row=True,
)
form = SomeForm()
self.assertHTMLEqual(form.as_p(), '<p class=""></p>')
def test_field_with_css_class(self):
"""
`css_classes` may be used as a key in _html_output() (class comes
from required_css_class in this case).
"""
class SomeForm(Form):
some_field = CharField()
required_css_class = 'foo'
def as_p(self):
return self._html_output(
normal_row='<p class="%(css_classes)s"></p>',
error_row='%s',
row_ender='</p>',
help_text_html=' %s',
errors_on_separate_row=True,
)
form = SomeForm()
self.assertHTMLEqual(form.as_p(), '<p class="foo"></p>')
def test_field_name_with_hidden_input(self):
"""
BaseForm._html_output() should merge all the hidden input fields and
put them in the last row.
"""
class SomeForm(Form):
hidden1 = CharField(widget=HiddenInput)
custom = CharField()
hidden2 = CharField(widget=HiddenInput)
def as_p(self):
return self._html_output(
normal_row='<p%(html_class_attr)s>%(field)s %(field_name)s</p>',
error_row='%s',
row_ender='</p>',
help_text_html=' %s',
errors_on_separate_row=True,
)
form = SomeForm()
self.assertHTMLEqual(
form.as_p(),
'<p><input id="id_custom" name="custom" type="text" required> custom'
'<input id="id_hidden1" name="hidden1" type="hidden">'
'<input id="id_hidden2" name="hidden2" type="hidden"></p>'
)
def test_field_name_with_hidden_input_and_non_matching_row_ender(self):
"""
BaseForm._html_output() should merge all the hidden input fields and
put them in the last row ended with the specific row ender.
"""
class SomeForm(Form):
hidden1 = CharField(widget=HiddenInput)
custom = CharField()
hidden2 = CharField(widget=HiddenInput)
def as_p(self):
return self._html_output(
normal_row='<p%(html_class_attr)s>%(field)s %(field_name)s</p>',
error_row='%s',
row_ender='<hr><hr>',
help_text_html=' %s',
errors_on_separate_row=True
)
form = SomeForm()
self.assertHTMLEqual(
form.as_p(),
'<p><input id="id_custom" name="custom" type="text" required> custom</p>\n'
'<input id="id_hidden1" name="hidden1" type="hidden">'
'<input id="id_hidden2" name="hidden2" type="hidden"><hr><hr>'
)
def test_error_dict(self):
class MyForm(Form):
foo = CharField()
bar = CharField()
def clean(self):
raise ValidationError('Non-field error.', code='secret', params={'a': 1, 'b': 2})
form = MyForm({})
self.assertIs(form.is_valid(), False)
errors = form.errors.as_text()
control = [
'* foo\n * This field is required.',
'* bar\n * This field is required.',
'* __all__\n * Non-field error.',
]
for error in control:
self.assertIn(error, errors)
errors = form.errors.as_ul()
control = [
'<li>foo<ul class="errorlist"><li>This field is required.</li></ul></li>',
'<li>bar<ul class="errorlist"><li>This field is required.</li></ul></li>',
'<li>__all__<ul class="errorlist nonfield"><li>Non-field error.</li></ul></li>',
]
for error in control:
self.assertInHTML(error, errors)
errors = form.errors.get_json_data()
control = {
'foo': [{'code': 'required', 'message': 'This field is required.'}],
'bar': [{'code': 'required', 'message': 'This field is required.'}],
'__all__': [{'code': 'secret', 'message': 'Non-field error.'}]
}
self.assertEqual(errors, control)
self.assertEqual(json.dumps(errors), form.errors.as_json())
def test_error_dict_as_json_escape_html(self):
"""#21962 - adding html escape flag to ErrorDict"""
class MyForm(Form):
foo = CharField()
bar = CharField()
def clean(self):
raise ValidationError(
'<p>Non-field error.</p>',
code='secret',
params={'a': 1, 'b': 2},
)
control = {
'foo': [{'code': 'required', 'message': 'This field is required.'}],
'bar': [{'code': 'required', 'message': 'This field is required.'}],
'__all__': [{'code': 'secret', 'message': '<p>Non-field error.</p>'}]
}
form = MyForm({})
self.assertFalse(form.is_valid())
errors = json.loads(form.errors.as_json())
self.assertEqual(errors, control)
        escaped_error = '&lt;p&gt;Non-field error.&lt;/p&gt;'
self.assertEqual(
form.errors.get_json_data(escape_html=True)['__all__'][0]['message'],
escaped_error
)
errors = json.loads(form.errors.as_json(escape_html=True))
control['__all__'][0]['message'] = escaped_error
self.assertEqual(errors, control)
def test_error_list(self):
e = ErrorList()
e.append('Foo')
e.append(ValidationError('Foo%(bar)s', code='foobar', params={'bar': 'bar'}))
self.assertIsInstance(e, list)
self.assertIn('Foo', e)
self.assertIn('Foo', forms.ValidationError(e))
self.assertEqual(
e.as_text(),
'* Foo\n* Foobar'
)
self.assertEqual(
e.as_ul(),
'<ul class="errorlist"><li>Foo</li><li>Foobar</li></ul>'
)
errors = e.get_json_data()
self.assertEqual(
errors,
[{"message": "Foo", "code": ""}, {"message": "Foobar", "code": "foobar"}]
)
self.assertEqual(json.dumps(errors), e.as_json())
def test_error_list_class_not_specified(self):
e = ErrorList()
e.append('Foo')
e.append(ValidationError('Foo%(bar)s', code='foobar', params={'bar': 'bar'}))
self.assertEqual(
e.as_ul(),
'<ul class="errorlist"><li>Foo</li><li>Foobar</li></ul>'
)
def test_error_list_class_has_one_class_specified(self):
e = ErrorList(error_class='foobar-error-class')
e.append('Foo')
e.append(ValidationError('Foo%(bar)s', code='foobar', params={'bar': 'bar'}))
self.assertEqual(
e.as_ul(),
'<ul class="errorlist foobar-error-class"><li>Foo</li><li>Foobar</li></ul>'
)
def test_error_list_with_hidden_field_errors_has_correct_class(self):
class Person(Form):
first_name = CharField()
last_name = CharField(widget=HiddenInput)
p = Person({'first_name': 'John'})
self.assertHTMLEqual(
p.as_ul(),
"""<li><ul class="errorlist nonfield">
<li>(Hidden field last_name) This field is required.</li></ul></li><li>
<label for="id_first_name">First name:</label>
<input id="id_first_name" name="first_name" type="text" value="John" required>
<input id="id_last_name" name="last_name" type="hidden"></li>"""
)
self.assertHTMLEqual(
p.as_p(),
"""<ul class="errorlist nonfield"><li>(Hidden field last_name) This field is required.</li></ul>
<p><label for="id_first_name">First name:</label>
<input id="id_first_name" name="first_name" type="text" value="John" required>
<input id="id_last_name" name="last_name" type="hidden"></p>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr><td colspan="2"><ul class="errorlist nonfield">
<li>(Hidden field last_name) This field is required.</li></ul></td></tr>
<tr><th><label for="id_first_name">First name:</label></th><td>
<input id="id_first_name" name="first_name" type="text" value="John" required>
<input id="id_last_name" name="last_name" type="hidden"></td></tr>"""
)
def test_error_list_with_non_field_errors_has_correct_class(self):
class Person(Form):
first_name = CharField()
last_name = CharField()
def clean(self):
raise ValidationError('Generic validation error')
p = Person({'first_name': 'John', 'last_name': 'Lennon'})
self.assertHTMLEqual(
str(p.non_field_errors()),
'<ul class="errorlist nonfield"><li>Generic validation error</li></ul>'
)
self.assertHTMLEqual(
p.as_ul(),
"""<li>
<ul class="errorlist nonfield"><li>Generic validation error</li></ul></li>
<li><label for="id_first_name">First name:</label>
<input id="id_first_name" name="first_name" type="text" value="John" required></li>
<li><label for="id_last_name">Last name:</label>
<input id="id_last_name" name="last_name" type="text" value="Lennon" required></li>"""
)
self.assertHTMLEqual(
p.non_field_errors().as_text(),
'* Generic validation error'
)
self.assertHTMLEqual(
p.as_p(),
"""<ul class="errorlist nonfield"><li>Generic validation error</li></ul>
<p><label for="id_first_name">First name:</label>
<input id="id_first_name" name="first_name" type="text" value="John" required></p>
<p><label for="id_last_name">Last name:</label>
<input id="id_last_name" name="last_name" type="text" value="Lennon" required></p>"""
)
self.assertHTMLEqual(
p.as_table(),
"""<tr><td colspan="2"><ul class="errorlist nonfield"><li>Generic validation error</li></ul></td></tr>
<tr><th><label for="id_first_name">First name:</label></th><td>
<input id="id_first_name" name="first_name" type="text" value="John" required></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th><td>
<input id="id_last_name" name="last_name" type="text" value="Lennon" required></td></tr>"""
)
def test_errorlist_override(self):
class DivErrorList(ErrorList):
def __str__(self):
return self.as_divs()
def as_divs(self):
if not self:
return ''
return '<div class="errorlist">%s</div>' % ''.join(
'<div class="error">%s</div>' % e for e in self)
class CommentForm(Form):
name = CharField(max_length=50, required=False)
email = EmailField()
comment = CharField()
data = {'email': 'invalid'}
f = CommentForm(data, auto_id=False, error_class=DivErrorList)
self.assertHTMLEqual(f.as_p(), """<p>Name: <input type="text" name="name" maxlength="50"></p>
<div class="errorlist"><div class="error">Enter a valid email address.</div></div>
<p>Email: <input type="email" name="email" value="invalid" required></p>
<div class="errorlist"><div class="error">This field is required.</div></div>
<p>Comment: <input type="text" name="comment" required></p>""")
def test_error_escaping(self):
class TestForm(Form):
hidden = CharField(widget=HiddenInput(), required=False)
visible = CharField()
def clean_hidden(self):
raise ValidationError('Foo & "bar"!')
clean_visible = clean_hidden
form = TestForm({'hidden': 'a', 'visible': 'b'})
form.is_valid()
self.assertHTMLEqual(
form.as_ul(),
'<li><ul class="errorlist nonfield"><li>(Hidden field hidden) Foo & "bar"!</li></ul></li>'
'<li><ul class="errorlist"><li>Foo & "bar"!</li></ul>'
'<label for="id_visible">Visible:</label> '
'<input type="text" name="visible" value="b" id="id_visible" required>'
'<input type="hidden" name="hidden" value="a" id="id_hidden"></li>'
)
def test_baseform_repr(self):
"""
BaseForm.__repr__() should contain some basic information about the
form.
"""
p = Person()
self.assertEqual(repr(p), "<Person bound=False, valid=Unknown, fields=(first_name;last_name;birthday)>")
p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': '1940-10-9'})
self.assertEqual(repr(p), "<Person bound=True, valid=Unknown, fields=(first_name;last_name;birthday)>")
p.is_valid()
self.assertEqual(repr(p), "<Person bound=True, valid=True, fields=(first_name;last_name;birthday)>")
p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': 'fakedate'})
p.is_valid()
self.assertEqual(repr(p), "<Person bound=True, valid=False, fields=(first_name;last_name;birthday)>")
def test_baseform_repr_dont_trigger_validation(self):
"""
BaseForm.__repr__() shouldn't trigger the form validation.
"""
p = Person({'first_name': 'John', 'last_name': 'Lennon', 'birthday': 'fakedate'})
repr(p)
with self.assertRaises(AttributeError):
p.cleaned_data
self.assertFalse(p.is_valid())
self.assertEqual(p.cleaned_data, {'first_name': 'John', 'last_name': 'Lennon'})
def test_accessing_clean(self):
class UserForm(Form):
username = CharField(max_length=10)
password = CharField(widget=PasswordInput)
def clean(self):
data = self.cleaned_data
if not self.errors:
data['username'] = data['username'].lower()
return data
f = UserForm({'username': 'SirRobin', 'password': 'blue'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['username'], 'sirrobin')
def test_changing_cleaned_data_nothing_returned(self):
class UserForm(Form):
username = CharField(max_length=10)
password = CharField(widget=PasswordInput)
def clean(self):
self.cleaned_data['username'] = self.cleaned_data['username'].lower()
# don't return anything
f = UserForm({'username': 'SirRobin', 'password': 'blue'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['username'], 'sirrobin')
def test_changing_cleaned_data_in_clean(self):
class UserForm(Form):
username = CharField(max_length=10)
password = CharField(widget=PasswordInput)
def clean(self):
data = self.cleaned_data
# Return a different dict. We have not changed self.cleaned_data.
return {
'username': data['username'].lower(),
'password': 'this_is_not_a_secret',
}
f = UserForm({'username': 'SirRobin', 'password': 'blue'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data['username'], 'sirrobin')
def test_multipart_encoded_form(self):
class FormWithoutFile(Form):
username = CharField()
class FormWithFile(Form):
username = CharField()
file = FileField()
class FormWithImage(Form):
image = ImageField()
self.assertFalse(FormWithoutFile().is_multipart())
self.assertTrue(FormWithFile().is_multipart())
self.assertTrue(FormWithImage().is_multipart())
def test_html_safe(self):
class SimpleForm(Form):
username = CharField()
form = SimpleForm()
self.assertTrue(hasattr(SimpleForm, '__html__'))
self.assertEqual(str(form), form.__html__())
self.assertTrue(hasattr(form['username'], '__html__'))
self.assertEqual(str(form['username']), form['username'].__html__())
def test_use_required_attribute_true(self):
class MyForm(Form):
use_required_attribute = True
f1 = CharField(max_length=30)
f2 = CharField(max_length=30, required=False)
f3 = CharField(widget=Textarea)
f4 = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
form = MyForm()
self.assertHTMLEqual(
form.as_p(),
'<p><label for="id_f1">F1:</label> <input id="id_f1" maxlength="30" name="f1" type="text" required></p>'
'<p><label for="id_f2">F2:</label> <input id="id_f2" maxlength="30" name="f2" type="text"></p>'
'<p><label for="id_f3">F3:</label> <textarea cols="40" id="id_f3" name="f3" rows="10" required>'
'</textarea></p>'
'<p><label for="id_f4">F4:</label> <select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></p>',
)
self.assertHTMLEqual(
form.as_ul(),
'<li><label for="id_f1">F1:</label> '
'<input id="id_f1" maxlength="30" name="f1" type="text" required></li>'
'<li><label for="id_f2">F2:</label> <input id="id_f2" maxlength="30" name="f2" type="text"></li>'
'<li><label for="id_f3">F3:</label> <textarea cols="40" id="id_f3" name="f3" rows="10" required>'
'</textarea></li>'
'<li><label for="id_f4">F4:</label> <select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></li>',
)
self.assertHTMLEqual(
form.as_table(),
'<tr><th><label for="id_f1">F1:</label></th>'
'<td><input id="id_f1" maxlength="30" name="f1" type="text" required></td></tr>'
'<tr><th><label for="id_f2">F2:</label></th>'
'<td><input id="id_f2" maxlength="30" name="f2" type="text"></td></tr>'
'<tr><th><label for="id_f3">F3:</label></th>'
'<td><textarea cols="40" id="id_f3" name="f3" rows="10" required>'
'</textarea></td></tr>'
'<tr><th><label for="id_f4">F4:</label></th><td><select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></td></tr>',
)
def test_use_required_attribute_false(self):
class MyForm(Form):
use_required_attribute = False
f1 = CharField(max_length=30)
f2 = CharField(max_length=30, required=False)
f3 = CharField(widget=Textarea)
f4 = ChoiceField(choices=[('P', 'Python'), ('J', 'Java')])
form = MyForm()
self.assertHTMLEqual(
form.as_p(),
'<p><label for="id_f1">F1:</label> <input id="id_f1" maxlength="30" name="f1" type="text"></p>'
'<p><label for="id_f2">F2:</label> <input id="id_f2" maxlength="30" name="f2" type="text"></p>'
'<p><label for="id_f3">F3:</label> <textarea cols="40" id="id_f3" name="f3" rows="10">'
'</textarea></p>'
'<p><label for="id_f4">F4:</label> <select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></p>',
)
self.assertHTMLEqual(
form.as_ul(),
'<li><label for="id_f1">F1:</label> <input id="id_f1" maxlength="30" name="f1" type="text"></li>'
'<li><label for="id_f2">F2:</label> <input id="id_f2" maxlength="30" name="f2" type="text"></li>'
'<li><label for="id_f3">F3:</label> <textarea cols="40" id="id_f3" name="f3" rows="10">'
'</textarea></li>'
'<li><label for="id_f4">F4:</label> <select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></li>',
)
self.assertHTMLEqual(
form.as_table(),
'<tr><th><label for="id_f1">F1:</label></th>'
'<td><input id="id_f1" maxlength="30" name="f1" type="text"></td></tr>'
'<tr><th><label for="id_f2">F2:</label></th>'
'<td><input id="id_f2" maxlength="30" name="f2" type="text"></td></tr>'
'<tr><th><label for="id_f3">F3:</label></th><td><textarea cols="40" id="id_f3" name="f3" rows="10">'
'</textarea></td></tr>'
'<tr><th><label for="id_f4">F4:</label></th><td><select id="id_f4" name="f4">'
'<option value="P">Python</option>'
'<option value="J">Java</option>'
'</select></td></tr>',
)
def test_only_hidden_fields(self):
# A form with *only* hidden fields that has errors is going to be very unusual.
class HiddenForm(Form):
data = IntegerField(widget=HiddenInput)
f = HiddenForm({})
self.assertHTMLEqual(
f.as_p(),
'<ul class="errorlist nonfield">'
'<li>(Hidden field data) This field is required.</li></ul>\n<p> '
'<input type="hidden" name="data" id="id_data"></p>'
)
self.assertHTMLEqual(
f.as_table(),
'<tr><td colspan="2"><ul class="errorlist nonfield">'
'<li>(Hidden field data) This field is required.</li></ul>'
'<input type="hidden" name="data" id="id_data"></td></tr>'
)
def test_field_named_data(self):
class DataForm(Form):
data = CharField(max_length=10)
f = DataForm({'data': 'xyzzy'})
self.assertTrue(f.is_valid())
self.assertEqual(f.cleaned_data, {'data': 'xyzzy'})
def test_empty_data_files_multi_value_dict(self):
p = Person()
self.assertIsInstance(p.data, MultiValueDict)
self.assertIsInstance(p.files, MultiValueDict)
def test_field_deep_copy_error_messages(self):
class CustomCharField(CharField):
def __init__(self, **kwargs):
kwargs['error_messages'] = {'invalid': 'Form custom error message.'}
super().__init__(**kwargs)
field = CustomCharField()
field_copy = copy.deepcopy(field)
self.assertIsInstance(field_copy, CustomCharField)
self.assertIsNot(field_copy.error_messages, field.error_messages)
class CustomRenderer(DjangoTemplates):
pass
class RendererTests(SimpleTestCase):
def test_default(self):
form = Form()
self.assertEqual(form.renderer, get_default_renderer())
def test_kwarg_instance(self):
custom = CustomRenderer()
form = Form(renderer=custom)
self.assertEqual(form.renderer, custom)
def test_kwarg_class(self):
custom = CustomRenderer()
form = Form(renderer=custom)
self.assertEqual(form.renderer, custom)
def test_attribute_instance(self):
class CustomForm(Form):
default_renderer = DjangoTemplates()
form = CustomForm()
self.assertEqual(form.renderer, CustomForm.default_renderer)
def test_attribute_class(self):
class CustomForm(Form):
default_renderer = CustomRenderer
form = CustomForm()
self.assertIsInstance(form.renderer, CustomForm.default_renderer)
def test_attribute_override(self):
class CustomForm(Form):
default_renderer = DjangoTemplates()
custom = CustomRenderer()
form = CustomForm(renderer=custom)
self.assertEqual(form.renderer, custom)
|
63688958abcfff9504576dc4f1ea2902e6a3aa514738facec2a9e7f3e72b0646 | import decimal
from django.core.management.color import no_style
from django.db import NotSupportedError, connection, transaction
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models import DurationField
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, override_settings,
skipIfDBFeature,
)
from django.utils import timezone
from ..models import Author, Book
class SimpleDatabaseOperationTests(SimpleTestCase):
may_require_msg = 'subclasses of BaseDatabaseOperations may require a %s() method'
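    # Shared message template for operations that BaseDatabaseOperations leaves
    # unimplemented and expects concrete backends to override.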
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
def test_deferrable_sql(self):
self.assertEqual(self.ops.deferrable_sql(), '')
def test_end_transaction_rollback(self):
self.assertEqual(self.ops.end_transaction_sql(success=False), 'ROLLBACK;')
def test_no_limit_value(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'no_limit_value'):
self.ops.no_limit_value()
def test_quote_name(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'quote_name'):
self.ops.quote_name('a')
def test_regex_lookup(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'regex_lookup'):
self.ops.regex_lookup(lookup_type='regex')
def test_set_time_zone_sql(self):
self.assertEqual(self.ops.set_time_zone_sql(), '')
def test_sql_flush(self):
msg = 'subclasses of BaseDatabaseOperations must provide a sql_flush() method'
with self.assertRaisesMessage(NotImplementedError, msg):
self.ops.sql_flush(None, None)
def test_pk_default_value(self):
self.assertEqual(self.ops.pk_default_value(), 'DEFAULT')
def test_tablespace_sql(self):
self.assertEqual(self.ops.tablespace_sql(None), '')
def test_sequence_reset_by_name_sql(self):
self.assertEqual(self.ops.sequence_reset_by_name_sql(None, []), [])
def test_adapt_unknown_value_decimal(self):
value = decimal.Decimal('3.14')
self.assertEqual(
self.ops.adapt_unknown_value(value),
self.ops.adapt_decimalfield_value(value)
)
def test_adapt_unknown_value_date(self):
value = timezone.now().date()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_datefield_value(value))
def test_adapt_unknown_value_time(self):
value = timezone.now().time()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_timefield_value(value))
def test_adapt_timefield_value_none(self):
self.assertIsNone(self.ops.adapt_timefield_value(None))
def test_adapt_datetimefield_value(self):
self.assertIsNone(self.ops.adapt_datetimefield_value(None))
def test_adapt_timefield_value(self):
msg = 'Django does not support timezone-aware times.'
with self.assertRaisesMessage(ValueError, msg):
self.ops.adapt_timefield_value(timezone.make_aware(timezone.now()))
@override_settings(USE_TZ=False)
def test_adapt_timefield_value_unaware(self):
now = timezone.now()
self.assertEqual(self.ops.adapt_timefield_value(now), str(now))
def test_date_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.date_extract_sql(None, None)
def test_time_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.time_extract_sql(None, None)
def test_date_interval_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_interval_sql'):
self.ops.date_interval_sql(None)
def test_date_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_trunc_sql'):
self.ops.date_trunc_sql(None, None)
def test_time_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'time_trunc_sql'):
self.ops.time_trunc_sql(None, None)
def test_datetime_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_trunc_sql'):
self.ops.datetime_trunc_sql(None, None, None)
def test_datetime_cast_date_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_date_sql'):
self.ops.datetime_cast_date_sql(None, None)
def test_datetime_cast_time_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_time_sql'):
self.ops.datetime_cast_time_sql(None, None)
def test_datetime_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_extract_sql'):
self.ops.datetime_extract_sql(None, None, None)
class DatabaseOperationTests(TestCase):
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
@skipIfDBFeature('supports_over_clause')
def test_window_frame_raise_not_supported_error(self):
msg = 'This backend does not support window expressions.'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.window_frame_rows_start_end()
@skipIfDBFeature('can_distinct_on_fields')
def test_distinct_on_fields(self):
msg = 'DISTINCT ON fields is not supported by this database backend'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.distinct_sql(['a', 'b'], None)
@skipIfDBFeature('supports_temporal_subtraction')
def test_subtract_temporals(self):
duration_field = DurationField()
duration_field_internal_type = duration_field.get_internal_type()
msg = (
'This backend does not support %s subtraction.' %
duration_field_internal_type
)
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.subtract_temporals(duration_field_internal_type, None, None)
class SqlFlushTests(TransactionTestCase):
available_apps = ['backends']
def test_sql_flush_no_tables(self):
self.assertEqual(connection.ops.sql_flush(no_style(), []), [])
def test_execute_sql_flush_statements(self):
with transaction.atomic():
author = Author.objects.create(name='George Orwell')
Book.objects.create(author=author)
author = Author.objects.create(name='Harper Lee')
Book.objects.create(author=author)
Book.objects.create(author=author)
self.assertIs(Author.objects.exists(), True)
self.assertIs(Book.objects.exists(), True)
sql_list = connection.ops.sql_flush(
no_style(),
[Author._meta.db_table, Book._meta.db_table],
reset_sequences=True,
allow_cascade=True,
)
connection.ops.execute_sql_flush(sql_list)
with transaction.atomic():
self.assertIs(Author.objects.exists(), False)
self.assertIs(Book.objects.exists(), False)
if connection.features.supports_sequence_reset:
author = Author.objects.create(name='F. Scott Fitzgerald')
self.assertEqual(author.pk, 1)
book = Book.objects.create(author=author)
self.assertEqual(book.pk, 1)
|
4f24fd33cfb774145710d4096d5790ba3b5ff11a18d456803c3a8a8c20cca8c3 | """
Sphinx plugins for Django documentation.
"""
import json
import os
import re
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.directives.code import CodeBlock
from sphinx.domains.std import Cmdoption
from sphinx.errors import ExtensionError, SphinxError
from sphinx.util import logging
from sphinx.util.console import bold, red
from sphinx.writers.html import HTMLTranslator
logger = logging.getLogger(__name__)
# RE for option descriptions without a '--' prefix
simple_option_desc_re = re.compile(
r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)')
def setup(app):
app.add_crossref_type(
directivename="setting",
rolename="setting",
indextemplate="pair: %s; setting",
)
app.add_crossref_type(
directivename="templatetag",
rolename="ttag",
indextemplate="pair: %s; template tag"
)
app.add_crossref_type(
directivename="templatefilter",
rolename="tfilter",
indextemplate="pair: %s; template filter"
)
app.add_crossref_type(
directivename="fieldlookup",
rolename="lookup",
indextemplate="pair: %s; field lookup type",
)
app.add_object_type(
directivename="django-admin",
rolename="djadmin",
indextemplate="pair: %s; django-admin command",
parse_node=parse_django_admin_node,
)
app.add_directive('django-admin-option', Cmdoption)
app.add_config_value('django_next_version', '0.0', True)
app.add_directive('versionadded', VersionDirective)
app.add_directive('versionchanged', VersionDirective)
app.add_builder(DjangoStandaloneHTMLBuilder)
app.set_translator('djangohtml', DjangoHTMLTranslator)
app.set_translator('json', DjangoHTMLTranslator)
app.add_node(
ConsoleNode,
html=(visit_console_html, None),
latex=(visit_console_dummy, depart_console_dummy),
man=(visit_console_dummy, depart_console_dummy),
text=(visit_console_dummy, depart_console_dummy),
texinfo=(visit_console_dummy, depart_console_dummy),
)
app.add_directive('console', ConsoleDirective)
app.connect('html-page-context', html_page_context_hook)
app.add_role('default-role-error', default_role_error)
return {'parallel_read_safe': True}
class VersionDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
if len(self.arguments) > 1:
msg = """Only one argument accepted for directive '{directive_name}::'.
Comments should be provided as content,
not as an extra argument.""".format(directive_name=self.name)
raise self.error(msg)
env = self.state.document.settings.env
ret = []
node = addnodes.versionmodified()
ret.append(node)
if self.arguments[0] == env.config.django_next_version:
node['version'] = "Development version"
else:
node['version'] = self.arguments[0]
node['type'] = self.name
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
try:
env.get_domain('changeset').note_changeset(node)
except ExtensionError:
# Sphinx < 1.8: Domain 'changeset' is not registered
env.note_versionchange(node['type'], node['version'], node, self.lineno)
return ret
class DjangoHTMLTranslator(HTMLTranslator):
"""
Django-specific reST to HTML tweaks.
"""
# Don't use border=1, which docutils does by default.
def visit_table(self, node):
self.context.append(self.compact_p)
self.compact_p = True
self._table_row_index = 0 # Needed by Sphinx
self.body.append(self.starttag(node, 'table', CLASS='docutils'))
def depart_table(self, node):
self.compact_p = self.context.pop()
self.body.append('</table>\n')
def visit_desc_parameterlist(self, node):
self.body.append('(') # by default sphinx puts <big> around the "("
self.first_param = 1
self.optional_param_level = 0
self.param_separator = node.child_text_separator
self.required_params_left = sum(isinstance(c, addnodes.desc_parameter) for c in node.children)
def depart_desc_parameterlist(self, node):
self.body.append(')')
#
# Turn the "new in version" stuff (versionadded/versionchanged) into a
# better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
#
# FIXME: these messages are all hardcoded in English. We need to change
# that to accommodate other language docs, but I can't work out how to make
# that work.
#
version_text = {
'versionchanged': 'Changed in Django %s',
'versionadded': 'New in Django %s',
}
def visit_versionmodified(self, node):
self.body.append(
self.starttag(node, 'div', CLASS=node['type'])
)
version_text = self.version_text.get(node['type'])
if version_text:
title = "%s%s" % (
version_text % node['version'],
":" if len(node) else "."
)
self.body.append('<span class="title">%s</span> ' % title)
def depart_versionmodified(self, node):
self.body.append("</div>\n")
# Give each section a unique ID -- nice for custom CSS hooks
def visit_section(self, node):
old_ids = node.get('ids', [])
node['ids'] = ['s-' + i for i in old_ids]
node['ids'].extend(old_ids)
super().visit_section(node)
node['ids'] = old_ids
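# Illustrative example: for a signature such as "makemessages --locale=de" the
# function below registers the program name "makemessages" and renders the
# title "django-admin makemessages --locale=de".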
def parse_django_admin_node(env, sig, signode):
command = sig.split(' ')[0]
env.ref_context['std:program'] = command
title = "django-admin %s" % sig
signode += addnodes.desc_name(title, title)
return command
class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
"""
Subclass to add some extra things we need.
"""
name = 'djangohtml'
def finish(self):
super().finish()
logger.info(bold("writing templatebuiltins.js..."))
xrefs = self.env.domaindata["std"]["objects"]
templatebuiltins = {
"ttags": [
n for ((t, n), (k, a)) in xrefs.items()
if t == "templatetag" and k == "ref/templates/builtins"
],
"tfilters": [
n for ((t, n), (k, a)) in xrefs.items()
if t == "templatefilter" and k == "ref/templates/builtins"
],
}
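        # The generated file ends up looking like (illustrative):
        #   var django_template_builtins = {"ttags": [...], "tfilters": [...]};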
outfilename = os.path.join(self.outdir, "templatebuiltins.js")
with open(outfilename, 'w') as fp:
fp.write('var django_template_builtins = ')
json.dump(templatebuiltins, fp)
fp.write(';\n')
class ConsoleNode(nodes.literal_block):
"""
Custom node to override the visit/depart event handlers at registration
time. Wrap a literal_block object and defer to it.
"""
tagname = 'ConsoleNode'
def __init__(self, litblk_obj):
self.wrapped = litblk_obj
def __getattr__(self, attr):
if attr == 'wrapped':
            return self.__dict__['wrapped']
return getattr(self.wrapped, attr)
def visit_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.visit_literal_block(node)
def depart_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.depart_literal_block(node)
def visit_console_html(self, node):
"""Generate HTML for the console directive."""
if self.builder.name in ('djangohtml', 'json') and node['win_console_text']:
# Put a mark on the document object signaling the fact the directive
# has been used on it.
self.document._console_directive_used_flag = True
uid = node['uid']
self.body.append('''\
<div class="console-block" id="console-block-%(id)s">
<input class="c-tab-unix" id="c-tab-%(id)s-unix" type="radio" name="console-%(id)s" checked>
<label for="c-tab-%(id)s-unix" title="Linux/macOS">/</label>
<input class="c-tab-win" id="c-tab-%(id)s-win" type="radio" name="console-%(id)s">
<label for="c-tab-%(id)s-win" title="Windows"></label>
<section class="c-content-unix" id="c-content-%(id)s-unix">\n''' % {'id': uid})
try:
self.visit_literal_block(node)
except nodes.SkipNode:
pass
self.body.append('</section>\n')
self.body.append('<section class="c-content-win" id="c-content-%(id)s-win">\n' % {'id': uid})
win_text = node['win_console_text']
highlight_args = {'force': True}
linenos = node.get('linenos', False)
def warner(msg):
self.builder.warn(msg, (self.builder.current_docname, node.line))
highlighted = self.highlighter.highlight_block(
win_text, 'doscon', warn=warner, linenos=linenos, **highlight_args
)
self.body.append(highlighted)
self.body.append('</section>\n')
self.body.append('</div>\n')
raise nodes.SkipNode
else:
self.visit_literal_block(node)
class ConsoleDirective(CodeBlock):
"""
A reStructuredText directive which renders a two-tab code block in which
the second tab shows a Windows command line equivalent of the usual
Unix-oriented examples.
"""
required_arguments = 0
# The 'doscon' Pygments formatter needs a prompt like this. '>' alone
# won't do it because then it simply paints the whole command line as a
# grey comment with no highlighting at all.
WIN_PROMPT = r'...\> '
def run(self):
def args_to_win(cmdline):
changed = False
out = []
for token in cmdline.split():
if token[:2] == './':
token = token[2:]
changed = True
elif token[:2] == '~/':
token = '%HOMEPATH%\\' + token[2:]
changed = True
elif token == 'make':
token = 'make.bat'
changed = True
if '://' not in token and 'git' not in cmdline:
out.append(token.replace('/', '\\'))
changed = True
else:
out.append(token)
if changed:
return ' '.join(out)
return cmdline
def cmdline_to_win(line):
if line.startswith('# '):
return 'REM ' + args_to_win(line[2:])
if line.startswith('$ # '):
return 'REM ' + args_to_win(line[4:])
if line.startswith('$ ./manage.py'):
return 'manage.py ' + args_to_win(line[13:])
if line.startswith('$ manage.py'):
return 'manage.py ' + args_to_win(line[11:])
if line.startswith('$ ./runtests.py'):
return 'runtests.py ' + args_to_win(line[15:])
if line.startswith('$ ./'):
return args_to_win(line[4:])
if line.startswith('$ python3'):
return 'py ' + args_to_win(line[9:])
if line.startswith('$ python'):
return 'py ' + args_to_win(line[8:])
if line.startswith('$ '):
return args_to_win(line[2:])
return None
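        # Illustrative transformation: "$ python -m pip install Django" becomes
        # "py -m pip install Django" and is later prefixed with WIN_PROMPT, so
        # the Windows tab shows "...\> py -m pip install Django".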
def code_block_to_win(content):
bchanged = False
lines = []
for line in content:
modline = cmdline_to_win(line)
if modline is None:
lines.append(line)
else:
lines.append(self.WIN_PROMPT + modline)
bchanged = True
if bchanged:
return ViewList(lines)
return None
env = self.state.document.settings.env
self.arguments = ['console']
lit_blk_obj = super().run()[0]
# Only do work when the djangohtml HTML Sphinx builder is being used,
# invoke the default behavior for the rest.
if env.app.builder.name not in ('djangohtml', 'json'):
return [lit_blk_obj]
lit_blk_obj['uid'] = '%s' % env.new_serialno('console')
# Only add the tabbed UI if there is actually a Windows-specific
# version of the CLI example.
win_content = code_block_to_win(self.content)
if win_content is None:
lit_blk_obj['win_console_text'] = None
else:
self.content = win_content
lit_blk_obj['win_console_text'] = super().run()[0].rawsource
# Replace the literal_node object returned by Sphinx's CodeBlock with
# the ConsoleNode wrapper.
return [ConsoleNode(lit_blk_obj)]
def html_page_context_hook(app, pagename, templatename, context, doctree):
# Put a bool on the context used to render the template. It's used to
# control inclusion of console-tabs.css and activation of the JavaScript.
    # This way they're included only in HTML files rendered from reST files where
# the ConsoleDirective is used.
context['include_console_assets'] = getattr(doctree, '_console_directive_used_flag', False)
def default_role_error(
name, rawtext, text, lineno, inliner, options=None, content=None
):
msg = (
"Default role used (`single backticks`) at line %s: %s. Did you mean "
"to use two backticks for ``code``, or miss an underscore for a "
"`link`_ ?" % (lineno, rawtext)
)
raise SphinxError(red(msg))
|
575e85c731de895ce5ed43ac4d036880d816b53366aebcabb7e3533bcfdeec44 | import functools
import re
from itertools import chain
from django.conf import settings
from django.db import models
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.operations.models import AlterModelOptions
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.utils import (
COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp,
)
from django.utils.topological_sort import stable_topological_sort
class MigrationAutodetector:
"""
Take a pair of ProjectStates and compare them to see what the first would
need doing to make it match the second (the second usually being the
project's current state).
Note that this naturally operates on entire projects at a time,
as it's likely that changes interact (for example, you can't
add a ForeignKey without having a migration to add the table it
depends on first). A user interface may offer single-app usage
if it wishes, with the caveat that it may not always be possible.
"""
def __init__(self, from_state, to_state, questioner=None):
self.from_state = from_state
self.to_state = to_state
self.questioner = questioner or MigrationQuestioner()
self.existing_apps = {app for app, model in from_state.models}
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
"""
Main entry point to produce a list of applicable changes.
Take a graph to base names on and an optional set of apps
to try and restrict to (restriction is not guaranteed)
"""
changes = self._detect_changes(convert_apps, graph)
changes = self.arrange_for_graph(changes, graph, migration_name)
if trim_to_apps:
changes = self._trim_to_apps(changes, trim_to_apps)
return changes
def deep_deconstruct(self, obj):
"""
Recursive deconstruction for a field and its arguments.
Used for full comparison for rename/alter; sometimes a single-level
deconstruction will not compare correctly.
"""
if isinstance(obj, list):
return [self.deep_deconstruct(value) for value in obj]
elif isinstance(obj, tuple):
return tuple(self.deep_deconstruct(value) for value in obj)
elif isinstance(obj, dict):
return {
key: self.deep_deconstruct(value)
for key, value in obj.items()
}
elif isinstance(obj, functools.partial):
return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords))
elif isinstance(obj, COMPILED_REGEX_TYPE):
return RegexObject(obj)
elif isinstance(obj, type):
# If this is a type that implements 'deconstruct' as an instance method,
# avoid treating this as being deconstructible itself - see #22951
return obj
elif hasattr(obj, 'deconstruct'):
deconstructed = obj.deconstruct()
if isinstance(obj, models.Field):
# we have a field which also returns a name
deconstructed = deconstructed[1:]
path, args, kwargs = deconstructed
return (
path,
[self.deep_deconstruct(value) for value in args],
{
key: self.deep_deconstruct(value)
for key, value in kwargs.items()
},
)
else:
return obj
def only_relation_agnostic_fields(self, fields):
"""
Return a definition of the fields that ignores field names and
what related fields actually relate to. Used for detecting renames (as,
of course, the related fields change during renames).
"""
fields_def = []
for name, field in sorted(fields):
deconstruction = self.deep_deconstruct(field)
if field.remote_field and field.remote_field.model:
del deconstruction[2]['to']
fields_def.append(deconstruction)
return fields_def
def _detect_changes(self, convert_apps=None, graph=None):
"""
Return a dict of migration plans which will achieve the
change from from_state to to_state. The dict has app labels
as keys and a list of migrations as values.
The resulting migrations aren't specially named, but the names
do matter for dependencies inside the set.
convert_apps is the list of apps to convert to use migrations
(i.e. to make initial migrations for, in the usual case)
graph is an optional argument that, if provided, can help improve
dependency generation and avoid potential circular dependencies.
"""
# The first phase is generating all the operations for each app
# and gathering them into a big per-app list.
# Then go through that list, order it, and split into migrations to
# resolve dependencies caused by M2Ms and FKs.
self.generated_operations = {}
self.altered_indexes = {}
self.altered_constraints = {}
# Prepare some old/new state and model lists, separating
# proxy models and ignoring unmigrated apps.
self.old_apps = self.from_state.concrete_apps
self.new_apps = self.to_state.apps
self.old_model_keys = set()
self.old_proxy_keys = set()
self.old_unmanaged_keys = set()
self.new_model_keys = set()
self.new_proxy_keys = set()
self.new_unmanaged_keys = set()
for al, mn in self.from_state.models:
model = self.old_apps.get_model(al, mn)
if not model._meta.managed:
self.old_unmanaged_keys.add((al, mn))
elif al not in self.from_state.real_apps:
if model._meta.proxy:
self.old_proxy_keys.add((al, mn))
else:
self.old_model_keys.add((al, mn))
for al, mn in self.to_state.models:
model = self.new_apps.get_model(al, mn)
if not model._meta.managed:
self.new_unmanaged_keys.add((al, mn))
elif (
al not in self.from_state.real_apps or
(convert_apps and al in convert_apps)
):
if model._meta.proxy:
self.new_proxy_keys.add((al, mn))
else:
self.new_model_keys.add((al, mn))
# Renames have to come first
self.generate_renamed_models()
# Prepare lists of fields and generate through model map
self._prepare_field_lists()
self._generate_through_model_map()
# Generate non-rename model operations
self.generate_deleted_models()
self.generate_created_models()
self.generate_deleted_proxies()
self.generate_created_proxies()
self.generate_altered_options()
self.generate_altered_managers()
# Create the altered indexes and store them in self.altered_indexes.
# This avoids the same computation in generate_removed_indexes()
# and generate_added_indexes().
self.create_altered_indexes()
self.create_altered_constraints()
# Generate index removal operations before field is removed
self.generate_removed_constraints()
self.generate_removed_indexes()
# Generate field operations
self.generate_renamed_fields()
self.generate_removed_fields()
self.generate_added_fields()
self.generate_altered_fields()
self.generate_altered_unique_together()
self.generate_altered_index_together()
self.generate_added_indexes()
self.generate_added_constraints()
self.generate_altered_db_table()
self.generate_altered_order_with_respect_to()
self._sort_migrations()
self._build_migration_list(graph)
self._optimize_migrations()
return self.migrations
def _prepare_field_lists(self):
"""
Prepare field lists and a list of the fields that used through models
in the old state so dependencies can be made from the through model
deletion to the field that uses it.
"""
self.kept_model_keys = self.old_model_keys & self.new_model_keys
self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
self.through_users = {}
self.old_field_keys = {
(app_label, model_name, x)
for app_label, model_name in self.kept_model_keys
for x, y in self.from_state.models[
app_label,
self.renamed_models.get((app_label, model_name), model_name)
].fields
}
self.new_field_keys = {
(app_label, model_name, x)
for app_label, model_name in self.kept_model_keys
for x, y in self.to_state.models[app_label, model_name].fields
}
def _generate_through_model_map(self):
"""Through model map generation."""
for app_label, model_name in sorted(self.old_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
for field_name, field in old_model_state.fields:
old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
not old_field.remote_field.through._meta.auto_created):
through_key = (
old_field.remote_field.through._meta.app_label,
old_field.remote_field.through._meta.model_name,
)
self.through_users[through_key] = (app_label, old_model_name, field_name)
@staticmethod
def _resolve_dependency(dependency):
"""
Return the resolved dependency and a boolean denoting whether or not
it was swappable.
"""
if dependency[0] != '__setting__':
return dependency, False
resolved_app_label, resolved_object_name = getattr(settings, dependency[1]).split('.')
return (resolved_app_label, resolved_object_name.lower()) + dependency[2:], True
def _build_migration_list(self, graph=None):
"""
Chop the lists of operations up into migrations with dependencies on
each other. Do this by going through an app's list of operations until
one is found that has an outgoing dependency that isn't in another
app's migration yet (hasn't been chopped off its list). Then chop off
the operations before it into a migration and move onto the next app.
        If the loop completes without doing anything, there's a circular
dependency (which _should_ be impossible as the operations are
all split at this point so they can't depend and be depended on).
"""
self.migrations = {}
num_ops = sum(len(x) for x in self.generated_operations.values())
chop_mode = False
while num_ops:
# On every iteration, we step through all the apps and see if there
# is a completed set of operations.
# If we find that a subset of the operations are complete we can
# try to chop it off from the rest and continue, but we only
# do this if we've already been through the list once before
# without any chopping and nothing has changed.
for app_label in sorted(self.generated_operations):
chopped = []
dependencies = set()
for operation in list(self.generated_operations[app_label]):
deps_satisfied = True
operation_dependencies = set()
for dep in operation._auto_deps:
# Temporarily resolve the swappable dependency to
# prevent circular references. While keeping the
# dependency checks on the resolved model, add the
# swappable dependencies.
original_dep = dep
dep, is_swappable_dep = self._resolve_dependency(dep)
if dep[0] != app_label:
# External app dependency. See if it's not yet
# satisfied.
for other_operation in self.generated_operations.get(dep[0], []):
if self.check_dependency(other_operation, dep):
deps_satisfied = False
break
if not deps_satisfied:
break
else:
if is_swappable_dep:
operation_dependencies.add((original_dep[0], original_dep[1]))
elif dep[0] in self.migrations:
operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
else:
# If we can't find the other app, we add a first/last dependency,
# but only if we've already been through once and checked everything
if chop_mode:
# If the app already exists, we add a dependency on the last migration,
# as we don't know which migration contains the target field.
# If it's not yet migrated or has no migrations, we use __first__
if graph and graph.leaf_nodes(dep[0]):
operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
else:
operation_dependencies.add((dep[0], "__first__"))
else:
deps_satisfied = False
if deps_satisfied:
chopped.append(operation)
dependencies.update(operation_dependencies)
del self.generated_operations[app_label][0]
else:
break
# Make a migration! Well, only if there's stuff to put in it
if dependencies or chopped:
if not self.generated_operations[app_label] or chop_mode:
subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
instance.dependencies = list(dependencies)
instance.operations = chopped
instance.initial = app_label not in self.existing_apps
self.migrations.setdefault(app_label, []).append(instance)
chop_mode = False
else:
self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
new_num_ops = sum(len(x) for x in self.generated_operations.values())
if new_num_ops == num_ops:
if not chop_mode:
chop_mode = True
else:
raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
num_ops = new_num_ops
def _sort_migrations(self):
"""
Reorder to make things possible. Reordering may be needed so FKs work
nicely inside the same app.
"""
for app_label, ops in sorted(self.generated_operations.items()):
# construct a dependency graph for intra-app dependencies
dependency_graph = {op: set() for op in ops}
for op in ops:
for dep in op._auto_deps:
# Resolve intra-app dependencies to handle circular
# references involving a swappable model.
dep = self._resolve_dependency(dep)[0]
if dep[0] == app_label:
for op2 in ops:
if self.check_dependency(op2, dep):
dependency_graph[op].add(op2)
# we use a stable sort for deterministic tests & general behavior
self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)
def _optimize_migrations(self):
# Add in internal dependencies among the migrations
for app_label, migrations in self.migrations.items():
for m1, m2 in zip(migrations, migrations[1:]):
m2.dependencies.append((app_label, m1.name))
# De-dupe dependencies
for migrations in self.migrations.values():
for migration in migrations:
migration.dependencies = list(set(migration.dependencies))
# Optimize migrations
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.operations = MigrationOptimizer().optimize(migration.operations, app_label)
def check_dependency(self, operation, dependency):
"""
Return True if the given operation depends on the given dependency,
False otherwise.
"""
# Created model
if dependency[2] is None and dependency[3] is True:
return (
isinstance(operation, operations.CreateModel) and
operation.name_lower == dependency[1].lower()
)
# Created field
elif dependency[2] is not None and dependency[3] is True:
return (
(
isinstance(operation, operations.CreateModel) and
operation.name_lower == dependency[1].lower() and
any(dependency[2] == x for x, y in operation.fields)
) or
(
isinstance(operation, operations.AddField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
)
# Removed field
elif dependency[2] is not None and dependency[3] is False:
return (
isinstance(operation, operations.RemoveField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
# Removed model
elif dependency[2] is None and dependency[3] is False:
return (
isinstance(operation, operations.DeleteModel) and
operation.name_lower == dependency[1].lower()
)
# Field being altered
elif dependency[2] is not None and dependency[3] == "alter":
return (
isinstance(operation, operations.AlterField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
# order_with_respect_to being unset for a field
elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
return (
isinstance(operation, operations.AlterOrderWithRespectTo) and
operation.name_lower == dependency[1].lower() and
(operation.order_with_respect_to or "").lower() != dependency[2].lower()
)
# Field is removed and part of an index/unique_together
elif dependency[2] is not None and dependency[3] == "foo_together_change":
return (
isinstance(operation, (operations.AlterUniqueTogether,
operations.AlterIndexTogether)) and
operation.name_lower == dependency[1].lower()
)
# Unknown dependency. Raise an error.
else:
raise ValueError("Can't handle dependency %r" % (dependency,))
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
# Dependencies are (app_label, model_name, field_name, create/delete as True/False)
operation._auto_deps = dependencies or []
if beginning:
self.generated_operations.setdefault(app_label, []).insert(0, operation)
else:
self.generated_operations.setdefault(app_label, []).append(operation)
def swappable_first_key(self, item):
"""
Place potential swappable models first in lists of created models (only
real way to solve #22783).
"""
try:
model = self.new_apps.get_model(item[0], item[1])
base_names = [base.__name__ for base in model.__bases__]
string_version = "%s.%s" % (item[0], item[1])
if (
model._meta.swappable or
"AbstractUser" in base_names or
"AbstractBaseUser" in base_names or
settings.AUTH_USER_MODEL.lower() == string_version.lower()
):
return ("___" + item[0], "___" + item[1])
except LookupError:
pass
return item
def generate_renamed_models(self):
"""
Find any renamed models, generate the operations for them, and remove
the old entry from the model lists. Must be run before other
model-level generation.
"""
self.renamed_models = {}
self.renamed_models_rel = {}
added_models = self.new_model_keys - self.old_model_keys
for app_label, model_name in sorted(added_models):
model_state = self.to_state.models[app_label, model_name]
model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
removed_models = self.old_model_keys - self.new_model_keys
for rem_app_label, rem_model_name in removed_models:
if rem_app_label == app_label:
rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
if model_fields_def == rem_model_fields_def:
if self.questioner.ask_rename_model(rem_model_state, model_state):
model_opts = self.new_apps.get_model(app_label, model_name)._meta
dependencies = []
for field in model_opts.get_fields():
if field.is_relation:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
self.add_operation(
app_label,
operations.RenameModel(
old_name=rem_model_state.name,
new_name=model_state.name,
),
dependencies=dependencies,
)
self.renamed_models[app_label, model_name] = rem_model_name
renamed_models_rel_key = '%s.%s' % (
rem_model_state.app_label,
rem_model_state.name_lower,
)
self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % (
model_state.app_label,
model_state.name_lower,
)
self.old_model_keys.remove((rem_app_label, rem_model_name))
self.old_model_keys.add((app_label, model_name))
break
def generate_created_models(self):
"""
Find all new models (both managed and unmanaged) and make create
operations for them as well as separate operations to create any
foreign key or M2M relationships (these are optimized later, if
possible).
Defer any model options that refer to collections of fields that might
be deferred (e.g. unique_together, index_together).
"""
old_keys = self.old_model_keys | self.old_unmanaged_keys
added_models = self.new_model_keys - old_keys
added_unmanaged_models = self.new_unmanaged_keys - old_keys
all_added_models = chain(
sorted(added_models, key=self.swappable_first_key, reverse=True),
sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
)
for app_label, model_name in all_added_models:
model_state = self.to_state.models[app_label, model_name]
model_opts = self.new_apps.get_model(app_label, model_name)._meta
# Gather related fields
related_fields = {}
primary_key_rel = None
for field in model_opts.local_fields:
if field.remote_field:
if field.remote_field.model:
if field.primary_key:
primary_key_rel = field.remote_field.model
elif not field.remote_field.parent_link:
related_fields[field.name] = field
                    # through will be None on M2Ms on swapped-out models;
# we can treat lack of through as auto_created=True, though.
if (getattr(field.remote_field, "through", None) and
not field.remote_field.through._meta.auto_created):
related_fields[field.name] = field
for field in model_opts.local_many_to_many:
if field.remote_field.model:
related_fields[field.name] = field
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
related_fields[field.name] = field
# Are there indexes/unique|index_together to defer?
indexes = model_state.options.pop('indexes')
constraints = model_state.options.pop('constraints')
unique_together = model_state.options.pop('unique_together', None)
index_together = model_state.options.pop('index_together', None)
order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
# Depend on the deletion of any possible proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Depend on the other end of the primary key if it's a relation
if primary_key_rel:
dependencies.append((
primary_key_rel._meta.app_label,
primary_key_rel._meta.object_name,
None,
True
))
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[d for d in model_state.fields if d[0] not in related_fields],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
dependencies=dependencies,
beginning=True,
)
# Don't add operations which modify the database for unmanaged models
if not model_opts.managed:
continue
# Generate operations for each related field
for name, field in sorted(related_fields.items()):
dependencies = self._get_dependencies_for_foreign_key(field)
# Depend on our own model being created
dependencies.append((app_label, model_name, None, True))
# Make operation
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=name,
field=field,
),
dependencies=list(set(dependencies)),
)
            # Generate other operations
related_dependencies = [
(app_label, model_name, name, True)
for name in sorted(related_fields)
]
related_dependencies.append((app_label, model_name, None, True))
for index in indexes:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
dependencies=related_dependencies,
)
for constraint in constraints:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
dependencies=related_dependencies,
)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=unique_together,
),
dependencies=related_dependencies
)
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=index_together,
),
dependencies=related_dependencies
)
if order_with_respect_to:
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=order_with_respect_to,
),
dependencies=[
(app_label, model_name, order_with_respect_to, True),
(app_label, model_name, None, True),
]
)
# Fix relationships if the model changed from a proxy model to a
# concrete model.
if (app_label, model_name) in self.old_proxy_keys:
for related_object in model_opts.related_objects:
self.add_operation(
related_object.related_model._meta.app_label,
operations.AlterField(
model_name=related_object.related_model._meta.object_name,
name=related_object.field.name,
field=related_object.field,
),
dependencies=[(app_label, model_name, None, True)],
)
def generate_created_proxies(self):
"""
        Make CreateModel statements for proxy models. Use the same statements
        as for concrete models to avoid code duplication, but for proxy models
        it's safe to skip all the field handling and just emit the operation.
"""
added = self.new_proxy_keys - self.old_proxy_keys
for app_label, model_name in sorted(added):
model_state = self.to_state.models[app_label, model_name]
assert model_state.options.get("proxy")
# Depend on the deletion of any possible non-proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
# Depend on the deletion of any possible non-proxy version of us
dependencies=dependencies,
)
def generate_deleted_models(self):
"""
Find all deleted models (managed and unmanaged) and make delete
operations for them as well as separate operations to delete any
foreign key or M2M relationships (these are optimized later, if
possible).
Also bring forward removal of any model options that refer to
collections of fields - the inverse of generate_created_models().
"""
new_keys = self.new_model_keys | self.new_unmanaged_keys
deleted_models = self.old_model_keys - new_keys
deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models))
for app_label, model_name in all_deleted_models:
model_state = self.from_state.models[app_label, model_name]
model = self.old_apps.get_model(app_label, model_name)
# Gather related fields
related_fields = {}
for field in model._meta.local_fields:
if field.remote_field:
if field.remote_field.model:
related_fields[field.name] = field
                    # through will be None on M2Ms on swapped-out models;
# we can treat lack of through as auto_created=True, though.
if (getattr(field.remote_field, "through", None) and
not field.remote_field.through._meta.auto_created):
related_fields[field.name] = field
for field in model._meta.local_many_to_many:
if field.remote_field.model:
related_fields[field.name] = field
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
related_fields[field.name] = field
# Generate option removal first
unique_together = model_state.options.pop('unique_together', None)
index_together = model_state.options.pop('index_together', None)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=None,
)
)
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=None,
)
)
# Then remove each related field
for name in sorted(related_fields):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=name,
)
)
# Finally, remove the model.
# This depends on both the removal/alteration of all incoming fields
# and the removal of all its own related fields, and if it's
# a through model the field that references it.
dependencies = []
for related_object in model._meta.related_objects:
related_object_app_label = related_object.related_model._meta.app_label
object_name = related_object.related_model._meta.object_name
field_name = related_object.field.name
dependencies.append((related_object_app_label, object_name, field_name, False))
if not related_object.many_to_many:
dependencies.append((related_object_app_label, object_name, field_name, "alter"))
for name in sorted(related_fields):
dependencies.append((app_label, model_name, name, False))
# We're referenced in another field's through=
through_user = self.through_users.get((app_label, model_state.name_lower))
if through_user:
dependencies.append((through_user[0], through_user[1], through_user[2], False))
# Finally, make the operation, deduping any dependencies
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
dependencies=list(set(dependencies)),
)
def generate_deleted_proxies(self):
"""Make DeleteModel options for proxy models."""
deleted = self.old_proxy_keys - self.new_proxy_keys
for app_label, model_name in sorted(deleted):
model_state = self.from_state.models[app_label, model_name]
assert model_state.options.get("proxy")
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
)
def generate_renamed_fields(self):
"""Work out renamed fields."""
self.renamed_fields = {}
for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
# Scan to see if this is actually a rename!
field_dec = self.deep_deconstruct(field)
for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
if rem_app_label == app_label and rem_model_name == model_name:
old_field = old_model_state.get_field_by_name(rem_field_name)
old_field_dec = self.deep_deconstruct(old_field)
if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]:
old_rel_to = old_field_dec[2]['to']
if old_rel_to in self.renamed_models_rel:
old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
old_field.set_attributes_from_name(rem_field_name)
old_db_column = old_field.get_attname_column()[1]
if (old_field_dec == field_dec or (
# Was the field renamed and db_column equal to the
# old field's column added?
old_field_dec[0:2] == field_dec[0:2] and
dict(old_field_dec[2], db_column=old_db_column) == field_dec[2])):
if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
self.add_operation(
app_label,
operations.RenameField(
model_name=model_name,
old_name=rem_field_name,
new_name=field_name,
)
)
self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
self.old_field_keys.add((app_label, model_name, field_name))
self.renamed_fields[app_label, model_name, field_name] = rem_field_name
break
def generate_added_fields(self):
"""Make AddField operations."""
for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
self._generate_added_field(app_label, model_name, field_name)
def _generate_added_field(self, app_label, model_name, field_name):
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
# Fields that are foreignkeys/m2ms depend on stuff
dependencies = []
if field.remote_field and field.remote_field.model:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
# You can't just add NOT NULL fields with no default or fields
# which don't allow empty strings as default.
time_fields = (models.DateField, models.DateTimeField, models.TimeField)
preserve_default = (
field.null or field.has_default() or field.many_to_many or
(field.blank and field.empty_strings_allowed) or
(isinstance(field, time_fields) and field.auto_now)
)
if not preserve_default:
field = field.clone()
if isinstance(field, time_fields) and field.auto_now_add:
field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
else:
field.default = self.questioner.ask_not_null_addition(field_name, model_name)
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
def generate_removed_fields(self):
"""Make RemoveField operations."""
for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys):
self._generate_removed_field(app_label, model_name, field_name)
def _generate_removed_field(self, app_label, model_name, field_name):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=field_name,
),
# We might need to depend on the removal of an
# order_with_respect_to or index/unique_together operation;
# this is safely ignored if there isn't one
dependencies=[
(app_label, model_name, field_name, "order_wrt_unset"),
(app_label, model_name, field_name, "foo_together_change"),
],
)
def generate_altered_fields(self):
"""
        Make AlterField operations, or possibly RemoveField/AddField if alter
        isn't possible.
"""
for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys):
# Did the field change?
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name)
new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
dependencies = []
# Implement any model renames on relations; these are handled by RenameModel
# so we need to exclude them from the comparison
if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None):
rename_key = (
new_field.remote_field.model._meta.app_label,
new_field.remote_field.model._meta.model_name,
)
if rename_key in self.renamed_models:
new_field.remote_field.model = old_field.remote_field.model
# Handle ForeignKey which can only have a single to_field.
remote_field_name = getattr(new_field.remote_field, 'field_name', None)
if remote_field_name:
to_field_rename_key = rename_key + (remote_field_name,)
if to_field_rename_key in self.renamed_fields:
# Repoint both model and field name because to_field
# inclusion in ForeignKey.deconstruct() is based on
# both.
new_field.remote_field.model = old_field.remote_field.model
new_field.remote_field.field_name = old_field.remote_field.field_name
# Handle ForeignObjects which can have multiple from_fields/to_fields.
from_fields = getattr(new_field, 'from_fields', None)
if from_fields:
from_rename_key = (app_label, model_name)
new_field.from_fields = tuple([
self.renamed_fields.get(from_rename_key + (from_field,), from_field)
for from_field in from_fields
])
new_field.to_fields = tuple([
self.renamed_fields.get(rename_key + (to_field,), to_field)
for to_field in new_field.to_fields
])
dependencies.extend(self._get_dependencies_for_foreign_key(new_field))
if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None):
rename_key = (
new_field.remote_field.through._meta.app_label,
new_field.remote_field.through._meta.model_name,
)
if rename_key in self.renamed_models:
new_field.remote_field.through = old_field.remote_field.through
old_field_dec = self.deep_deconstruct(old_field)
new_field_dec = self.deep_deconstruct(new_field)
if old_field_dec != new_field_dec:
both_m2m = old_field.many_to_many and new_field.many_to_many
neither_m2m = not old_field.many_to_many and not new_field.many_to_many
if both_m2m or neither_m2m:
# Either both fields are m2m or neither is
preserve_default = True
if (old_field.null and not new_field.null and not new_field.has_default() and
not new_field.many_to_many):
field = new_field.clone()
new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
if new_default is not models.NOT_PROVIDED:
field.default = new_default
preserve_default = False
else:
field = new_field
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
else:
# We cannot alter between m2m and concrete fields
self._generate_removed_field(app_label, model_name, field_name)
self._generate_added_field(app_label, model_name, field_name)
def create_altered_indexes(self):
option_name = operations.AddIndex.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_indexes = old_model_state.options[option_name]
new_indexes = new_model_state.options[option_name]
add_idx = [idx for idx in new_indexes if idx not in old_indexes]
rem_idx = [idx for idx in old_indexes if idx not in new_indexes]
self.altered_indexes.update({
(app_label, model_name): {
'added_indexes': add_idx, 'removed_indexes': rem_idx,
}
})
def generate_added_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes['added_indexes']:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
)
)
def generate_removed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes['removed_indexes']:
self.add_operation(
app_label,
operations.RemoveIndex(
model_name=model_name,
name=index.name,
)
)
def create_altered_constraints(self):
option_name = operations.AddConstraint.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_constraints = old_model_state.options[option_name]
new_constraints = new_model_state.options[option_name]
add_constraints = [c for c in new_constraints if c not in old_constraints]
rem_constraints = [c for c in old_constraints if c not in new_constraints]
self.altered_constraints.update({
(app_label, model_name): {
'added_constraints': add_constraints, 'removed_constraints': rem_constraints,
}
})
def generate_added_constraints(self):
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints['added_constraints']:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
)
)
def generate_removed_constraints(self):
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints['removed_constraints']:
self.add_operation(
app_label,
operations.RemoveConstraint(
model_name=model_name,
name=constraint.name,
)
)
def _get_dependencies_for_foreign_key(self, field):
# Account for FKs to swappable models
swappable_setting = getattr(field, 'swappable_setting', None)
if swappable_setting is not None:
dep_app_label = "__setting__"
dep_object_name = swappable_setting
else:
dep_app_label = field.remote_field.model._meta.app_label
dep_object_name = field.remote_field.model._meta.object_name
dependencies = [(dep_app_label, dep_object_name, None, True)]
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
dependencies.append((
field.remote_field.through._meta.app_label,
field.remote_field.through._meta.object_name,
None,
True,
))
return dependencies
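    # Illustrative sketch (hypothetical model names): a plain ForeignKey to
    # blog.Author yields [('blog', 'Author', None, True)]; a ForeignKey to a
    # swappable model yields [('__setting__', 'AUTH_USER_MODEL', None, True)];
    # and an explicit, non-auto-created through model appends one more tuple
    # for that through model.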
def _generate_altered_foo_together(self, operation):
option_name = operation.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
# We run the old version through the field renames to account for those
old_value = old_model_state.options.get(option_name)
old_value = {
tuple(
self.renamed_fields.get((app_label, model_name, n), n)
for n in unique
)
for unique in old_value
} if old_value else set()
new_value = new_model_state.options.get(option_name)
new_value = set(new_value) if new_value else set()
if old_value != new_value:
dependencies = []
for foo_togethers in new_value:
for field_name in foo_togethers:
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
if field.remote_field and field.remote_field.model:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
self.add_operation(
app_label,
operation(
name=model_name,
**{option_name: new_value}
),
dependencies=dependencies,
)
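    # For illustration (hypothetical field names): if unique_together changes
    # from {('user', 'slug')} to {('user', 'title')}, a single
    # AlterUniqueTogether operation carrying the new value is emitted, with FK
    # dependencies collected from any relational fields named in it.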
def generate_altered_unique_together(self):
self._generate_altered_foo_together(operations.AlterUniqueTogether)
def generate_altered_index_together(self):
self._generate_altered_foo_together(operations.AlterIndexTogether)
def generate_altered_db_table(self):
models_to_check = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_name = old_model_state.options.get('db_table')
new_db_table_name = new_model_state.options.get('db_table')
if old_db_table_name != new_db_table_name:
self.add_operation(
app_label,
operations.AlterModelTable(
name=model_name,
table=new_db_table_name,
)
)
def generate_altered_options(self):
"""
Work out if any non-schema-affecting options have changed and make an
operation to represent them in state changes (in case Python code in
migrations needs them).
"""
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys,
self.kept_unmanaged_keys,
# unmanaged converted to managed
self.old_unmanaged_keys & self.new_model_keys,
# managed converted to unmanaged
self.old_model_keys & self.new_unmanaged_keys,
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_options = {
key: value for key, value in old_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
new_options = {
key: value for key, value in new_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
if old_options != new_options:
self.add_operation(
app_label,
operations.AlterModelOptions(
name=model_name,
options=new_options,
)
)
def generate_altered_order_with_respect_to(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if (old_model_state.options.get("order_with_respect_to") !=
new_model_state.options.get("order_with_respect_to")):
# Make sure it comes second if we're adding
# (removal dependency is part of RemoveField)
dependencies = []
if new_model_state.options.get("order_with_respect_to"):
dependencies.append((
app_label,
model_name,
new_model_state.options["order_with_respect_to"],
True,
))
# Actually generate the operation
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=new_model_state.options.get('order_with_respect_to'),
),
dependencies=dependencies,
)
def generate_altered_managers(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.managers != new_model_state.managers:
self.add_operation(
app_label,
operations.AlterModelManagers(
name=model_name,
managers=new_model_state.managers,
)
)
def arrange_for_graph(self, changes, graph, migration_name=None):
"""
Take a result from changes() and a MigrationGraph, and fix the names
and dependencies of the changes so they extend the graph from the leaf
nodes for each app.
"""
leaves = graph.leaf_nodes()
name_map = {}
for app_label, migrations in list(changes.items()):
if not migrations:
continue
# Find the app label's current leaf node
app_leaf = None
for leaf in leaves:
if leaf[0] == app_label:
app_leaf = leaf
break
# Do they want an initial migration for this app?
if app_leaf is None and not self.questioner.ask_initial(app_label):
# They don't.
for migration in migrations:
name_map[(app_label, migration.name)] = (app_label, "__first__")
del changes[app_label]
continue
# Work out the next number in the sequence
if app_leaf is None:
next_number = 1
else:
next_number = (self.parse_number(app_leaf[1]) or 0) + 1
# Name each migration
for i, migration in enumerate(migrations):
if i == 0 and app_leaf:
migration.dependencies.append(app_leaf)
if i == 0 and not app_leaf:
new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
else:
new_name = "%04i_%s" % (
next_number,
migration_name or self.suggest_name(migration.operations)[:100],
)
name_map[(app_label, migration.name)] = (app_label, new_name)
next_number += 1
migration.name = new_name
# Now fix dependencies
for migrations in changes.values():
for migration in migrations:
migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
return changes
def _trim_to_apps(self, changes, app_labels):
"""
        Take changes from arrange_for_graph() and a set of app labels, and
        return a modified set of changes that trims out as many migrations not
        in app_labels as possible. Note that some other migrations may still
        be present, as they may be required dependencies.
"""
# Gather other app dependencies in a first pass
app_dependencies = {}
for app_label, migrations in changes.items():
for migration in migrations:
for dep_app_label, name in migration.dependencies:
app_dependencies.setdefault(app_label, set()).add(dep_app_label)
required_apps = set(app_labels)
# Keep resolving till there's no change
old_required_apps = None
while old_required_apps != required_apps:
old_required_apps = set(required_apps)
required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps])
# Remove all migrations that aren't needed
for app_label in list(changes):
if app_label not in required_apps:
del changes[app_label]
return changes
@classmethod
def suggest_name(cls, ops):
"""
Given a set of operations, suggest a name for the migration they might
represent. Names are not guaranteed to be unique, but put some effort
into the fallback name to avoid VCS conflicts if possible.
"""
if len(ops) == 1:
if isinstance(ops[0], operations.CreateModel):
return ops[0].name_lower
elif isinstance(ops[0], operations.DeleteModel):
return "delete_%s" % ops[0].name_lower
elif isinstance(ops[0], operations.AddField):
return "%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
elif isinstance(ops[0], operations.RemoveField):
return "remove_%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
elif ops:
if all(isinstance(o, operations.CreateModel) for o in ops):
return "_".join(sorted(o.name_lower for o in ops))
return "auto_%s" % get_migration_name_timestamp()
@classmethod
def parse_number(cls, name):
"""
Given a migration name, try to extract a number from the beginning of
it. If no number is found, return None.
"""
match = re.match(r'^\d+', name)
if match:
return int(match.group())
return None
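# Minimal usage sketch, not part of this module (assumes a configured Django
# project); this mirrors roughly what the makemigrations command does:
#
#     from django.apps import apps
#     from django.db.migrations.autodetector import MigrationAutodetector
#     from django.db.migrations.loader import MigrationLoader
#     from django.db.migrations.state import ProjectState
#
#     loader = MigrationLoader(None, ignore_no_migrations=True)
#     autodetector = MigrationAutodetector(
#         loader.project_state(),
#         ProjectState.from_apps(apps),
#     )
#     changes = autodetector.changes(graph=loader.graph)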
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.state import ModelState
from django.db.models.options import normalize_together
from django.utils.functional import cached_property
from .fields import (
AddField, AlterField, FieldOperation, RemoveField, RenameField,
)
from .utils import field_references, get_references, resolve_relation
def _check_for_duplicates(arg_name, objs):
used_vals = set()
for val in objs:
if val in used_vals:
raise ValueError(
"Found duplicate value %s in CreateModel %s argument." % (val, arg_name)
)
used_vals.add(val)
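# For illustration: this guards CreateModel against repeated names, e.g.
# _check_for_duplicates('fields', ['id', 'name', 'name']) raises
# ValueError("Found duplicate value name in CreateModel fields argument.").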
class ModelOperation(Operation):
def __init__(self, name):
self.name = name
@cached_property
def name_lower(self):
return self.name.lower()
def references_model(self, name, app_label):
return name.lower() == self.name_lower
def reduce(self, operation, app_label):
return (
super().reduce(operation, app_label) or
not operation.references_model(self.name, app_label)
)
class CreateModel(ModelOperation):
"""Create a model's table."""
serialization_expand_args = ['fields', 'options', 'managers']
def __init__(self, name, fields, options=None, bases=None, managers=None):
self.fields = fields
self.options = options or {}
self.bases = bases or (models.Model,)
self.managers = managers or []
super().__init__(name)
# Sanity-check that there are no duplicated field names, bases, or
# manager names
_check_for_duplicates('fields', (name for name, _ in self.fields))
_check_for_duplicates('bases', (
base._meta.label_lower if hasattr(base, '_meta') else
base.lower() if isinstance(base, str) else base
for base in self.bases
))
_check_for_duplicates('managers', (name for name, _ in self.managers))
def deconstruct(self):
kwargs = {
'name': self.name,
'fields': self.fields,
}
if self.options:
kwargs['options'] = self.options
if self.bases and self.bases != (models.Model,):
kwargs['bases'] = self.bases
if self.managers and self.managers != [('objects', models.Manager())]:
kwargs['managers'] = self.managers
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.add_model(ModelState(
app_label,
self.name,
list(self.fields),
dict(self.options),
tuple(self.bases),
list(self.managers),
))
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def describe(self):
return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name)
def references_model(self, name, app_label):
name_lower = name.lower()
if name_lower == self.name_lower:
return True
# Check we didn't inherit from the model
reference_model_tuple = (app_label, name_lower)
for base in self.bases:
if (base is not models.Model and isinstance(base, (models.base.ModelBase, str)) and
resolve_relation(base, app_label) == reference_model_tuple):
return True
# Check we have no FKs/M2Ms with it
for _name, field in self.fields:
if field_references((app_label, self.name_lower), field, reference_model_tuple):
return True
return False
def reduce(self, operation, app_label):
if (isinstance(operation, DeleteModel) and
self.name_lower == operation.name_lower and
not self.options.get("proxy", False)):
return []
elif isinstance(operation, RenameModel) and self.name_lower == operation.old_name_lower:
return [
CreateModel(
operation.new_name,
fields=self.fields,
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterModelOptions) and self.name_lower == operation.name_lower:
return [
CreateModel(
self.name,
fields=self.fields,
options={**self.options, **operation.options},
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterTogetherOptionOperation) and self.name_lower == operation.name_lower:
return [
CreateModel(
self.name,
fields=self.fields,
options={**self.options, **{operation.option_name: operation.option_value}},
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterOrderWithRespectTo) and self.name_lower == operation.name_lower:
return [
CreateModel(
self.name,
fields=self.fields,
options={**self.options, 'order_with_respect_to': operation.order_with_respect_to},
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, FieldOperation) and self.name_lower == operation.model_name_lower:
if isinstance(operation, AddField):
return [
CreateModel(
self.name,
fields=self.fields + [(operation.name, operation.field)],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterField):
return [
CreateModel(
self.name,
fields=[
(n, operation.field if n == operation.name else v)
for n, v in self.fields
],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RemoveField):
options = self.options.copy()
for option_name in ('unique_together', 'index_together'):
option = options.pop(option_name, None)
if option:
option = set(filter(bool, (
tuple(f for f in fields if f != operation.name_lower) for fields in option
)))
if option:
options[option_name] = option
order_with_respect_to = options.get('order_with_respect_to')
if order_with_respect_to == operation.name_lower:
del options['order_with_respect_to']
return [
CreateModel(
self.name,
fields=[
(n, v)
for n, v in self.fields
if n.lower() != operation.name_lower
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RenameField):
options = self.options.copy()
for option_name in ('unique_together', 'index_together'):
option = options.get(option_name)
if option:
options[option_name] = {
tuple(operation.new_name if f == operation.old_name else f for f in fields)
for fields in option
}
order_with_respect_to = options.get('order_with_respect_to')
if order_with_respect_to == operation.old_name:
options['order_with_respect_to'] = operation.new_name
return [
CreateModel(
self.name,
fields=[
(operation.new_name if n == operation.old_name else n, v)
for n, v in self.fields
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
return super().reduce(operation, app_label)
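# Illustrative sketch (hypothetical model/field names): reduce() is what lets
# the migration optimizer fold follow-up operations into the CreateModel, e.g.
# CreateModel('Author', fields=[...]) followed by
# AddField('author', 'age', models.IntegerField()) collapses into a single
# CreateModel whose field list also contains 'age':
#
#     from django.db.migrations.optimizer import MigrationOptimizer
#     MigrationOptimizer().optimize([create_author, add_age], app_label='blog')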
class DeleteModel(ModelOperation):
"""Drop a model's table."""
def deconstruct(self):
kwargs = {
'name': self.name,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.remove_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def references_model(self, name, app_label):
# The deleted model could be referencing the specified model through
# related fields.
return True
def describe(self):
return "Delete model %s" % self.name
class RenameModel(ModelOperation):
"""Rename a model."""
def __init__(self, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
super().__init__(old_name)
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
'old_name': self.old_name,
'new_name': self.new_name,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
# Add a new model.
renamed_model = state.models[app_label, self.old_name_lower].clone()
renamed_model.name = self.new_name
state.models[app_label, self.new_name_lower] = renamed_model
# Repoint all fields pointing to the old model to the new one.
old_model_tuple = (app_label, self.old_name_lower)
new_remote_model = '%s.%s' % (app_label, self.new_name)
to_reload = set()
for model_state, index, name, field, reference in get_references(state, old_model_tuple):
changed_field = None
if reference.to:
changed_field = field.clone()
changed_field.remote_field.model = new_remote_model
if reference.through:
if changed_field is None:
changed_field = field.clone()
changed_field.remote_field.through = new_remote_model
if changed_field:
model_state.fields[index] = name, changed_field
to_reload.add((model_state.app_label, model_state.name_lower))
# Reload models related to old model before removing the old model.
state.reload_models(to_reload, delay=True)
# Remove the old model.
state.remove_model(app_label, self.old_name_lower)
state.reload_model(app_label, self.new_name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.new_name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.old_name)
# Move the main table
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Alter the fields pointing to us
for related_object in old_model._meta.related_objects:
if related_object.related_model == old_model:
model = new_model
related_key = (app_label, self.new_name_lower)
else:
model = related_object.related_model
related_key = (
related_object.related_model._meta.app_label,
related_object.related_model._meta.model_name,
)
to_field = to_state.apps.get_model(
*related_key
)._meta.get_field(related_object.field.name)
schema_editor.alter_field(
model,
related_object.field,
to_field,
)
# Rename M2M fields whose name is based on this model's name.
fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many)
for (old_field, new_field) in fields:
# Skip self-referential fields as these are renamed above.
if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created:
continue
# Rename the M2M table that's based on this model's name.
old_m2m_model = old_field.remote_field.through
new_m2m_model = new_field.remote_field.through
schema_editor.alter_db_table(
new_m2m_model,
old_m2m_model._meta.db_table,
new_m2m_model._meta.db_table,
)
# Rename the column in the M2M table that's based on this
# model's name.
schema_editor.alter_field(
new_m2m_model,
old_m2m_model._meta.get_field(old_model._meta.model_name),
new_m2m_model._meta.get_field(new_model._meta.model_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
self.new_name, self.old_name = self.old_name, self.new_name
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
self.new_name, self.old_name = self.old_name, self.new_name
def references_model(self, name, app_label):
return (
name.lower() == self.old_name_lower or
name.lower() == self.new_name_lower
)
def describe(self):
return "Rename model %s to %s" % (self.old_name, self.new_name)
def reduce(self, operation, app_label):
if (isinstance(operation, RenameModel) and
self.new_name_lower == operation.old_name_lower):
return [
RenameModel(
self.old_name,
operation.new_name,
),
]
# Skip `ModelOperation.reduce` as we want to run `references_model`
# against self.new_name.
return (
super(ModelOperation, self).reduce(operation, app_label) or
not operation.references_model(self.new_name, app_label)
)
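# For illustration (hypothetical names): consecutive renames collapse, so
# RenameModel('Author', 'Writer') followed by RenameModel('Writer', 'Novelist')
# reduces to a single RenameModel('Author', 'Novelist').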
class ModelOptionOperation(ModelOperation):
def reduce(self, operation, app_label):
if isinstance(operation, (self.__class__, DeleteModel)) and self.name_lower == operation.name_lower:
return [operation]
return super().reduce(operation, app_label)
class AlterModelTable(ModelOptionOperation):
"""Rename a model's table."""
def __init__(self, name, table):
self.table = table
super().__init__(name)
def deconstruct(self):
kwargs = {
'name': self.name,
'table': self.table,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.models[app_label, self.name_lower].options["db_table"] = self.table
state.reload_model(app_label, self.name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Rename M2M fields whose name is based on this model's db_table
for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many):
if new_field.remote_field.through._meta.auto_created:
schema_editor.alter_db_table(
new_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return "Rename table for %s to %s" % (
self.name,
self.table if self.table is not None else "(default)"
)
class AlterTogetherOptionOperation(ModelOptionOperation):
option_name = None
def __init__(self, name, option_value):
if option_value:
option_value = set(normalize_together(option_value))
setattr(self, self.option_name, option_value)
super().__init__(name)
@cached_property
def option_value(self):
return getattr(self, self.option_name)
def deconstruct(self):
kwargs = {
'name': self.name,
self.option_name: self.option_value,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options[self.option_name] = self.option_value
state.reload_model(app_label, self.name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
alter_together = getattr(schema_editor, 'alter_%s' % self.option_name)
alter_together(
new_model,
getattr(old_model._meta, self.option_name, set()),
getattr(new_model._meta, self.option_name, set()),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return (
self.references_model(model_name, app_label) and
(
not self.option_value or
any((name in fields) for fields in self.option_value)
)
)
def describe(self):
return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.option_value or ''))
class AlterUniqueTogether(AlterTogetherOptionOperation):
"""
Change the value of unique_together to the target one.
Input value of unique_together must be a set of tuples.
"""
option_name = 'unique_together'
def __init__(self, name, unique_together):
super().__init__(name, unique_together)
class AlterIndexTogether(AlterTogetherOptionOperation):
"""
Change the value of index_together to the target one.
Input value of index_together must be a set of tuples.
"""
option_name = "index_together"
def __init__(self, name, index_together):
super().__init__(name, index_together)
class AlterOrderWithRespectTo(ModelOptionOperation):
"""Represent a change with the order_with_respect_to option."""
option_name = 'order_with_respect_to'
def __init__(self, name, order_with_respect_to):
self.order_with_respect_to = order_with_respect_to
super().__init__(name)
def deconstruct(self):
kwargs = {
'name': self.name,
'order_with_respect_to': self.order_with_respect_to,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options['order_with_respect_to'] = self.order_with_respect_to
state.reload_model(app_label, self.name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.name)
# Remove a field if we need to
if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to:
schema_editor.remove_field(from_model, from_model._meta.get_field("_order"))
# Add a field if we need to (altering the column is untouched as
# it's likely a rename)
elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to:
field = to_model._meta.get_field("_order")
if not field.has_default():
field.default = 0
schema_editor.add_field(
from_model,
field,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return (
self.references_model(model_name, app_label) and
(
self.order_with_respect_to is None or
name == self.order_with_respect_to
)
)
def describe(self):
return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to)
class AlterModelOptions(ModelOptionOperation):
"""
Set new model options that don't directly affect the database schema
(like verbose_name, permissions, ordering). Python code in migrations
may still need them.
"""
# Model options we want to compare and preserve in an AlterModelOptions op
ALTER_OPTION_KEYS = [
"base_manager_name",
"default_manager_name",
"default_related_name",
"get_latest_by",
"managed",
"ordering",
"permissions",
"default_permissions",
"select_on_save",
"verbose_name",
"verbose_name_plural",
]
def __init__(self, name, options):
self.options = options
super().__init__(name)
def deconstruct(self):
kwargs = {
'name': self.name,
'options': self.options,
}
return (
self.__class__.__qualname__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options = {**model_state.options, **self.options}
for key in self.ALTER_OPTION_KEYS:
if key not in self.options:
model_state.options.pop(key, False)
state.reload_model(app_label, self.name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change Meta options on %s" % self.name
class AlterModelManagers(ModelOptionOperation):
"""Alter the model's managers."""
serialization_expand_args = ['managers']
def __init__(self, name, managers):
self.managers = managers
super().__init__(name)
def deconstruct(self):
return (
self.__class__.__qualname__,
[self.name, self.managers],
{}
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.managers = list(self.managers)
state.reload_model(app_label, self.name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change managers on %s" % self.name
class IndexOperation(Operation):
option_name = 'indexes'
@cached_property
def model_name_lower(self):
return self.model_name.lower()
class AddIndex(IndexOperation):
"""Add an index on a model."""
def __init__(self, model_name, index):
self.model_name = model_name
if not index.name:
raise ValueError(
"Indexes passed to AddIndex operations require a name "
"argument. %r doesn't have one." % index
)
self.index = index
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
model_state.options[self.option_name] = [*model_state.options[self.option_name], self.index.clone()]
state.reload_model(app_label, self.model_name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_index(model, self.index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_index(model, self.index)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'index': self.index,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
return 'Create index %s on field(s) %s of model %s' % (
self.index.name,
', '.join(self.index.fields),
self.model_name,
)
class RemoveIndex(IndexOperation):
"""Remove an index from a model."""
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
indexes = model_state.options[self.option_name]
model_state.options[self.option_name] = [idx for idx in indexes if idx.name != self.name]
state.reload_model(app_label, self.model_name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
index = from_model_state.get_index_by_name(self.name)
schema_editor.remove_index(model, index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
index = to_model_state.get_index_by_name(self.name)
schema_editor.add_index(model, index)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
return 'Remove index %s from %s' % (self.name, self.model_name)
class AddConstraint(IndexOperation):
option_name = 'constraints'
def __init__(self, model_name, constraint):
self.model_name = model_name
self.constraint = constraint
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
model_state.options[self.option_name] = [*model_state.options[self.option_name], self.constraint]
state.reload_model(app_label, self.model_name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_constraint(model, self.constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_constraint(model, self.constraint)
def deconstruct(self):
return self.__class__.__name__, [], {
'model_name': self.model_name,
'constraint': self.constraint,
}
def describe(self):
return 'Create constraint %s on model %s' % (self.constraint.name, self.model_name)
class RemoveConstraint(IndexOperation):
option_name = 'constraints'
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
constraints = model_state.options[self.option_name]
model_state.options[self.option_name] = [c for c in constraints if c.name != self.name]
state.reload_model(app_label, self.model_name_lower, delay=True)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
constraint = from_model_state.get_constraint_by_name(self.name)
schema_editor.remove_constraint(model, constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
constraint = to_model_state.get_constraint_by_name(self.name)
schema_editor.add_constraint(model, constraint)
def deconstruct(self):
return self.__class__.__name__, [], {
'model_name': self.model_name,
'name': self.name,
}
def describe(self):
return 'Remove constraint %s from model %s' % (self.name, self.model_name)
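# Minimal usage sketch, not part of this module (hypothetical app and model):
# these operations are normally declared in a Migration and applied by the
# migration executor rather than instantiated directly:
#
#     from django.db import migrations, models
#
#     class Migration(migrations.Migration):
#         dependencies = []
#         operations = [
#             migrations.CreateModel(
#                 name='Author',
#                 fields=[('id', models.AutoField(primary_key=True))],
#             ),
#         ]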
from django.core.exceptions import FieldDoesNotExist
from django.db.models import NOT_PROVIDED
from django.utils.functional import cached_property
from .base import Operation
from .utils import field_is_referenced, field_references, get_references
class FieldOperation(Operation):
def __init__(self, model_name, name, field=None):
self.model_name = model_name
self.name = name
self.field = field
@cached_property
def model_name_lower(self):
return self.model_name.lower()
@cached_property
def name_lower(self):
return self.name.lower()
def is_same_model_operation(self, operation):
return self.model_name_lower == operation.model_name_lower
def is_same_field_operation(self, operation):
return self.is_same_model_operation(operation) and self.name_lower == operation.name_lower
def references_model(self, name, app_label):
name_lower = name.lower()
if name_lower == self.model_name_lower:
return True
if self.field:
return bool(field_references(
(app_label, self.model_name_lower), self.field, (app_label, name_lower)
))
return False
def references_field(self, model_name, name, app_label):
model_name_lower = model_name.lower()
# Check if this operation locally references the field.
if model_name_lower == self.model_name_lower:
if name == self.name:
return True
elif self.field and hasattr(self.field, 'from_fields') and name in self.field.from_fields:
return True
# Check if this operation remotely references the field.
if self.field is None:
return False
return bool(field_references(
(app_label, self.model_name_lower),
self.field,
(app_label, model_name_lower),
name,
))
def reduce(self, operation, app_label):
return (
super().reduce(operation, app_label) or
not operation.references_field(self.model_name, self.name, app_label)
)
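# For illustration: FieldOperation.reduce() lets the optimizer move this
# operation past a later one only when that later operation does not reference
# this model/field at all, so an unrelated AddField on another model never
# blocks optimization (hypothetical scenario).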
class AddField(FieldOperation):
"""Add a field to a model."""
def __init__(self, model_name, name, field, preserve_default=True):
self.preserve_default = preserve_default
super().__init__(model_name, name, field)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
'field': self.field,
}
if self.preserve_default is not True:
kwargs['preserve_default'] = self.preserve_default
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
# If preserve default is off, don't use the default for future state
if not self.preserve_default:
field = self.field.clone()
field.default = NOT_PROVIDED
else:
field = self.field
state.models[app_label, self.model_name_lower].fields.append((self.name, field))
# Delay rendering of relationships if it's not a relational field
delay = not field.is_relation
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
field = to_model._meta.get_field(self.name)
if not self.preserve_default:
field.default = self.field.default
schema_editor.add_field(
from_model,
field,
)
if not self.preserve_default:
field.default = NOT_PROVIDED
def database_backwards(self, app_label, schema_editor, from_state, to_state):
from_model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, from_model):
schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
def describe(self):
return "Add field %s to %s" % (self.name, self.model_name)
def reduce(self, operation, app_label):
if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation):
if isinstance(operation, AlterField):
return [
AddField(
model_name=self.model_name,
name=operation.name,
field=operation.field,
),
]
elif isinstance(operation, RemoveField):
return []
elif isinstance(operation, RenameField):
return [
AddField(
model_name=self.model_name,
name=operation.new_name,
field=self.field,
),
]
return super().reduce(operation, app_label)
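# Illustrative sketch (not part of the upstream module; names are
# hypothetical): the branches above collapse an AddField that is immediately
# altered or removed again:
#
#   AddField('Pony', 'pink', old).reduce(AlterField('Pony', 'pink', new), 'app')
#   # -> [AddField('Pony', 'pink', new)]
#   AddField('Pony', 'pink', old).reduce(RemoveField('Pony', 'pink'), 'app')
#   # -> [] (the two operations cancel out)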
class RemoveField(FieldOperation):
"""Remove a field from a model."""
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
new_fields = []
old_field = None
for name, instance in state.models[app_label, self.model_name_lower].fields:
if name != self.name:
new_fields.append((name, instance))
else:
old_field = instance
state.models[app_label, self.model_name_lower].fields = new_fields
# Delay rendering of relationships if it's not a relational field
delay = not old_field.is_relation
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
from_model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, from_model):
schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
def database_backwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.add_field(from_model, to_model._meta.get_field(self.name))
def describe(self):
return "Remove field %s from %s" % (self.name, self.model_name)
def reduce(self, operation, app_label):
from .models import DeleteModel
if isinstance(operation, DeleteModel) and operation.name_lower == self.model_name_lower:
return [operation]
return super().reduce(operation, app_label)
class AlterField(FieldOperation):
"""
Alter a field's database column (e.g. null, max_length) to the provided
new field.
"""
def __init__(self, model_name, name, field, preserve_default=True):
self.preserve_default = preserve_default
super().__init__(model_name, name, field)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
'field': self.field,
}
if self.preserve_default is not True:
kwargs['preserve_default'] = self.preserve_default
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
if not self.preserve_default:
field = self.field.clone()
field.default = NOT_PROVIDED
else:
field = self.field
state.models[app_label, self.model_name_lower].fields = [
(n, field if n == self.name else f)
for n, f in
state.models[app_label, self.model_name_lower].fields
]
# TODO: investigate if old relational fields must be reloaded or if it's
# sufficient if the new field is (#27737).
# Delay rendering of relationships if it's not a relational field and
# not referenced by a foreign key.
delay = (
not field.is_relation and
not field_is_referenced(
state, (app_label, self.model_name_lower), (self.name, field),
)
)
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
from_field = from_model._meta.get_field(self.name)
to_field = to_model._meta.get_field(self.name)
if not self.preserve_default:
to_field.default = self.field.default
schema_editor.alter_field(from_model, from_field, to_field)
if not self.preserve_default:
to_field.default = NOT_PROVIDED
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return "Alter field %s on %s" % (self.name, self.model_name)
def reduce(self, operation, app_label):
if isinstance(operation, RemoveField) and self.is_same_field_operation(operation):
return [operation]
elif isinstance(operation, RenameField) and self.is_same_field_operation(operation):
return [
operation,
AlterField(
model_name=self.model_name,
name=operation.new_name,
field=self.field,
),
]
return super().reduce(operation, app_label)
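# Illustrative sketch (not part of the upstream module; names are
# hypothetical): an AlterField followed by a rename of the same field reduces
# to the rename plus an AlterField targeting the new name:
#
#   AlterField('Pony', 'pink', field).reduce(RenameField('Pony', 'pink', 'rose'), 'app')
#   # -> [RenameField('Pony', 'pink', 'rose'), AlterField('Pony', 'rose', field)]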
class RenameField(FieldOperation):
"""Rename a field on the model. Might affect db_column too."""
def __init__(self, model_name, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
super().__init__(model_name, old_name)
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'old_name': self.old_name,
'new_name': self.new_name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
# Rename the field
fields = model_state.fields
found = None
for index, (name, field) in enumerate(fields):
if not found and name == self.old_name:
fields[index] = (self.new_name, field)
found = field
# Fix from_fields to refer to the new field.
from_fields = getattr(field, 'from_fields', None)
if from_fields:
field.from_fields = tuple([
self.new_name if from_field_name == self.old_name else from_field_name
for from_field_name in from_fields
])
if found is None:
raise FieldDoesNotExist(
"%s.%s has no field named '%s'" % (app_label, self.model_name, self.old_name)
)
# Fix index/unique_together to refer to the new field
options = model_state.options
for option in ('index_together', 'unique_together'):
if option in options:
options[option] = [
[self.new_name if n == self.old_name else n for n in together]
for together in options[option]
]
# Fix to_fields to refer to the new field.
delay = True
references = get_references(
state, (app_label, self.model_name_lower), (self.old_name, found),
)
for *_, field, reference in references:
delay = False
if reference.to:
remote_field, to_fields = reference.to
if getattr(remote_field, 'field_name', None) == self.old_name:
remote_field.field_name = self.new_name
if to_fields:
field.to_fields = tuple([
self.new_name if to_field_name == self.old_name else to_field_name
for to_field_name in to_fields
])
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.alter_field(
from_model,
from_model._meta.get_field(self.old_name),
to_model._meta.get_field(self.new_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.alter_field(
from_model,
from_model._meta.get_field(self.new_name),
to_model._meta.get_field(self.old_name),
)
def describe(self):
return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name)
def references_field(self, model_name, name, app_label):
return self.references_model(model_name, app_label) and (
name.lower() == self.old_name_lower or
name.lower() == self.new_name_lower
)
def reduce(self, operation, app_label):
if (isinstance(operation, RenameField) and
self.is_same_model_operation(operation) and
self.new_name_lower == operation.old_name_lower):
return [
RenameField(
self.model_name,
self.old_name,
operation.new_name,
),
]
# Skip `FieldOperation.reduce` as we want to run `references_field`
# against self.new_name.
return (
super(FieldOperation, self).reduce(operation, app_label) or
not operation.references_field(self.model_name, self.new_name, app_label)
)
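# Illustrative sketch (not part of the upstream module; names are
# hypothetical): consecutive renames of the same field chain together:
#
#   RenameField('Pony', 'pink', 'rose').reduce(RenameField('Pony', 'rose', 'red'), 'app')
#   # -> [RenameField('Pony', 'pink', 'red')]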
|
fd763324c3bfbcad7a82cb225a4a25090e4e225e6c262b0f1e7a028575c360fc | from collections import namedtuple
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
def resolve_relation(model, app_label=None, model_name=None):
"""
    Turn a model class or model reference string into a model tuple.
    app_label and model_name are used to resolve the scope of recursive and
    unscoped model relationships.
"""
if isinstance(model, str):
if model == RECURSIVE_RELATIONSHIP_CONSTANT:
if app_label is None or model_name is None:
raise TypeError(
'app_label and model_name must be provided to resolve '
'recursive relationships.'
)
return app_label, model_name
if '.' in model:
return tuple(model.lower().split('.', 1))
if app_label is None:
raise TypeError(
'app_label must be provided to resolve unscoped model '
'relationships.'
)
return app_label, model.lower()
return model._meta.app_label, model._meta.model_name
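# Illustrative sketch (not part of the upstream module; app and model names
# are hypothetical):
#
#   resolve_relation('self', app_label='app', model_name='pony')  # ('app', 'pony')
#   resolve_relation('other.Rider')                               # ('other', 'rider')
#   resolve_relation('Rider', app_label='app')                    # ('app', 'rider')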
FieldReference = namedtuple('FieldReference', 'to through')
def field_references(
model_tuple,
field,
reference_model_tuple,
reference_field_name=None,
reference_field=None,
):
"""
Return either False or a FieldReference if `field` references provided
context.
False positives can be returned if `reference_field_name` is provided
without `reference_field` because of the introspection limitation it
incurs. This should not be an issue when this function is used to determine
whether or not an optimization can take place.
"""
remote_field = field.remote_field
if not remote_field:
return False
references_to = None
references_through = None
if resolve_relation(remote_field.model, *model_tuple) == reference_model_tuple:
to_fields = getattr(field, 'to_fields', None)
if (
reference_field_name is None or
# Unspecified to_field(s).
to_fields is None or
# Reference to primary key.
(None in to_fields and (reference_field is None or reference_field.primary_key)) or
# Reference to field.
reference_field_name in to_fields
):
references_to = (remote_field, to_fields)
through = getattr(remote_field, 'through', None)
if through and resolve_relation(through, *model_tuple) == reference_model_tuple:
through_fields = remote_field.through_fields
if (
reference_field_name is None or
# Unspecified through_fields.
through_fields is None or
# Reference to field.
reference_field_name in through_fields
):
references_through = (remote_field, through_fields)
if not (references_to or references_through):
return False
return FieldReference(references_to, references_through)
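# Illustrative sketch (not part of the upstream module; labels are
# hypothetical): for an unbound ForeignKey the result is truthy only when the
# relation's target matches the reference model tuple:
#
#   from django.db import models
#   fk = models.ForeignKey('app.Rider', models.CASCADE)
#   bool(field_references(('app', 'pony'), fk, ('app', 'rider')))  # True
#   bool(field_references(('app', 'pony'), fk, ('app', 'other')))  # False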
def get_references(state, model_tuple, field_tuple=()):
"""
Generator of (model_state, index, name, field, reference) referencing
provided context.
    If field_tuple is provided, only references to this particular field of
model_tuple will be generated.
"""
for state_model_tuple, model_state in state.models.items():
for index, (name, field) in enumerate(model_state.fields):
reference = field_references(state_model_tuple, field, model_tuple, *field_tuple)
if reference:
yield model_state, index, name, field, reference
def field_is_referenced(state, model_tuple, field_tuple):
"""Return whether `field_tuple` is referenced by any state models."""
return next(get_references(state, model_tuple, field_tuple), None) is not None
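# Note (editorial, not part of the upstream module): get_references() scans
# every field of every model state in the given ProjectState, so
# field_is_referenced() is the "does anything point at this field?" check that
# AlterField.state_forwards() uses to decide whether reloading related models
# can be delayed.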
|
67b5b4aece288ad87f90cb4984dcd207d726b58eb823a5d760dfbf0bdc125909 | from django.core.exceptions import FieldDoesNotExist
from django.db import (
IntegrityError, connection, migrations, models, transaction,
)
from django.db.migrations.migration import Migration
from django.db.migrations.operations import CreateModel
from django.db.migrations.operations.fields import FieldOperation
from django.db.migrations.state import ModelState, ProjectState
from django.db.transaction import atomic
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import OperationTestBase
class Mixin:
pass
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
    Each test looks at its state change, and then its database operation,
    both forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["fields", "name"])
# And default manager not in set
operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())])
definition = operation.deconstruct()
self.assertNotIn('managers', definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
def test_create_model_with_duplicate_base(self):
message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.Pony",),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.pony",),
)
message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, UnicodeModel,),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.unicodemodel',),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.UnicodeModel',),
)
message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(models.Model, models.Model,),
)
message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(Mixin, Mixin,),
)
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards("test_crmoua", editor, project_state, new_state)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables"))
]
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmomm", editor, new_state, project_state)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
('pony_ptr', models.OneToOneField(
'test_crmoih.Pony',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
)
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmoih", editor, new_state, project_state)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
CreateModel ignores proxy models.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crprmo", editor, new_state, project_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
CreateModel ignores unmanaged models.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crummo", editor, new_state, project_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
@skipUnlessDBFeature('supports_table_check_constraints')
def test_create_model_with_constraint(self):
where = models.Q(pink__gt=2)
check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2')
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={'constraints': [check_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)")
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]['options']['constraints'], [check_constraint])
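    # Illustrative note (not part of the original test; exact SQL is
    # backend-dependent): the table is created with a constraint roughly of
    # the form CHECK ("pink" > 2) named test_constraint_pony_pink_gt_2, which
    # is why the INSERT with pink=1 above raises IntegrityError.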
def test_create_model_with_partial_unique_constraint(self):
partial_unique_constraint = models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
)
operation = migrations.CreateModel(
'Pony',
[
('id', models.AutoField(primary_key=True)),
('pink', models.IntegerField(default=3)),
('weight', models.FloatField()),
],
options={'constraints': [partial_unique_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards('test_crmo', new_state)
self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1)
# Test database alteration
self.assertTableNotExists('test_crmo_pony')
with connection.schema_editor() as editor:
operation.database_forwards('test_crmo', editor, project_state, new_state)
self.assertTableExists('test_crmo_pony')
# Test constraint works
Pony = new_state.apps.get_model('test_crmo', 'Pony')
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_crmo', editor, new_state, project_state)
self.assertTableNotExists('test_crmo_pony')
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], 'CreateModel')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint])
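    # Illustrative note (not part of the original test; exact SQL is
    # backend-dependent): on backends with partial-index support the
    # constraint above becomes roughly
    #   CREATE UNIQUE INDEX test_constraint_pony_pink_for_weight_gt_5_uniq
    #       ON test_crmo_pony (pink) WHERE weight > 5
    # so duplicates are only rejected for rows with weight > 5.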
def test_create_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
        Tests that the DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlprmo", editor, new_state, project_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_delete_mti_model(self):
project_state = self.set_up_test_model('test_dlmtimo', mti_model=True)
# Test the state alteration
operation = migrations.DeleteModel('ShetlandPony')
new_state = project_state.clone()
operation.state_forwards('test_dlmtimo', new_state)
self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models)
self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models)
# Test the database alteration
self.assertTableExists('test_dlmtimo_pony')
self.assertTableExists('test_dlmtimo_shetlandpony')
self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id')
with connection.schema_editor() as editor:
operation.database_forwards('test_dlmtimo', editor, project_state, new_state)
self.assertTableExists('test_dlmtimo_pony')
self.assertTableNotExists('test_dlmtimo_shetlandpony')
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_dlmtimo', editor, new_state, project_state)
self.assertTableExists('test_dlmtimo_pony')
self.assertTableExists('test_dlmtimo_shetlandpony')
self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id')
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate forwards
new_state = project_state.clone()
atomic_rename = connection.features.supports_atomic_references_rename
new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename)
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate backwards
original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename)
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"})
def test_rename_model_state_forwards(self):
"""
RenameModel operations shouldn't trigger the caching of rendered apps
on state without prior apps.
"""
state = ProjectState()
state.add_model(ModelState('migrations', 'Foo', []))
operation = migrations.RenameModel('Foo', 'Bar')
operation.state_forwards('migrations', state)
self.assertNotIn('apps', state.__dict__)
self.assertNotIn(('migrations', 'foo'), state.models)
self.assertIn(('migrations', 'bar'), state.models)
# Now with apps cached.
apps = state.apps
operation = migrations.RenameModel('Bar', 'Foo')
operation.state_forwards('migrations', state)
self.assertIs(state.apps, apps)
self.assertNotIn(('migrations', 'bar'), state.models)
self.assertIn(('migrations', 'foo'), state.models)
def test_rename_model_with_self_referential_fk(self):
"""
Tests the RenameModel operation on model with self referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
'self',
new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model
)
HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider')
self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards("test_rmwsrf", editor, new_state, project_state)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse")
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model,
new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model
)
# Before running the migration we have a table for Shetland Pony, not Little Horse
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id"))
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# but the Foreign keys still point at pony, not little horse
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id"))
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("ReflexivePony", fields=[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("self")),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Pony", "Pony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Rider", "Rider2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_through_model(self):
app_label = "test_rename_through"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("PonyRider", fields=[
("id", models.AutoField(primary_key=True)),
("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)),
("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)),
]),
migrations.AddField(
"Pony",
"riders",
models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"),
),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
pony = Pony.objects.create()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("PonyRider", "PonyRider2"),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
pony = Pony.objects.first()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
self.assertEqual(Pony.objects.count(), 1)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(PonyRider.objects.count(), 2)
self.assertEqual(pony.riders.count(), 2)
def test_rename_m2m_model_after_rename_field(self):
"""RenameModel renames a many-to-many column after a RenameField."""
app_label = 'test_rename_multiple'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Pony', fields=[
('id', models.AutoField(primary_key=True)),
('name', models.CharField(max_length=20)),
]),
migrations.CreateModel('Rider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('riders', models.ManyToManyField('Rider')),
]),
migrations.RenameField(model_name='pony', old_name='name', new_name='fancy_name'),
migrations.RenameModel(old_name='Rider', new_name='Jockey'),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, 'Pony')
Jockey = project_state.apps.get_model(app_label, 'Jockey')
PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
# No "no such column" error means the column was renamed correctly.
pony = Pony.objects.create(fancy_name='a good name')
jockey = Jockey.objects.create(pony=pony)
ponyrider = PonyRider.objects.create()
ponyrider.riders.add(jockey)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adfl", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, 5)
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
        Tests the AddField operation on CharField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adchfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adtxtfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
        Tests the AddField operation on BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adbinfl", project_state, [
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
])
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
# SQLite returns buffer/memoryview, cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_regr22168", editor, project_state, new_state)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adflpd", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, models.NOT_PROVIDED)
# Test the database alteration
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])
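    # Note (editorial, not part of the original test): with
    # preserve_default=False the default above is only used to fill existing
    # rows while the column is added; it is then dropped from the project
    # state, which is why the field's default reads as NOT_PROVIDED in the
    # assertion earlier in this test.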
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adflmm", editor, new_state, project_state)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field('stables').blank)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField(
"Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True)
)
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field('stables').blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies"))
])
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations("test_rmflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations("test_rmflmm", project_state, operations=operations)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations("test_rmflmmwt", project_state, operations=[
migrations.CreateModel("PonyStables", fields=[
("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)),
("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)),
]),
migrations.AddField(
"Pony", "stables",
models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables')
)
])
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2")
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"})
def test_alter_model_table_none(self):
"""
Tests the AlterModelTable operation if the table name is set to None.
"""
operation = migrations.AlterModelTable("Pony", None)
self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony")
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = 'pony_foo'
project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable"))
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
# Rename the Pony db_table which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name='pony', table=None)
operation.state_forwards(app_label, second_state)
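        # Renaming a table that other tables reference can't always run inside a
        # transaction (e.g. on older SQLite), so only use an atomic schema editor
        # where the backend supports atomic reference renames.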
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
self.assertEqual(operation.describe(), "Alter field pink on Pony")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertIs(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False)
self.assertIs(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_pk(self):
"""
Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness)
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpk", editor, new_state, project_state)
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_pk_fk(self):
"""
        Tests that the AlterField operation on a primary key changes any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField)
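        # Helper: introspect both tables and check that the FK column's type and
        # nullability still match the primary key it references.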
def assertIdTypeEqualsFkType():
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony")
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider")
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpkfk", editor, project_state, new_state)
assertIdTypeEqualsFkType()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpkfk", editor, new_state, project_state)
assertIdTypeEqualsFkType()
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self):
app_label = 'test_alflrsfkwtflttc'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.AutoField(primary_key=True)),
('code', models.PositiveIntegerField(unique=True)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.AutoField(primary_key=True)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')),
]),
])
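        # Changing the type of the to_field target (integer to char) should reload
        # Pony's state so its FK column follows the new type.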
operation = migrations.AlterField(
'Rider',
'code',
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self):
app_label = 'test_alflrsfkwtflrnttc'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.AutoField(primary_key=True)),
('code', models.PositiveIntegerField(unique=True)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.AutoField(primary_key=True)),
('rider', models.ForeignKey(
'%s.Rider' % app_label,
models.CASCADE,
to_field='code',
related_name='+',
)),
]),
])
operation = migrations.AlterField(
'Rider',
'code',
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
def test_alter_field_reloads_state_on_fk_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
])
def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)),
migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)),
])
def test_rename_field_reloads_state_on_fk_target_changes(self):
"""
If RenameField doesn't reload state appropriately, the AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_rename_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameField('Rider', 'id', 'id2'),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
], atomic=connection.features.supports_atomic_references_rename)
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True)
# Test the state alteration
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
# Make sure the unique_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
# Make sure the index_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0])
# Test the database alteration
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Ensure the unique constraint has been ported over
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_rnfl_pony")
# Ensure the index constraint has been ported over
self.assertIndexExists("test_rnfl_pony", ["weight", "blue"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Ensure the index constraint has been reset
self.assertIndexExists("test_rnfl_pony", ["weight", "pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"})
def test_rename_missing_field(self):
state = ProjectState()
state.add_model(ModelState('app', 'model', []))
with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"):
migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state)
def test_rename_referenced_field_state_forward(self):
state = ProjectState()
state.add_model(ModelState('app', 'Model', [
('id', models.AutoField(primary_key=True)),
('field', models.IntegerField(unique=True)),
]))
state.add_model(ModelState('app', 'OtherModel', [
('id', models.AutoField(primary_key=True)),
('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')),
('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))),
]))
operation = migrations.RenameField('Model', 'field', 'renamed')
new_state = state.clone()
operation.state_forwards('app', new_state)
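        # ModelState.fields is a list of (name, field) pairs: index 1 is the 'fk'
        # ForeignKey, index 2 the 'fo' ForeignObject.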
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed')
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ['self'])
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].to_fields, ('renamed',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('fk',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))
operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk')
new_state = state.clone()
operation.state_forwards('app', new_state)
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed')
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ('self',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].to_fields, ('renamed',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('renamed_fk',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))
def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alunto", editor, project_state, new_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alunto", editor, new_state, project_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}})
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))")
def test_add_index(self):
"""
Test the AddIndex operation.
"""
project_state = self.set_up_test_model("test_adin")
msg = (
"Indexes passed to AddIndex operations require a name argument. "
"<Index: fields='pink'> doesn't have one."
)
with self.assertRaisesMessage(ValueError, msg):
migrations.AddIndex("Pony", models.Index(fields=["pink"]))
index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
operation = migrations.AddIndex("Pony", index)
self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony")
new_state = project_state.clone()
operation.state_forwards("test_adin", new_state)
# Test the database alteration
self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1)
self.assertIndexNotExists("test_adin_pony", ["pink"])
with connection.schema_editor() as editor:
operation.database_forwards("test_adin", editor, project_state, new_state)
self.assertIndexExists("test_adin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adin", editor, new_state, project_state)
self.assertIndexNotExists("test_adin_pony", ["pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'index': index})
def test_remove_index(self):
"""
Test the RemoveIndex operation.
"""
project_state = self.set_up_test_model("test_rmin", multicol_index=True)
self.assertTableExists("test_rmin_pony")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
operation = migrations.RemoveIndex("Pony", "pony_test_idx")
self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmin", new_state)
# Test the state alteration
self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_rmin", editor, project_state, new_state)
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmin", editor, new_state, project_state)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"})
        # Also test a field dropped together with its index (SQLite table remake issue)
operations = [
migrations.RemoveIndex("Pony", "pony_test_idx"),
migrations.RemoveField("Pony", "pink"),
]
self.assertColumnExists("test_rmin_pony", "pink")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test database alteration
new_state = project_state.clone()
self.apply_operations('test_rmin', new_state, operations=operations)
self.assertColumnNotExists("test_rmin_pony", "pink")
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
self.unapply_operations("test_rmin", project_state, operations=operations)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
def test_add_index_state_forwards(self):
project_state = self.set_up_test_model('test_adinsf')
index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx')
old_model = project_state.apps.get_model('test_adinsf', 'Pony')
new_state = project_state.clone()
operation = migrations.AddIndex('Pony', index)
operation.state_forwards('test_adinsf', new_state)
new_model = new_state.apps.get_model('test_adinsf', 'Pony')
self.assertIsNot(old_model, new_model)
def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model('test_rminsf')
index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx')
migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state)
old_model = project_state.apps.get_model('test_rminsf', 'Pony')
new_state = project_state.clone()
operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx')
operation.state_forwards('test_rminsf', new_state)
new_model = new_state.apps.get_model('test_rminsf', 'Pony')
self.assertIsNot(old_model, new_model)
def test_alter_field_with_index(self):
"""
Test AlterField operation with an index to ensure indexes created via
Meta.indexes don't get dropped with sqlite3 remake.
"""
project_state = self.set_up_test_model("test_alflin", index=True)
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
new_state = project_state.clone()
operation.state_forwards("test_alflin", new_state)
# Test the database alteration
self.assertColumnNotNull("test_alflin_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alflin", editor, project_state, new_state)
# Index hasn't been dropped
self.assertIndexExists("test_alflin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflin", editor, new_state, project_state)
# Ensure the index is still there
self.assertIndexExists("test_alflin_pony", ["pink"])
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alinto", editor, new_state, project_state)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")
@skipUnlessDBFeature('supports_table_check_constraints')
def test_add_constraint(self):
project_state = self.set_up_test_model("test_addconstraint")
gt_check = models.Q(pink__gt=2)
gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2")
gt_operation = migrations.AddConstraint("Pony", gt_constraint)
self.assertEqual(
gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony"
)
# Test the state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
# Add another one.
lt_check = models.Q(pink__lt=100)
lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100")
lt_operation = migrations.AddConstraint("Pony", lt_constraint)
lt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 2)
with connection.schema_editor() as editor:
lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state)
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint})
@skipUnlessDBFeature('supports_table_check_constraints')
def test_add_constraint_percent_escaping(self):
app_label = 'add_constraint_string_quoting'
operations = [
CreateModel(
'Author',
fields=[
('id', models.AutoField(primary_key=True)),
('name', models.CharField(max_length=100)),
('rebate', models.CharField(max_length=100)),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
# "%" generated in startswith lookup should be escaped in a way that is
        # considered a wildcard.
check = models.Q(name__startswith='Albert')
constraint = models.CheckConstraint(check=check, name='name_constraint')
operation = migrations.AddConstraint('Author', constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, 'Author')
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name='Artur')
        # Literal "%" should be escaped in a way that is not considered a
# wildcard.
check = models.Q(rebate__endswith='%')
constraint = models.CheckConstraint(check=check, name='rebate_constraint')
operation = migrations.AddConstraint('Author', constraint)
from_state = to_state
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
Author = to_state.apps.get_model(app_label, 'Author')
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, 'Author')
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name='Albert', rebate='10$')
author = Author.objects.create(name='Albert', rebate='10%')
self.assertEqual(Author.objects.get(), author)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_add_or_constraint(self):
app_label = 'test_addorconstraint'
constraint_name = 'add_constraint_or'
from_state = self.set_up_test_model(app_label)
check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
constraint = models.CheckConstraint(check=check, name=constraint_name)
operation = migrations.AddConstraint('Pony', constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Pony = to_state.apps.get_model(app_label, 'Pony')
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=2, weight=3.0)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=3, weight=1.0)
Pony.objects.bulk_create([
Pony(pink=3, weight=-1.0),
Pony(pink=1, weight=-1.0),
Pony(pink=3, weight=3.0),
])
@skipUnlessDBFeature('supports_table_check_constraints')
def test_add_constraint_combinable(self):
app_label = 'test_addconstraint_combinable'
operations = [
CreateModel(
'Book',
fields=[
('id', models.AutoField(primary_key=True)),
('read', models.PositiveIntegerField()),
('unread', models.PositiveIntegerField()),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
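        # The check condition uses a combinable expression (100 - F('unread'))
        # rather than a plain value.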
constraint = models.CheckConstraint(
check=models.Q(read=(100 - models.F('unread'))),
name='test_addconstraint_combinable_sum_100',
)
operation = migrations.AddConstraint('Book', constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Book = to_state.apps.get_model(app_label, 'Book')
with self.assertRaises(IntegrityError), transaction.atomic():
Book.objects.create(read=70, unread=10)
Book.objects.create(read=70, unread=30)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_remove_constraint(self):
project_state = self.set_up_test_model("test_removeconstraint", constraints=[
models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"),
models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"),
])
gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2")
self.assertEqual(
gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony"
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
Pony.objects.create(pink=1, weight=1.0).delete()
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
# Remove the other one.
lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100")
lt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor:
lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
Pony.objects.create(pink=100, weight=1.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"})
def test_add_partial_unique_constraint(self):
project_state = self.set_up_test_model('test_addpartialuniqueconstraint')
partial_unique_constraint = models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
)
operation = migrations.AddConstraint('Pony', partial_unique_constraint)
self.assertEqual(
operation.describe(),
'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq '
'on model Pony'
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards('test_addpartialuniqueconstraint', new_state)
self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1)
Pony = new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony')
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state)
# Test constraint works
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
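        # Backends without partial index support skip conditional unique
        # constraints entirely, so the duplicate row is only rejected where the
        # feature exists.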
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], 'AddConstraint')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint})
def test_remove_partial_unique_constraint(self):
project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[
models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
),
])
gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq')
self.assertEqual(
gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony'
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state)
self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0)
Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony')
self.assertEqual(len(Pony._meta.constraints), 0)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
Pony.objects.create(pink=1, weight=7.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state)
# Test constraint works
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], 'RemoveConstraint')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {
'model_name': 'Pony',
'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq',
})
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}})
def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {}})
def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony")
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None),
"pony"
)
        # Make sure there's no _order column yet
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alorwrtto", editor, project_state, new_state)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
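        # Both pre-existing riders get _order 0: the new column is backfilled with
        # a default rather than being renumbered per pony.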
updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alorwrtto", editor, new_state, project_state)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"})
def test_alter_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Change managers on Pony")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model('test_almoma', 'pony')
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)
def test_alter_model_managers_emptying(self):
"""
        The managers on a model can be emptied.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
def test_alter_fk(self):
"""
Creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name='Rider',
name='pony',
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
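        # Run both operations under a single schema editor so the FK constraint SQL
        # deferred by CreateModel is still pending when AlterField executes.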
with connection.schema_editor() as editor:
create_operation.database_forwards("test_alfk", editor, project_state, create_state)
alter_operation.database_forwards("test_alfk", editor, create_state, alter_state)
def test_alter_fk_non_fk(self):
"""
Altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_afknfk", editor, new_state, project_state)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
# Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
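        # Collected SQL is recorded rather than executed, so the escaped '%%'
        # should appear verbatim in the output.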
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'")
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'")
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"])
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
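        # RunSQL accepts a mix of statement formats: bare strings, [sql, params]
        # lists, and (sql, params) tuples (params may be None).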
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']],
("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']),
]
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards("test_runsql", editor, project_state, new_state)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards("test_runsql", editor, new_state, project_state)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
#23426 - RunSQL should fail when a list of statements with an incorrect
number of tuples is given.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
operation = migrations.RunSQL(
# forwards
[
["INSERT INTO foo (bar) VALUES ('buz');"]
],
# backwards
(
("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'),
),
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
operation.database_forwards("test_runsql", editor, project_state, new_state)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
operation.database_backwards("test_runsql", editor, new_state, project_state)
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_python(self):
"""
Tests the RunPython operation
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
# Now test we can't use a string
with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but without reverse_code set
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6)
self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunPython(inner_method, elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_python_atomic(self):
"""
Tests the RunPython operation correctly handles the "atomic" keyword
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
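        # A data migration that inserts a row and then fails, so we can check
        # whether the insert is rolled back.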
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
# Verify atomicity when applying.
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)]
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation should leave a row there
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Reset object count to zero and verify atomicity when unapplying.
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete()
# On a fully-transactional database, both versions rollback.
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation leaves a row there.
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Verify deconstruction.
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE))
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
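# The operations are applied one at a time with fresh state clones; adding
# 'hometown' to Author must also reload the Book model state, otherwise the
# RunPython below would fail when assigning the new Author to Book.author.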
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards("test_books", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards("test_books", editor, project_state, new_state)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2 ** 33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE))
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards("test_author", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value):
"""
A field may be migrated in the following ways:
- AutoField to BigAutoField
- SmallAutoField to AutoField
- SmallAutoField to BigAutoField
"""
def create_initial_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog = Blog.objects.create(name="web development done right")
Article.objects.create(name="Frameworks", blog=blog)
Article.objects.create(name="Programming Languages", blog=blog)
def create_big_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog2 = Blog.objects.create(name="Frameworks", id=target_value)
Article.objects.create(name="Django", blog=blog2)
Article.objects.create(id=target_value, name="Django2", blog=blog2)
create_blog = migrations.CreateModel(
"Blog",
[
("id", source_field(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_article = migrations.CreateModel(
"Article",
[
("id", source_field(primary_key=True)),
("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)),
("name", models.CharField(max_length=100)),
("data", models.TextField(default="")),
],
options={},
)
fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data)
fill_big_data = migrations.RunPython(create_big_data, create_big_data)
grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True))
grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True))
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_blog.state_forwards("test_blog", new_state)
create_blog.database_forwards("test_blog", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_article.state_forwards("test_article", new_state)
create_article.database_forwards("test_article", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_initial_data.state_forwards("fill_initial_data", new_state)
fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_article_id.state_forwards("test_article", new_state)
grow_article_id.database_forwards("test_article", editor, project_state, new_state)
state = new_state.clone()
article = state.apps.get_model("test_article.Article")
self.assertIsInstance(article._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_blog_id.state_forwards("test_blog", new_state)
grow_blog_id.database_forwards("test_blog", editor, project_state, new_state)
state = new_state.clone()
blog = state.apps.get_model("test_blog.Blog")
self.assertIsInstance(blog._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_big_data.state_forwards("fill_big_data", new_state)
fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state)
def test_autofield__bigautofield_foreignfield_growth(self):
"""A field may be migrated from AutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.AutoField,
models.BigAutoField,
2 ** 33,
)
def test_smallfield_autofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to AutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.AutoField,
2 ** 22,
)
def test_smallfield_bigautofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.BigAutoField,
2 ** 33,
)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
operation.database_backwards("test_runpython", editor, new_state, project_state)
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;"
)
state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation],
database_operations=[database_operation]
)
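# The CreateModel above only updates the in-memory state, while the RunSQL
# only touches the schema; each side is asserted independently below.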
self.assertEqual(operation.describe(), "Custom state/database change combination")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"])
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: Multiple operations both
for state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
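# assertModelsAndTables() verifies that only the state_operations show up in
# new_state.models and only the database_operations create or drop tables.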
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1)
self.assertNotIn((app_label, "iloveponiesonies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
"""
Key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = ['migrations']
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
The CreateModel operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
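# The swappable option points at TEST_SWAP_MODEL, which is swapped out, so
# the operation should update the project state but never touch the database.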
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crigsw", editor, new_state, project_state)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
The DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dligsw", editor, new_state, project_state)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
The AddField operation ignores swapped models.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfligsw", editor, project_state, new_state)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfligsw", editor, new_state, project_state)
self.assertTableNotExists("test_adfligsw_pony")
@override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel')
def test_indexes_ignore_swapped(self):
"""
Add/RemoveIndex operations ignore swapped models.
"""
operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state('test_adinigsw', operation)
with connection.schema_editor() as editor:
# No database queries should be run for swapped models
operation.database_forwards('test_adinigsw', editor, project_state, new_state)
operation.database_backwards('test_adinigsw', editor, new_state, project_state)
operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state("test_rminigsw", operation)
with connection.schema_editor() as editor:
operation.database_forwards('test_rminigsw', editor, project_state, new_state)
operation.database_backwards('test_rminigsw', editor, new_state, project_state)
class TestCreateModel(SimpleTestCase):
def test_references_model_mixin(self):
CreateModel('name', [], bases=(Mixin, models.Model)).references_model('other_model', 'migrations')
class FieldOperationTests(SimpleTestCase):
def test_references_model(self):
operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE))
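# references_model() matches case-insensitively against both the operation's
# own model and the model targeted by the field.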
# Model name match.
self.assertIs(operation.references_model('mOdEl', 'migrations'), True)
# Referenced field.
self.assertIs(operation.references_model('oTher', 'migrations'), True)
# Doesn't reference.
self.assertIs(operation.references_model('Whatever', 'migrations'), False)
def test_references_field_by_name(self):
operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False))
self.assertIs(operation.references_field('model', 'field', 'migrations'), True)
def test_references_field_by_remote_field_model(self):
operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)
def test_references_field_by_from_fields(self):
operation = FieldOperation(
'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to'])
)
self.assertIs(operation.references_field('Model', 'from', 'migrations'), True)
self.assertIs(operation.references_field('Model', 'to', 'migrations'), False)
self.assertIs(operation.references_field('Other', 'from', 'migrations'), False)
self.assertIs(operation.references_field('Model', 'to', 'migrations'), False)
def test_references_field_by_to_fields(self):
operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field'))
self.assertIs(operation.references_field('Other', 'field', 'migrations'), True)
self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False)
self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)
def test_references_field_by_through(self):
operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through'))
self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True)
self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False)
def test_reference_field_by_through_fields(self):
operation = FieldOperation(
'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second'))
)
self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True)
self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False)
self.assertIs(operation.references_field('Through', 'first', 'migrations'), True)
self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
29c8d0c41ed6aa05e4cf77bf9dc9d114ae5d060033e1250d6bac49a3b268b52e | from django.apps.registry import Apps
from django.contrib.contenttypes.fields import GenericForeignKey
from django.db import models
from django.db.migrations.exceptions import InvalidBasesError
from django.db.migrations.operations import (
AddField, AlterField, DeleteModel, RemoveField,
)
from django.db.migrations.state import (
ModelState, ProjectState, get_related_models_recursive,
)
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_apps
from .models import (
FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager,
UnicodeModel,
)
class StateTests(SimpleTestCase):
"""
Tests state construction, rendering and modification by operations.
"""
def test_create(self):
"""
Tests making a ProjectState from an Apps
"""
new_apps = Apps(["migrations"])
class Author(models.Model):
name = models.CharField(max_length=255)
bio = models.TextField()
age = models.IntegerField(blank=True, null=True)
class Meta:
app_label = "migrations"
apps = new_apps
unique_together = ["name", "bio"]
index_together = ["bio", "age"]
class AuthorProxy(Author):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
ordering = ["name"]
class SubAuthor(Author):
width = models.FloatField(null=True)
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
title = models.CharField(max_length=1000)
author = models.ForeignKey(Author, models.CASCADE)
contributors = models.ManyToManyField(Author)
class Meta:
app_label = "migrations"
apps = new_apps
verbose_name = "tome"
db_table = "test_tome"
indexes = [models.Index(fields=['title'])]
class Food(models.Model):
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
food_no_mgr = NoMigrationFoodManager('x', 'y')
class Meta:
app_label = "migrations"
apps = new_apps
class FoodNoManagers(models.Model):
class Meta:
app_label = "migrations"
apps = new_apps
class FoodNoDefaultManager(models.Model):
food_no_mgr = NoMigrationFoodManager('x', 'y')
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
class Meta:
app_label = "migrations"
apps = new_apps
mgr1 = FoodManager('a', 'b')
mgr2 = FoodManager('x', 'y', c=3, d=4)
class FoodOrderedManagers(models.Model):
# The managers on this model should be ordered by their creation
# counter and not by the order in the model body
food_no_mgr = NoMigrationFoodManager('x', 'y')
food_mgr2 = mgr2
food_mgr1 = mgr1
class Meta:
app_label = "migrations"
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
author_proxy_state = project_state.models['migrations', 'authorproxy']
sub_author_state = project_state.models['migrations', 'subauthor']
book_state = project_state.models['migrations', 'book']
food_state = project_state.models['migrations', 'food']
food_no_managers_state = project_state.models['migrations', 'foodnomanagers']
food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager']
food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers']
book_index = models.Index(fields=['title'])
book_index.set_name_with_model(Book)
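# set_name_with_model() gives book_index the same autogenerated name that
# ModelState records, so the options comparison below matches.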
self.assertEqual(author_state.app_label, "migrations")
self.assertEqual(author_state.name, "Author")
self.assertEqual([x for x, y in author_state.fields], ["id", "name", "bio", "age"])
self.assertEqual(author_state.fields[1][1].max_length, 255)
self.assertIs(author_state.fields[2][1].null, False)
self.assertIs(author_state.fields[3][1].null, True)
self.assertEqual(
author_state.options,
{
"unique_together": {("name", "bio")},
"index_together": {("bio", "age")},
"indexes": [],
"constraints": [],
}
)
self.assertEqual(author_state.bases, (models.Model,))
self.assertEqual(book_state.app_label, "migrations")
self.assertEqual(book_state.name, "Book")
self.assertEqual([x for x, y in book_state.fields], ["id", "title", "author", "contributors"])
self.assertEqual(book_state.fields[1][1].max_length, 1000)
self.assertIs(book_state.fields[2][1].null, False)
self.assertEqual(book_state.fields[3][1].__class__.__name__, "ManyToManyField")
self.assertEqual(
book_state.options,
{"verbose_name": "tome", "db_table": "test_tome", "indexes": [book_index], "constraints": []},
)
self.assertEqual(book_state.bases, (models.Model,))
self.assertEqual(author_proxy_state.app_label, "migrations")
self.assertEqual(author_proxy_state.name, "AuthorProxy")
self.assertEqual(author_proxy_state.fields, [])
self.assertEqual(
author_proxy_state.options,
{"proxy": True, "ordering": ["name"], "indexes": [], "constraints": []},
)
self.assertEqual(author_proxy_state.bases, ("migrations.author",))
self.assertEqual(sub_author_state.app_label, "migrations")
self.assertEqual(sub_author_state.name, "SubAuthor")
self.assertEqual(len(sub_author_state.fields), 2)
self.assertEqual(sub_author_state.bases, ("migrations.author",))
# The default manager is used in migrations
self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr'])
self.assertTrue(all(isinstance(name, str) for name, mgr in food_state.managers))
self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2))
# No explicit managers defined. Migrations will fall back to the default
self.assertEqual(food_no_managers_state.managers, [])
# food_mgr is used in migration but isn't the default mgr, hence add the
# default
self.assertEqual([name for name, mgr in food_no_default_manager_state.managers],
['food_no_mgr', 'food_mgr'])
self.assertTrue(all(isinstance(name, str) for name, mgr in food_no_default_manager_state.managers))
self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager)
self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager)
self.assertEqual([name for name, mgr in food_order_manager_state.managers],
['food_mgr1', 'food_mgr2'])
self.assertTrue(all(isinstance(name, str) for name, mgr in food_order_manager_state.managers))
self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers],
[('a', 'b', 1, 2), ('x', 'y', 3, 4)])
def test_custom_default_manager_added_to_the_model_state(self):
"""
When the default manager of the model is a custom manager,
it needs to be added to the model state.
"""
new_apps = Apps(['migrations'])
custom_manager = models.Manager()
class Author(models.Model):
objects = models.TextField()
authors = custom_manager
class Meta:
app_label = 'migrations'
apps = new_apps
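# 'objects' is shadowed by a regular field above, so the custom 'authors'
# manager is the model's default manager and must be captured in the state.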
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
self.assertEqual(author_state.managers, [('authors', custom_manager)])
def test_custom_default_manager_named_objects_with_false_migration_flag(self):
"""
When a manager is added with a name of 'objects' but it does not
have `use_in_migrations = True`, no manager should be added to the
model state (#26643).
"""
new_apps = Apps(['migrations'])
class Author(models.Model):
objects = models.Manager()
class Meta:
app_label = 'migrations'
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
self.assertEqual(author_state.managers, [])
def test_no_duplicate_managers(self):
"""
When a manager is added with `use_in_migrations = True` and a parent
model had a manager with the same name and `use_in_migrations = True`,
the parent's manager shouldn't appear in the model state (#26881).
"""
new_apps = Apps(['migrations'])
class PersonManager(models.Manager):
use_in_migrations = True
class Person(models.Model):
objects = PersonManager()
class Meta:
abstract = True
class BossManager(PersonManager):
use_in_migrations = True
class Boss(Person):
objects = BossManager()
class Meta:
app_label = 'migrations'
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
boss_state = project_state.models['migrations', 'boss']
self.assertEqual(boss_state.managers, [('objects', Boss.objects)])
def test_custom_default_manager(self):
new_apps = Apps(['migrations'])
class Author(models.Model):
manager1 = models.Manager()
manager2 = models.Manager()
class Meta:
app_label = 'migrations'
apps = new_apps
default_manager_name = 'manager2'
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
self.assertEqual(author_state.options['default_manager_name'], 'manager2')
self.assertEqual(author_state.managers, [('manager2', Author.manager1)])
def test_custom_base_manager(self):
new_apps = Apps(['migrations'])
class Author(models.Model):
manager1 = models.Manager()
manager2 = models.Manager()
class Meta:
app_label = 'migrations'
apps = new_apps
base_manager_name = 'manager2'
class Author2(models.Model):
manager1 = models.Manager()
manager2 = models.Manager()
class Meta:
app_label = 'migrations'
apps = new_apps
base_manager_name = 'manager1'
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models['migrations', 'author']
self.assertEqual(author_state.options['base_manager_name'], 'manager2')
self.assertEqual(author_state.managers, [
('manager1', Author.manager1),
('manager2', Author.manager2),
])
author2_state = project_state.models['migrations', 'author2']
self.assertEqual(author2_state.options['base_manager_name'], 'manager1')
self.assertEqual(author2_state.managers, [
('manager1', Author2.manager1),
])
def test_apps_bulk_update(self):
"""
StateApps.bulk_update() should update apps.ready to False and reset
the value afterwards.
"""
project_state = ProjectState()
apps = project_state.apps
with apps.bulk_update():
self.assertFalse(apps.ready)
self.assertTrue(apps.ready)
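# apps.ready must also be restored when the wrapped block raises.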
with self.assertRaises(ValueError):
with apps.bulk_update():
self.assertFalse(apps.ready)
raise ValueError()
self.assertTrue(apps.ready)
def test_render(self):
"""
Tests rendering a ProjectState into an Apps.
"""
project_state = ProjectState()
project_state.add_model(ModelState(
app_label="migrations",
name="Tag",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("hidden", models.BooleanField()),
],
))
project_state.add_model(ModelState(
app_label="migrations",
name="SubTag",
fields=[
('tag_ptr', models.OneToOneField(
'migrations.Tag',
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("awesome", models.BooleanField()),
],
bases=("migrations.Tag",),
))
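# SubTag above uses an explicit parent_link OneToOneField, making it an MTI
# child of Tag; the rendered model should have exactly two local fields
# (tag_ptr and awesome).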
base_mgr = models.Manager()
mgr1 = FoodManager('a', 'b')
mgr2 = FoodManager('x', 'y', c=3, d=4)
project_state.add_model(ModelState(
app_label="migrations",
name="Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
# The ordering we really want is default, food_mgr1, food_mgr2
('default', base_mgr),
('food_mgr2', mgr2),
('food_mgr1', mgr1),
]
))
new_apps = project_state.apps
self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100)
self.assertIs(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False)
self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2)
Food = new_apps.get_model("migrations", "Food")
self.assertEqual([mgr.name for mgr in Food._meta.managers],
['default', 'food_mgr1', 'food_mgr2'])
self.assertTrue(all(isinstance(mgr.name, str) for mgr in Food._meta.managers))
self.assertEqual([mgr.__class__ for mgr in Food._meta.managers],
[models.Manager, FoodManager, FoodManager])
def test_render_model_inheritance(self):
class Book(models.Model):
title = models.CharField(max_length=1000)
class Meta:
app_label = "migrations"
apps = Apps()
class Novel(Book):
class Meta:
app_label = "migrations"
apps = Apps()
# First, test rendering individually
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(Novel)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent model is in the app registry, it should be fine
ModelState.from_model(Book).render(apps)
ModelState.from_model(Novel).render(apps)
def test_render_model_with_multiple_inheritance(self):
class Foo(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class Bar(models.Model):
class Meta:
app_label = "migrations"
apps = Apps()
class FooBar(Foo, Bar):
class Meta:
app_label = "migrations"
apps = Apps()
class AbstractSubFooBar(FooBar):
class Meta:
abstract = True
apps = Apps()
class SubFooBar(AbstractSubFooBar):
class Meta:
app_label = "migrations"
apps = Apps()
apps = Apps(["migrations"])
# We shouldn't be able to render yet
ms = ModelState.from_model(FooBar)
with self.assertRaises(InvalidBasesError):
ms.render(apps)
# Once the parent models are in the app registry, it should be fine
ModelState.from_model(Foo).render(apps)
self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model])
ModelState.from_model(Bar).render(apps)
self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model])
ModelState.from_model(FooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar'])
ModelState.from_model(SubFooBar).render(apps)
self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar'])
def test_render_project_dependencies(self):
"""
The ProjectState render method correctly renders models
to account for inter-model base dependencies.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = "migrations"
apps = new_apps
class B(A):
class Meta:
app_label = "migrations"
apps = new_apps
class C(B):
class Meta:
app_label = "migrations"
apps = new_apps
class D(A):
class Meta:
app_label = "migrations"
apps = new_apps
class E(B):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
class F(D):
class Meta:
app_label = "migrations"
apps = new_apps
proxy = True
# Make a ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.add_model(ModelState.from_model(D))
project_state.add_model(ModelState.from_model(E))
project_state.add_model(ModelState.from_model(F))
final_apps = project_state.apps
self.assertEqual(len(final_apps.get_models()), 6)
# Now make an invalid ProjectState and make sure it fails
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.add_model(ModelState.from_model(F))
with self.assertRaises(InvalidBasesError):
project_state.apps
def test_render_unique_app_labels(self):
"""
The ProjectState render method doesn't raise an
ImproperlyConfigured exception about unique labels if two dotted app
names have the same last part.
"""
class A(models.Model):
class Meta:
app_label = "django.contrib.auth"
class B(models.Model):
class Meta:
app_label = "vendor.auth"
# Make a ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
self.assertEqual(len(project_state.apps.get_models()), 2)
def test_reload_related_model_on_non_relational_fields(self):
"""
The model is reloaded even on changes that are not involved in
relations. Other models pointing to or from it are also reloaded.
"""
project_state = ProjectState()
project_state.apps # Render project state.
project_state.add_model(ModelState('migrations', 'A', []))
project_state.add_model(ModelState('migrations', 'B', [
('a', models.ForeignKey('A', models.CASCADE)),
]))
project_state.add_model(ModelState('migrations', 'C', [
('b', models.ForeignKey('B', models.CASCADE)),
('name', models.TextField()),
]))
project_state.add_model(ModelState('migrations', 'D', [
('a', models.ForeignKey('A', models.CASCADE)),
]))
operation = AlterField(
model_name='C',
name='name',
field=models.TextField(blank=True),
)
operation.state_forwards('migrations', project_state)
project_state.reload_model('migrations', 'a', delay=True)
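# Even a delayed reload of 'a' must leave the related models B and D
# pointing at the newly rendered A.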
A = project_state.apps.get_model('migrations.A')
B = project_state.apps.get_model('migrations.B')
D = project_state.apps.get_model('migrations.D')
self.assertIs(B._meta.get_field('a').related_model, A)
self.assertIs(D._meta.get_field('a').related_model, A)
def test_reload_model_relationship_consistency(self):
project_state = ProjectState()
project_state.add_model(ModelState('migrations', 'A', []))
project_state.add_model(ModelState('migrations', 'B', [
('a', models.ForeignKey('A', models.CASCADE)),
]))
project_state.add_model(ModelState('migrations', 'C', [
('b', models.ForeignKey('B', models.CASCADE)),
]))
A = project_state.apps.get_model('migrations.A')
B = project_state.apps.get_model('migrations.B')
C = project_state.apps.get_model('migrations.C')
self.assertEqual([r.related_model for r in A._meta.related_objects], [B])
self.assertEqual([r.related_model for r in B._meta.related_objects], [C])
self.assertEqual([r.related_model for r in C._meta.related_objects], [])
project_state.reload_model('migrations', 'a', delay=True)
A = project_state.apps.get_model('migrations.A')
B = project_state.apps.get_model('migrations.B')
C = project_state.apps.get_model('migrations.C')
self.assertEqual([r.related_model for r in A._meta.related_objects], [B])
self.assertEqual([r.related_model for r in B._meta.related_objects], [C])
self.assertEqual([r.related_model for r in C._meta.related_objects], [])
def test_add_relations(self):
"""
#24573 - Adding relations to existing models should reload the
referenced models too.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = 'something'
apps = new_apps
class B(A):
class Meta:
app_label = 'something'
apps = new_apps
class C(models.Model):
class Meta:
app_label = 'something'
apps = new_apps
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
project_state.add_model(ModelState.from_model(C))
project_state.apps # We need to work with rendered models
old_state = project_state.clone()
model_a_old = old_state.apps.get_model('something', 'A')
model_b_old = old_state.apps.get_model('something', 'B')
model_c_old = old_state.apps.get_model('something', 'C')
# The relations between the old models are correct
self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old)
self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old)
operation = AddField('c', 'to_a', models.OneToOneField(
'something.A',
models.CASCADE,
related_name='from_c',
))
operation.state_forwards('something', project_state)
model_a_new = project_state.apps.get_model('something', 'A')
model_b_new = project_state.apps.get_model('something', 'B')
model_c_new = project_state.apps.get_model('something', 'C')
# All models have changed
self.assertIsNot(model_a_old, model_a_new)
self.assertIsNot(model_b_old, model_b_new)
self.assertIsNot(model_c_old, model_c_new)
# The relations between the old models still hold
self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old)
self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old)
# The relations between the new models are correct
self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new)
self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new)
self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new)
self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new)
def test_remove_relations(self):
"""
#24225 - Relations between models are updated while preserving the
relations and references of models in an old state.
"""
new_apps = Apps()
class A(models.Model):
class Meta:
app_label = "something"
apps = new_apps
class B(models.Model):
to_a = models.ForeignKey(A, models.CASCADE)
class Meta:
app_label = "something"
apps = new_apps
def get_model_a(state):
return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0]
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
old_state = project_state.clone()
operation = RemoveField("b", "to_a")
operation.state_forwards("something", project_state)
# Model from old_state still has the relation
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
self.assertEqual(len(model_a_old._meta.related_objects), 1)
self.assertEqual(len(model_a_new._meta.related_objects), 0)
# Same test for deleted model
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
old_state = project_state.clone()
operation = DeleteModel("b")
operation.state_forwards("something", project_state)
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
self.assertEqual(len(model_a_old._meta.related_objects), 1)
self.assertEqual(len(model_a_new._meta.related_objects), 0)
def test_self_relation(self):
"""
#24513 - Modifying an object pointing to itself would cause it to be
rendered twice, thus breaking its related M2M through objects.
"""
class A(models.Model):
to_a = models.ManyToManyField('something.A', symmetrical=False)
class Meta:
app_label = "something"
def get_model_a(state):
return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0]
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
old_state = project_state.clone()
operation = AlterField(
model_name="a",
name="to_a",
field=models.ManyToManyField("something.A", symmetrical=False, blank=True)
)
# At this point the model would be rendered twice causing its related
# M2M through objects to point to an old copy and thus breaking their
# attribute lookup.
operation.state_forwards("something", project_state)
model_a_old = get_model_a(old_state)
model_a_new = get_model_a(project_state)
self.assertIsNot(model_a_old, model_a_new)
# The old model's _meta is still consistent
field_to_a_old = model_a_old._meta.get_field("to_a")
self.assertEqual(field_to_a_old.m2m_field_name(), "from_a")
self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a")
self.assertIs(field_to_a_old.related_model, model_a_old)
self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old)
self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old)
# The new model's _meta is still consistent
field_to_a_new = model_a_new._meta.get_field("to_a")
self.assertEqual(field_to_a_new.m2m_field_name(), "from_a")
self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a")
self.assertIs(field_to_a_new.related_model, model_a_new)
self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new)
self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new)
def test_equality(self):
"""
== and != are implemented correctly.
"""
# Test two things that should be equal
project_state = ProjectState()
project_state.add_model(ModelState(
"migrations",
"Tag",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("hidden", models.BooleanField()),
],
{},
None,
))
project_state.apps # Fill the apps cached property
other_state = project_state.clone()
self.assertEqual(project_state, project_state)
self.assertEqual(project_state, other_state)
self.assertIs(project_state != project_state, False)
self.assertIs(project_state != other_state, False)
self.assertNotEqual(project_state.apps, other_state.apps)
# Make a very small change (max_len 99) and see if that affects it
project_state = ProjectState()
project_state.add_model(ModelState(
"migrations",
"Tag",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=99)),
("hidden", models.BooleanField()),
],
{},
None,
))
self.assertNotEqual(project_state, other_state)
self.assertIs(project_state == other_state, False)
def test_dangling_references_throw_error(self):
new_apps = Apps()
class Author(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Publisher(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
publisher = models.ForeignKey(Publisher, models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
class Magazine(models.Model):
authors = models.ManyToManyField(Author)
class Meta:
app_label = "migrations"
apps = new_apps
# Make a valid ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Author))
project_state.add_model(ModelState.from_model(Publisher))
project_state.add_model(ModelState.from_model(Book))
project_state.add_model(ModelState.from_model(Magazine))
self.assertEqual(len(project_state.apps.get_models()), 4)
# now make an invalid one with a ForeignKey
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Book))
msg = (
"The field migrations.Book.author was declared with a lazy reference "
"to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
"The field migrations.Book.publisher was declared with a lazy reference "
"to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'."
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
# And another with ManyToManyField.
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Magazine))
msg = (
"The field migrations.Magazine.authors was declared with a lazy reference "
"to 'migrations.author\', but app 'migrations' doesn't provide model 'author'.\n"
"The field migrations.Magazine_authors.author was declared with a lazy reference "
"to \'migrations.author\', but app 'migrations' doesn't provide model 'author'."
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
# And now with multiple models and multiple fields.
project_state.add_model(ModelState.from_model(Book))
msg = (
"The field migrations.Book.author was declared with a lazy reference "
"to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
"The field migrations.Book.publisher was declared with a lazy reference "
"to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'.\n"
"The field migrations.Magazine.authors was declared with a lazy reference "
"to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n"
"The field migrations.Magazine_authors.author was declared with a lazy reference "
"to 'migrations.author', but app 'migrations' doesn't provide model 'author'."
)
with self.assertRaisesMessage(ValueError, msg):
project_state.apps
def test_real_apps(self):
"""
Including real apps can resolve dangling FK errors.
This test relies on the fact that contenttypes is always loaded.
"""
new_apps = Apps()
class TestModel(models.Model):
ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
# If we just stick it into an empty state it should fail
project_state = ProjectState()
project_state.add_model(ModelState.from_model(TestModel))
with self.assertRaises(ValueError):
project_state.apps
# If we include the real app it should succeed
project_state = ProjectState(real_apps=["contenttypes"])
project_state.add_model(ModelState.from_model(TestModel))
rendered_state = project_state.apps
self.assertEqual(
len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]),
1,
)
def test_ignore_order_wrt(self):
"""
Makes sure ProjectState doesn't include OrderWrt fields when built
from existing models.
"""
new_apps = Apps()
class Author(models.Model):
name = models.TextField()
class Meta:
app_label = "migrations"
apps = new_apps
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
class Meta:
app_label = "migrations"
apps = new_apps
order_with_respect_to = "author"
# Make a valid ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(Author))
project_state.add_model(ModelState.from_model(Book))
self.assertEqual(
[name for name, field in project_state.models["migrations", "book"].fields],
["id", "author"],
)
def test_manager_refer_correct_model_version(self):
"""
#24147 - Managers refer to the correct version of a
historical model
"""
project_state = ProjectState()
project_state.add_model(ModelState(
app_label="migrations",
name="Tag",
fields=[
("id", models.AutoField(primary_key=True)),
("hidden", models.BooleanField()),
],
managers=[
('food_mgr', FoodManager('a', 'b')),
('food_qs', FoodQuerySet.as_manager()),
]
))
old_model = project_state.apps.get_model('migrations', 'tag')
new_state = project_state.clone()
operation = RemoveField("tag", "hidden")
operation.state_forwards("migrations", new_state)
new_model = new_state.apps.get_model('migrations', 'tag')
self.assertIsNot(old_model, new_model)
self.assertIs(old_model, old_model.food_mgr.model)
self.assertIs(old_model, old_model.food_qs.model)
self.assertIs(new_model, new_model.food_mgr.model)
self.assertIs(new_model, new_model.food_qs.model)
self.assertIsNot(old_model.food_mgr, new_model.food_mgr)
self.assertIsNot(old_model.food_qs, new_model.food_qs)
self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model)
self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model)
def test_choices_iterator(self):
"""
#24483 - ProjectState.from_apps should not destructively consume
Field.choices iterators.
"""
new_apps = Apps(["migrations"])
choices = [('a', 'A'), ('b', 'B')]
class Author(models.Model):
name = models.CharField(max_length=255)
choice = models.CharField(max_length=255, choices=iter(choices))
class Meta:
app_label = "migrations"
apps = new_apps
ProjectState.from_apps(new_apps)
choices_field = Author._meta.get_field('choice')
self.assertEqual(list(choices_field.choices), choices)
class ModelStateTests(SimpleTestCase):
def test_custom_model_base(self):
state = ModelState.from_model(ModelWithCustomBase)
self.assertEqual(state.bases, (models.Model,))
def test_bound_field_sanity_check(self):
field = models.CharField(max_length=1)
field.model = models.Model
with self.assertRaisesMessage(ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'):
ModelState('app', 'Model', [('field', field)])
def test_sanity_check_to(self):
field = models.ForeignKey(UnicodeModel, models.CASCADE)
with self.assertRaisesMessage(
ValueError,
'ModelState.fields cannot refer to a model class - "field.to" does. '
'Use a string reference instead.'
):
ModelState('app', 'Model', [('field', field)])
def test_sanity_check_through(self):
field = models.ManyToManyField('UnicodeModel')
field.remote_field.through = UnicodeModel
with self.assertRaisesMessage(
ValueError,
'ModelState.fields cannot refer to a model class - "field.through" does. '
'Use a string reference instead.'
):
ModelState('app', 'Model', [('field', field)])
def test_sanity_index_name(self):
field = models.IntegerField()
options = {'indexes': [models.Index(fields=['field'])]}
msg = "Indexes passed to ModelState require a name attribute. <Index: fields='field'> doesn't have one."
with self.assertRaisesMessage(ValueError, msg):
ModelState('app', 'Model', [('field', field)], options=options)
def test_fields_immutability(self):
"""
Rendering a model state doesn't alter its internal fields.
"""
apps = Apps()
field = models.CharField(max_length=1)
state = ModelState('app', 'Model', [('name', field)])
Model = state.render(apps)
self.assertNotEqual(Model._meta.get_field('name'), field)
def test_repr(self):
field = models.CharField(max_length=1)
state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C'])
self.assertEqual(repr(state), "<ModelState: 'app.Model'>")
project_state = ProjectState()
project_state.add_model(state)
with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"):
project_state.apps
@override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
def test_create_swappable(self):
"""
Tests making a ProjectState from an Apps with a swappable model
"""
new_apps = Apps(['migrations'])
class Author(models.Model):
name = models.CharField(max_length=255)
bio = models.TextField()
age = models.IntegerField(blank=True, null=True)
class Meta:
app_label = 'migrations'
apps = new_apps
swappable = 'TEST_SWAPPABLE_MODEL'
author_state = ModelState.from_model(Author)
self.assertEqual(author_state.app_label, 'migrations')
self.assertEqual(author_state.name, 'Author')
self.assertEqual([x for x, y in author_state.fields], ['id', 'name', 'bio', 'age'])
self.assertEqual(author_state.fields[1][1].max_length, 255)
self.assertIs(author_state.fields[2][1].null, False)
self.assertIs(author_state.fields[3][1].null, True)
self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], 'constraints': []})
self.assertEqual(author_state.bases, (models.Model,))
self.assertEqual(author_state.managers, [])
@override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
def test_create_swappable_from_abstract(self):
"""
A swappable model inheriting from a hierarchy:
concrete -> abstract -> concrete.
"""
new_apps = Apps(['migrations'])
class SearchableLocation(models.Model):
keywords = models.CharField(max_length=256)
class Meta:
app_label = 'migrations'
apps = new_apps
class Station(SearchableLocation):
name = models.CharField(max_length=128)
class Meta:
abstract = True
class BusStation(Station):
bus_routes = models.CharField(max_length=128)
inbound = models.BooleanField(default=False)
class Meta(Station.Meta):
app_label = 'migrations'
apps = new_apps
swappable = 'TEST_SWAPPABLE_MODEL'
station_state = ModelState.from_model(BusStation)
self.assertEqual(station_state.app_label, 'migrations')
self.assertEqual(station_state.name, 'BusStation')
self.assertEqual(
[x for x, y in station_state.fields],
['searchablelocation_ptr', 'name', 'bus_routes', 'inbound']
)
self.assertEqual(station_state.fields[1][1].max_length, 128)
self.assertIs(station_state.fields[2][1].null, False)
self.assertEqual(
station_state.options,
{'abstract': False, 'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], 'constraints': []}
)
self.assertEqual(station_state.bases, ('migrations.searchablelocation',))
self.assertEqual(station_state.managers, [])
@override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel')
def test_custom_manager_swappable(self):
"""
Tests making a ProjectState from unused models with custom managers
"""
new_apps = Apps(['migrations'])
class Food(models.Model):
food_mgr = FoodManager('a', 'b')
food_qs = FoodQuerySet.as_manager()
food_no_mgr = NoMigrationFoodManager('x', 'y')
class Meta:
app_label = "migrations"
apps = new_apps
swappable = 'TEST_SWAPPABLE_MODEL'
food_state = ModelState.from_model(Food)
# The default manager is used in migrations
self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr'])
self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2))
@isolate_apps('migrations', 'django.contrib.contenttypes')
def test_order_with_respect_to_private_field(self):
class PrivateFieldModel(models.Model):
content_type = models.ForeignKey('contenttypes.ContentType', models.CASCADE)
object_id = models.PositiveIntegerField()
private = GenericForeignKey()
class Meta:
order_with_respect_to = 'private'
state = ModelState.from_model(PrivateFieldModel)
self.assertNotIn('order_with_respect_to', state.options)
@isolate_apps('migrations')
def test_abstract_model_children_inherit_indexes(self):
class Abstract(models.Model):
name = models.CharField(max_length=50)
class Meta:
app_label = 'migrations'
abstract = True
indexes = [models.Index(fields=['name'])]
class Child1(Abstract):
pass
class Child2(Abstract):
pass
child1_state = ModelState.from_model(Child1)
child2_state = ModelState.from_model(Child2)
index_names = [index.name for index in child1_state.options['indexes']]
self.assertEqual(index_names, ['migrations__name_b0afd7_idx'])
index_names = [index.name for index in child2_state.options['indexes']]
self.assertEqual(index_names, ['migrations__name_016466_idx'])
# Modifying the state doesn't modify the index on the model.
child1_state.options['indexes'][0].name = 'bar'
self.assertEqual(Child1._meta.indexes[0].name, 'migrations__name_b0afd7_idx')
@isolate_apps('migrations')
def test_explicit_index_name(self):
class TestModel(models.Model):
name = models.CharField(max_length=50)
class Meta:
app_label = 'migrations'
indexes = [models.Index(fields=['name'], name='foo_idx')]
model_state = ModelState.from_model(TestModel)
index_names = [index.name for index in model_state.options['indexes']]
self.assertEqual(index_names, ['foo_idx'])
@isolate_apps('migrations')
def test_from_model_constraints(self):
class ModelWithConstraints(models.Model):
size = models.IntegerField()
class Meta:
constraints = [models.CheckConstraint(check=models.Q(size__gt=1), name='size_gt_1')]
state = ModelState.from_model(ModelWithConstraints)
model_constraints = ModelWithConstraints._meta.constraints
state_constraints = state.options['constraints']
self.assertEqual(model_constraints, state_constraints)
self.assertIsNot(model_constraints, state_constraints)
self.assertIsNot(model_constraints[0], state_constraints[0])
class RelatedModelsTests(SimpleTestCase):
def setUp(self):
self.apps = Apps(['migrations.related_models_app'])
def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False):
test_name = 'related_models_app'
assert not (abstract and proxy)
meta_contents = {
'abstract': abstract,
'app_label': test_name,
'apps': self.apps,
'proxy': proxy,
}
meta = type("Meta", (), meta_contents)
if not bases:
bases = (models.Model,)
body = {
'Meta': meta,
'__module__': "__fake__",
}
fname_base = fname = '%s_%%d' % name.lower()
for i, fk in enumerate(foreign_keys, 1):
fname = fname_base % i
body[fname] = fk
return type(name, bases, body)
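    # create_model() builds a model class on the fly in the fake
    # 'related_models_app', attaching each entry of `foreign_keys` as a field
    # named <model>_1, <model>_2, ... so the tests below can spell out
    # relation graphs concisely.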
def assertRelated(self, model, needle):
self.assertEqual(
get_related_models_recursive(model),
{(n._meta.app_label, n._meta.model_name) for n in needle},
)
def test_unrelated(self):
A = self.create_model("A")
B = self.create_model("B")
self.assertRelated(A, [])
self.assertRelated(B, [])
def test_direct_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B")
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_direct_hidden_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')])
B = self.create_model("B")
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_fk_through_proxy(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
C = self.create_model("C", bases=(B,), proxy=True)
D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
self.assertRelated(A, [B, C, D])
self.assertRelated(B, [A, C, D])
self.assertRelated(C, [A, B, D])
self.assertRelated(D, [A, B, C])
def test_nested_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
C = self.create_model("C")
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_two_sided(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_circle(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)])
B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)])
C = self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)])
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,))
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_nested_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,))
C = self.create_model("C", bases=(B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_multiple_bases(self):
A = self.create_model("A")
B = self.create_model("B")
C = self.create_model("C", bases=(A, B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [A, C])
self.assertRelated(C, [A, B])
def test_multiple_nested_bases(self):
A = self.create_model("A")
B = self.create_model("B")
C = self.create_model("C", bases=(A, B,))
D = self.create_model("D")
E = self.create_model("E", bases=(D,))
F = self.create_model("F", bases=(C, E,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, C, D, E, F])
self.assertRelated(B, [A, C, D, E, F])
self.assertRelated(C, [A, B, D, E, F])
self.assertRelated(D, [A, B, C, E, F])
self.assertRelated(E, [A, B, C, D, F])
self.assertRelated(F, [A, B, C, D, E])
self.assertRelated(Y, [Z])
self.assertRelated(Z, [Y])
def test_base_to_base_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)])
B = self.create_model("B", bases=(A,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, Y, Z])
self.assertRelated(B, [A, Y, Z])
self.assertRelated(Y, [A, B, Z])
self.assertRelated(Z, [A, B, Y])
def test_base_to_subclass_fk(self):
A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)])
B = self.create_model("B", bases=(A,))
Y = self.create_model("Y")
Z = self.create_model("Z", bases=(Y,))
self.assertRelated(A, [B, Y, Z])
self.assertRelated(B, [A, Y, Z])
self.assertRelated(Y, [A, B, Z])
self.assertRelated(Z, [A, B, Y])
def test_direct_m2m(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')])
B = self.create_model("B")
self.assertRelated(A, [A.a_1.rel.through, B])
self.assertRelated(B, [A, A.a_1.rel.through])
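    # The automatically created through model (reachable as A.a_1.rel.through)
    # is itself part of the related-models graph, which is why it appears in
    # the expected sets above and below.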
def test_direct_m2m_self(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')])
self.assertRelated(A, [A.a_1.rel.through])
def test_intermediate_m2m_self(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')])
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('A', models.CASCADE),
])
self.assertRelated(A, [T])
self.assertRelated(T, [A])
def test_intermediate_m2m(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
])
self.assertRelated(A, [B, T])
self.assertRelated(B, [A, T])
self.assertRelated(T, [A, B])
def test_intermediate_m2m_extern_fk(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
Z = self.create_model("Z")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
models.ForeignKey('Z', models.CASCADE),
])
self.assertRelated(A, [B, T, Z])
self.assertRelated(B, [A, T, Z])
self.assertRelated(T, [A, B, Z])
self.assertRelated(Z, [A, B, T])
def test_intermediate_m2m_base(self):
A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')])
B = self.create_model("B")
S = self.create_model("S")
T = self.create_model("T", foreign_keys=[
models.ForeignKey('A', models.CASCADE),
models.ForeignKey('B', models.CASCADE),
], bases=(S,))
self.assertRelated(A, [B, S, T])
self.assertRelated(B, [A, S, T])
self.assertRelated(S, [A, B, T])
self.assertRelated(T, [A, B, S])
def test_generic_fk(self):
A = self.create_model("A", foreign_keys=[
models.ForeignKey('B', models.CASCADE),
GenericForeignKey(),
])
B = self.create_model("B", foreign_keys=[
models.ForeignKey('C', models.CASCADE),
])
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,))
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,), abstract=True)
C = self.create_model("C", bases=(B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
C = self.create_model("C", bases=(B,), proxy=True)
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_multiple_mixed_bases(self):
A = self.create_model("A", abstract=True)
M = self.create_model("M")
P = self.create_model("P")
Q = self.create_model("Q", bases=(P,), proxy=True)
Z = self.create_model("Z", bases=(A, M, Q))
        # Z gets one-to-one parent links to its concrete bases: m_ptr to M
        # and, via the proxy Q, p_ptr to P.
self.assertRelated(A, [M, P, Q, Z])
self.assertRelated(M, [P, Q, Z])
self.assertRelated(P, [M, Q, Z])
self.assertRelated(Q, [M, P, Z])
self.assertRelated(Z, [M, P, Q])
|
b918c84087fff4ab5138102ac16309187c1968a3beba39daeacc20285ae1262f | import os
from io import StringIO
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django.apps import apps
from django.core import management
from django.core.management import BaseCommand, CommandError, find_commands
from django.core.management.utils import (
find_command, get_random_secret_key, is_ignored_path,
normalize_path_patterns, popen_wrapper,
)
from django.db import connection
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, extend_sys_path
from django.utils import translation
from django.utils.version import PY37
from .management.commands import dance
# A minimal set of apps to avoid system checks running on all apps.
@override_settings(
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'user_commands',
],
)
class CommandTests(SimpleTestCase):
def test_command(self):
out = StringIO()
management.call_command('dance', stdout=out)
self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())
def test_command_style(self):
out = StringIO()
management.call_command('dance', style='Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
# Passing options as arguments also works (thanks argparse)
management.call_command('dance', '--style', 'Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
def test_language_preserved(self):
out = StringIO()
with translation.override('fr'):
management.call_command('dance', stdout=out)
self.assertEqual(translation.get_language(), 'fr')
def test_explode(self):
""" An unknown command raises CommandError """
with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"):
management.call_command(('explode',))
def test_system_exit(self):
""" Exception raised in a command should raise CommandError with
call_command, but SystemExit when run from command line
"""
with self.assertRaises(CommandError) as cm:
management.call_command('dance', example="raise")
self.assertEqual(cm.exception.returncode, 3)
dance.Command.requires_system_checks = False
try:
with captured_stderr() as stderr, self.assertRaises(SystemExit) as cm:
management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute()
self.assertEqual(cm.exception.code, 3)
finally:
dance.Command.requires_system_checks = True
self.assertIn("CommandError", stderr.getvalue())
def test_no_translations_deactivate_translations(self):
"""
When the Command handle method is decorated with @no_translations,
translations are deactivated inside the command.
"""
current_locale = translation.get_language()
with translation.override('pl'):
result = management.call_command('no_translations', stdout=StringIO())
self.assertIsNone(result)
self.assertEqual(translation.get_language(), current_locale)
def test_find_command_without_PATH(self):
"""
find_command should still work when the PATH environment variable
doesn't exist (#22256).
"""
current_path = os.environ.pop('PATH', None)
try:
self.assertIsNone(find_command('_missing_'))
finally:
if current_path is not None:
os.environ['PATH'] = current_path
def test_discover_commands_in_eggs(self):
"""
Management commands can also be loaded from Python eggs.
"""
egg_dir = '%s/eggs' % os.path.dirname(__file__)
egg_name = '%s/basic.egg' % egg_dir
with extend_sys_path(egg_name):
with self.settings(INSTALLED_APPS=['commandegg']):
cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management'))
self.assertEqual(cmds, ['eggcommand'])
def test_call_command_option_parsing(self):
"""
When passing the long option name to call_command, the available option
key is the option dest name (#22985).
"""
out = StringIO()
management.call_command('dance', stdout=out, opt_3=True)
self.assertIn("option3", out.getvalue())
self.assertNotIn("opt_3", out.getvalue())
self.assertNotIn("opt-3", out.getvalue())
def test_call_command_option_parsing_non_string_arg(self):
"""
It should be possible to pass non-string arguments to call_command.
"""
out = StringIO()
management.call_command('dance', 1, verbosity=0, stdout=out)
self.assertIn("You passed 1 as a positional argument.", out.getvalue())
def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self):
out = StringIO()
management.call_command('hal', "--empty", stdout=out)
self.assertEqual(out.getvalue(), "\nDave, I can't do that.\n")
def test_calling_command_with_app_labels_and_parameters_should_be_ok(self):
out = StringIO()
management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
out = StringIO()
management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
with self.assertRaises(CommandError):
management.call_command('hal', stdout=StringIO())
def test_output_transaction(self):
output = management.call_command('transaction', stdout=StringIO(), no_color=True)
self.assertTrue(output.strip().startswith(connection.ops.start_transaction_sql()))
self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql()))
def test_call_command_no_checks(self):
"""
By default, call_command should not trigger the check framework, unless
specifically asked.
"""
self.counter = 0
def patched_check(self_, **kwargs):
self.counter += 1
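        # Temporarily swap in a counting stub for BaseCommand.check so the
        # number of system-check invocations can be asserted; the original is
        # restored in the finally block below.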
saved_check = BaseCommand.check
BaseCommand.check = patched_check
try:
management.call_command("dance", verbosity=0)
self.assertEqual(self.counter, 0)
management.call_command("dance", verbosity=0, skip_checks=False)
self.assertEqual(self.counter, 1)
finally:
BaseCommand.check = saved_check
def test_check_migrations(self):
requires_migrations_checks = dance.Command.requires_migrations_checks
self.assertIs(requires_migrations_checks, False)
try:
with mock.patch.object(BaseCommand, 'check_migrations') as check_migrations:
management.call_command('dance', verbosity=0)
self.assertFalse(check_migrations.called)
dance.Command.requires_migrations_checks = True
management.call_command('dance', verbosity=0)
self.assertTrue(check_migrations.called)
finally:
dance.Command.requires_migrations_checks = requires_migrations_checks
def test_call_command_unrecognized_option(self):
msg = (
'Unknown option(s) for dance command: unrecognized. Valid options '
'are: example, force_color, help, integer, no_color, opt_3, '
'option3, pythonpath, settings, skip_checks, stderr, stdout, '
'style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1)
msg = (
'Unknown option(s) for dance command: unrecognized, unrecognized2. '
'Valid options are: example, force_color, help, integer, no_color, '
'opt_3, option3, pythonpath, settings, skip_checks, stderr, '
'stdout, style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1, unrecognized2=1)
def test_call_command_with_required_parameters_in_options(self):
out = StringIO()
management.call_command('required_option', need_me='foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_call_command_with_required_parameters_in_mixed_options(self):
out = StringIO()
management.call_command('required_option', '--need-me=foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_command_add_arguments_after_common_arguments(self):
out = StringIO()
management.call_command('common_args', stdout=out)
self.assertIn('Detected that --version already exists', out.getvalue())
def test_mutually_exclusive_group_required_options(self):
out = StringIO()
management.call_command('mutually_exclusive_required', foo_id=1, stdout=out)
self.assertIn('foo_id', out.getvalue())
management.call_command('mutually_exclusive_required', foo_name='foo', stdout=out)
self.assertIn('foo_name', out.getvalue())
msg = 'Error: one of the arguments --foo-id --foo-name is required'
with self.assertRaisesMessage(CommandError, msg):
management.call_command('mutually_exclusive_required', stdout=out)
def test_subparser(self):
out = StringIO()
management.call_command('subparser', 'foo', 12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_dest_args(self):
out = StringIO()
management.call_command('subparser_dest', 'foo', bar=12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_dest_required_args(self):
out = StringIO()
management.call_command('subparser_required', 'foo_1', 'foo_2', bar=12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_invalid_option(self):
msg = "Error: invalid choice: 'test' (choose from 'foo')"
with self.assertRaisesMessage(CommandError, msg):
management.call_command('subparser', 'test', 12)
if PY37:
# "required" option requires Python 3.7 and later.
msg = 'Error: the following arguments are required: subcommand'
with self.assertRaisesMessage(CommandError, msg):
management.call_command('subparser_dest', subcommand='foo', bar=12)
else:
msg = (
'Unknown option(s) for subparser_dest command: subcommand. '
'Valid options are: bar, force_color, help, no_color, '
'pythonpath, settings, skip_checks, stderr, stdout, '
'traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('subparser_dest', subcommand='foo', bar=12)
def test_create_parser_kwargs(self):
"""BaseCommand.create_parser() passes kwargs to CommandParser."""
epilog = 'some epilog text'
parser = BaseCommand().create_parser('prog_name', 'subcommand', epilog=epilog)
self.assertEqual(parser.epilog, epilog)
class CommandRunTests(AdminScriptTestCase):
"""
Tests that need to run by simulating the command line, not by call_command.
"""
def test_script_prefix_set_in_commands(self):
self.write_settings('settings.py', apps=['user_commands'], sdict={
'ROOT_URLCONF': '"user_commands.urls"',
'FORCE_SCRIPT_NAME': '"/PREFIX/"',
})
out, err = self.run_manage(['reverse_url'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), '/PREFIX/some/url/')
def test_disallowed_abbreviated_options(self):
"""
        To avoid conflicts with custom options, commands don't allow
        abbreviated forms of the --settings and --pythonpath options.
"""
self.write_settings('settings.py', apps=['user_commands'])
out, err = self.run_manage(['set_option', '--set', 'foo'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), 'Set foo')
def test_skip_checks(self):
self.write_settings('settings.py', apps=['django.contrib.staticfiles', 'user_commands'], sdict={
# (staticfiles.E001) The STATICFILES_DIRS setting is not a tuple or
# list.
'STATICFILES_DIRS': '"foo"',
})
out, err = self.run_manage(['set_option', '--skip-checks', '--set', 'foo'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), 'Set foo')
class UtilsTests(SimpleTestCase):
def test_no_existent_external_program(self):
msg = 'Error executing a_42_command_that_doesnt_exist_42'
with self.assertRaisesMessage(CommandError, msg):
popen_wrapper(['a_42_command_that_doesnt_exist_42'])
def test_get_random_secret_key(self):
key = get_random_secret_key()
self.assertEqual(len(key), 50)
for char in key:
self.assertIn(char, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
def test_is_ignored_path_true(self):
patterns = (
['foo/bar/baz'],
['baz'],
['foo/bar/baz'],
['*/baz'],
['*'],
['b?z'],
['[abc]az'],
['*/ba[!z]/baz'],
)
for ignore_patterns in patterns:
with self.subTest(ignore_patterns=ignore_patterns):
self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=ignore_patterns), True)
def test_is_ignored_path_false(self):
self.assertIs(is_ignored_path('foo/bar/baz', ignore_patterns=['foo/bar/bat', 'bar', 'flub/blub']), False)
def test_normalize_path_patterns_truncates_wildcard_base(self):
expected = [os.path.normcase(p) for p in ['foo/bar', 'bar/*/']]
self.assertEqual(normalize_path_patterns(['foo/bar/*', 'bar/*/']), expected)
|
4a55bddd7890abbc5008360807b70205c038f4919f9ea43b45cd77742827da5e | from django.core.exceptions import ImproperlyConfigured
from django.core.handlers.wsgi import WSGIHandler, WSGIRequest, get_script_name
from django.core.signals import request_finished, request_started
from django.db import close_old_connections, connection
from django.test import (
RequestFactory, SimpleTestCase, TransactionTestCase, override_settings,
)
from django.utils.version import PY37
class HandlerTests(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
def test_middleware_initialized(self):
handler = WSGIHandler()
self.assertIsNotNone(handler._middleware_chain)
def test_bad_path_info(self):
"""
        A non-UTF-8 path populates PATH_INFO with a URL-encoded path and
        produces a 404.
"""
environ = self.request_factory.get('/').environ
environ['PATH_INFO'] = '\xed'
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# The path of the request will be encoded to '/%ED'.
self.assertEqual(response.status_code, 404)
def test_non_ascii_query_string(self):
"""
Non-ASCII query strings are properly decoded (#20530, #22996).
"""
environ = self.request_factory.get('/').environ
raw_query_strings = [
b'want=caf%C3%A9', # This is the proper way to encode 'café'
b'want=caf\xc3\xa9', # UA forgot to quote bytes
b'want=caf%E9', # UA quoted, but not in UTF-8
b'want=caf\xe9', # UA forgot to convert Latin-1 to UTF-8 and to quote (typical of MSIE)
]
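        # Per PEP 3333 the WSGI environ carries QUERY_STRING as a native str
        # of latin-1 decoded bytes, hence the iso-8859-1 round-trip below
        # before handing the environ to WSGIRequest.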
got = []
for raw_query_string in raw_query_strings:
# Simulate http.server.BaseHTTPRequestHandler.parse_request handling of raw request
environ['QUERY_STRING'] = str(raw_query_string, 'iso-8859-1')
request = WSGIRequest(environ)
got.append(request.GET['want'])
# %E9 is converted to the unicode replacement character by parse_qsl
self.assertEqual(got, ['café', 'café', 'caf\ufffd', 'café'])
def test_non_ascii_cookie(self):
"""Non-ASCII cookies set in JavaScript are properly decoded (#20557)."""
environ = self.request_factory.get('/').environ
raw_cookie = 'want="café"'.encode('utf-8').decode('iso-8859-1')
environ['HTTP_COOKIE'] = raw_cookie
request = WSGIRequest(environ)
self.assertEqual(request.COOKIES['want'], "café")
def test_invalid_unicode_cookie(self):
"""
Invalid cookie content should result in an absent cookie, but not in a
crash while trying to decode it (#23638).
"""
environ = self.request_factory.get('/').environ
environ['HTTP_COOKIE'] = 'x=W\x03c(h]\x8e'
request = WSGIRequest(environ)
        # We don't test the COOKIES content, as the result might differ
        # between Python versions because parsing invalid content became
        # stricter in recent versions.
self.assertIsInstance(request.COOKIES, dict)
@override_settings(ROOT_URLCONF='handlers.urls')
def test_invalid_multipart_boundary(self):
"""
Invalid boundary string should produce a "Bad Request" response, not a
server error (#23887).
"""
environ = self.request_factory.post('/malformed_post/').environ
environ['CONTENT_TYPE'] = 'multipart/form-data; boundary=WRONG\x07'
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# Expect "bad request" response
self.assertEqual(response.status_code, 400)
@override_settings(ROOT_URLCONF='handlers.urls', MIDDLEWARE=[])
class TransactionsPerRequestTests(TransactionTestCase):
available_apps = []
def test_no_transaction(self):
response = self.client.get('/in_transaction/')
self.assertContains(response, 'False')
def test_auto_transaction(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
response = self.client.get('/in_transaction/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
self.assertContains(response, 'True')
async def test_auto_transaction_async_view(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
msg = 'You cannot use ATOMIC_REQUESTS with async views.'
with self.assertRaisesMessage(RuntimeError, msg):
await self.async_client.get('/async_regular/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
def test_no_auto_transaction(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
response = self.client.get('/not_in_transaction/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
self.assertContains(response, 'False')
@override_settings(ROOT_URLCONF='handlers.urls')
class SignalsTests(SimpleTestCase):
def setUp(self):
self.signals = []
self.signaled_environ = None
request_started.connect(self.register_started)
request_finished.connect(self.register_finished)
def tearDown(self):
request_started.disconnect(self.register_started)
request_finished.disconnect(self.register_finished)
def register_started(self, **kwargs):
self.signals.append('started')
self.signaled_environ = kwargs.get('environ')
def register_finished(self, **kwargs):
self.signals.append('finished')
def test_request_signals(self):
response = self.client.get('/regular/')
self.assertEqual(self.signals, ['started', 'finished'])
self.assertEqual(response.content, b"regular content")
self.assertEqual(self.signaled_environ, response.wsgi_request.environ)
def test_request_signals_streaming_response(self):
response = self.client.get('/streaming/')
self.assertEqual(self.signals, ['started'])
self.assertEqual(b''.join(response.streaming_content), b"streaming content")
self.assertEqual(self.signals, ['started', 'finished'])
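    # request_finished is only sent once the streaming content has been fully
    # consumed, which is why the signal list grows between the two assertions.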
def empty_middleware(get_response):
pass
@override_settings(ROOT_URLCONF='handlers.urls')
class HandlerRequestTests(SimpleTestCase):
request_factory = RequestFactory()
def test_async_view(self):
"""Calling an async view down the normal synchronous path."""
response = self.client.get('/async_regular/')
self.assertEqual(response.status_code, 200)
def test_suspiciousop_in_view_returns_400(self):
response = self.client.get('/suspicious/')
self.assertEqual(response.status_code, 400)
def test_invalid_urls(self):
response = self.client.get('~%A9helloworld')
self.assertEqual(response.status_code, 404)
self.assertEqual(response.context['request_path'], '/~%25A9helloworld' if PY37 else '/%7E%25A9helloworld')
response = self.client.get('d%aao%aaw%aan%aal%aao%aaa%aad%aa/')
self.assertEqual(response.context['request_path'], '/d%25AAo%25AAw%25AAn%25AAl%25AAo%25AAa%25AAd%25AA')
response = self.client.get('/%E2%99%E2%99%A5/')
self.assertEqual(response.context['request_path'], '/%25E2%2599%E2%99%A5/')
response = self.client.get('/%E2%98%8E%E2%A9%E2%99%A5/')
self.assertEqual(response.context['request_path'], '/%E2%98%8E%25E2%25A9%E2%99%A5/')
def test_environ_path_info_type(self):
environ = self.request_factory.get('/%E2%A8%87%87%A5%E2%A8%A0').environ
self.assertIsInstance(environ['PATH_INFO'], str)
def test_handle_accepts_httpstatus_enum_value(self):
def start_response(status, headers):
start_response.status = status
environ = self.request_factory.get('/httpstatus_enum/').environ
WSGIHandler()(environ, start_response)
self.assertEqual(start_response.status, '200 OK')
@override_settings(MIDDLEWARE=['handlers.tests.empty_middleware'])
def test_middleware_returns_none(self):
msg = 'Middleware factory handlers.tests.empty_middleware returned None.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/')
def test_no_response(self):
msg = "The view %s didn't return an HttpResponse object. It returned None instead."
tests = (
('/no_response_fbv/', 'handlers.views.no_response'),
('/no_response_cbv/', 'handlers.views.NoResponse.__call__'),
)
for url, view in tests:
with self.subTest(url=url), self.assertRaisesMessage(ValueError, msg % view):
self.client.get(url)
class ScriptNameTests(SimpleTestCase):
def test_get_script_name(self):
# Regression test for #23173
# Test first without PATH_INFO
script_name = get_script_name({'SCRIPT_URL': '/foobar/'})
self.assertEqual(script_name, '/foobar/')
script_name = get_script_name({'SCRIPT_URL': '/foobar/', 'PATH_INFO': '/'})
self.assertEqual(script_name, '/foobar')
def test_get_script_name_double_slashes(self):
"""
        WSGI squashes multiple successive slashes in PATH_INFO;
        get_script_name() should take that into account when forming
        SCRIPT_NAME (#17133).
"""
script_name = get_script_name({
'SCRIPT_URL': '/mst/milestones//accounts/login//help',
'PATH_INFO': '/milestones/accounts/login/help',
})
self.assertEqual(script_name, '/mst')
@override_settings(ROOT_URLCONF='handlers.urls')
class AsyncHandlerRequestTests(SimpleTestCase):
"""Async variants of the normal handler request tests."""
async def test_sync_view(self):
"""Calling a sync view down the asynchronous path."""
response = await self.async_client.get('/regular/')
self.assertEqual(response.status_code, 200)
async def test_async_view(self):
"""Calling an async view down the asynchronous path."""
response = await self.async_client.get('/async_regular/')
self.assertEqual(response.status_code, 200)
async def test_suspiciousop_in_view_returns_400(self):
response = await self.async_client.get('/suspicious/')
self.assertEqual(response.status_code, 400)
async def test_no_response(self):
msg = (
"The view handlers.views.no_response didn't return an "
"HttpResponse object. It returned None instead."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get('/no_response_fbv/')
async def test_unawaited_response(self):
msg = (
"The view handlers.views.CoroutineClearingView.__call__ didn't"
" return an HttpResponse object. It returned an unawaited"
" coroutine instead. You may need to add an 'await'"
" into your view."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get('/unawaited/')
|
69c54d27a972ece6976f77a49ca5e4d9cd190ed3b32a92818bd4510e2db42931 | import importlib
import inspect
import os
import re
import sys
import tempfile
import threading
from io import StringIO
from pathlib import Path
from unittest import mock
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import DatabaseError, connection
from django.http import Http404
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.urls import path, reverse
from django.urls.converters import IntConverter
from django.utils.functional import SimpleLazyObject
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.views.debug import (
CallableSettingWrapper, ExceptionReporter, Path as DebugPath,
SafeExceptionReporterFilter, default_urlconf,
get_default_exception_reporter_filter, technical_404_response,
technical_500_response,
)
from django.views.decorators.debug import (
sensitive_post_parameters, sensitive_variables,
)
from ..views import (
custom_exception_reporter_filter_view, index_page,
multivalue_dict_key_error, non_sensitive_view, paranoid_view,
sensitive_args_function_caller, sensitive_kwargs_function_caller,
sensitive_method_view, sensitive_view,
)
class User:
def __str__(self):
return 'jacob'
class WithoutEmptyPathUrls:
urlpatterns = [path('url/', index_page, name='url')]
class CallableSettingWrapperTests(SimpleTestCase):
""" Unittests for CallableSettingWrapper
"""
def test_repr(self):
class WrappedCallable:
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
class DebugViewTests(SimpleTestCase):
def test_files(self):
with self.assertLogs('django.request', 'ERROR'):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
with self.assertLogs('django.request', 'ERROR'):
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs('django.security', 'WARNING'):
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
# Ensure no 403.html template exists to test the default case.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
}])
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
# Set up a test 403.html template.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.locmem.Loader', {
'403.html': 'This is a test template for a 403 error ({{ exception }}).',
}),
],
},
}])
def test_403_template(self):
response = self.client.get('/raises403/')
self.assertContains(response, 'test template', status_code=403)
self.assertContains(response, '(Insufficient Permissions).', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
def test_404_not_in_urls(self):
response = self.client.get('/not-in-urls')
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertContains(response, "Django tried these URL patterns", status_code=404)
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
# Pattern and view name of a RegexURLPattern appear.
self.assertContains(response, r"^regex-post/(?P<pk>[0-9]+)/$", status_code=404)
self.assertContains(response, "[name='regex-post']", status_code=404)
# Pattern and view name of a RoutePattern appear.
self.assertContains(response, r"path-post/<int:pk>/", status_code=404)
self.assertContains(response, "[name='path-post']", status_code=404)
@override_settings(ROOT_URLCONF=WithoutEmptyPathUrls)
def test_404_empty_path_not_in_urls(self):
response = self.client.get('/')
self.assertContains(response, "The empty path didn't match any of these.", status_code=404)
def test_technical_404(self):
response = self.client.get('/technical404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.technical404", status_code=404)
def test_classbased_technical_404(self):
response = self.client.get('/classbased404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.Http404View", status_code=404)
def test_non_l10ned_numeric_ids(self):
"""
        Numeric IDs and the line numbers in fancy traceback context blocks
        shouldn't be localized.
"""
with self.settings(DEBUG=True, USE_L10N=True):
with self.assertLogs('django.request', 'ERROR'):
response = self.client.get('/raises500/')
            # We look for an HTML fragment of the form
            # '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"'.
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content)
self.assertIsNotNone(match)
id_repr = match.group('id')
self.assertFalse(
re.search(b'[^c0-9]', id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." % id_repr.decode()
)
def test_template_exceptions(self):
with self.assertLogs('django.request', 'ERROR'):
try:
self.client.get(reverse('template_exception'))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(
raising_loc.find("raise Exception('boom')"), -1,
"Failed to find 'raise Exception' in last frame of "
"traceback, instead found: %s" % raising_loc
)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [tempdir],
}]), self.assertLogs('django.request', 'ERROR'):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2)
# Assert as HTML.
self.assertContains(
response,
'<li><code>django.template.loaders.filesystem.Loader</code>: '
'%s (Source does not exist)</li>' % os.path.join(tempdir, 'notfound.html'),
status_code=500,
html=True,
)
def test_no_template_source_loaders(self):
"""
        Make sure that if you don't specify a template, the debug view
        doesn't blow up.
"""
with self.assertLogs('django.request', 'ERROR'):
with self.assertRaises(TemplateDoesNotExist):
self.client.get('/render_no_template/')
@override_settings(ROOT_URLCONF='view_tests.default_urls')
def test_default_urlconf_template(self):
"""
        Make sure that the default URLconf template is shown instead of the
        technical 404 page if the user has not altered their URLconf yet.
"""
response = self.client.get('/')
self.assertContains(
response,
"<h2>The install worked successfully! Congratulations!</h2>"
)
@override_settings(ROOT_URLCONF='view_tests.regression_21530_urls')
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get('/')
self.assertContains(
response,
"Page not found <span>(404)</span>",
status_code=404
)
def test_template_encoding(self):
"""
        The templates are loaded directly, not via a template loader, and
        should be opened with the utf-8 charset, which is the default
        specified for template engines.
"""
with mock.patch.object(DebugPath, 'open') as m:
default_urlconf(None)
m.assert_called_once_with(encoding='utf-8')
m.reset_mock()
technical_404_response(mock.MagicMock(), mock.Mock())
m.assert_called_once_with(encoding='utf-8')
def test_technical_404_converter_raise_404(self):
with mock.patch.object(IntConverter, 'to_python', side_effect=Http404):
response = self.client.get('/path-post/1/')
self.assertContains(response, 'Page not found', status_code=404)
def test_exception_reporter_from_request(self):
with self.assertLogs('django.request', 'ERROR'):
response = self.client.get('/custom_reporter_class_view/')
self.assertContains(response, 'custom traceback text', status_code=500)
@override_settings(DEFAULT_EXCEPTION_REPORTER='view_tests.views.CustomExceptionReporter')
def test_exception_reporter_from_settings(self):
with self.assertLogs('django.request', 'ERROR'):
response = self.client.get('/raises500/')
self.assertContains(response, 'custom traceback text', status_code=500)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
databases = {'default'}
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug view.
"""
with connection.cursor() as cursor:
try:
cursor.execute('INVALID SQL')
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get('/'), *exc_info)
self.assertContains(response, 'OperationalError at /', status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF='view_tests.urls',
# No template directories are configured, so no templates will be found.
TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
}],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
# When DEBUG=True, technical_500_template() is called.
with self.assertLogs('django.security', 'WARNING'):
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"})
with self.assertLogs('django.request', 'ERROR'):
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>jacob</p>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
self.assertIn('<p>No POST data</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_eol_support(self):
"""The ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = ['print %d' % i for i in range(1, 6)]
reporter = ExceptionReporter(None, None, None, None)
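        # _get_lines_from_file(filename, lineno, context_lines) is expected to
        # return (start_line, pre_context, context_line, post_context); the
        # slices below assume zero-based line numbers.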
for newline in ['\n', '\r\n', '\r']:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, (newline.join(LINES) + newline).encode())
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:])
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_reporting_of_nested_exceptions(self):
request = self.rf.get('/test_view/')
try:
try:
raise AttributeError(mark_safe('<p>Top level</p>'))
except AttributeError as explicit:
try:
raise ValueError(mark_safe('<p>Second exception</p>')) from explicit
except ValueError:
raise IndexError(mark_safe('<p>Final exception</p>'))
except Exception:
# Custom exception handler, just pass it into ExceptionReporter
exc_type, exc_value, tb = sys.exc_info()
explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:'
implicit_exc = 'During handling of the above exception ({0}), another exception occurred:'
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
# Both messages are twice on page -- one rendered as html,
# one as plain text (for pastebin)
self.assertEqual(2, html.count(explicit_exc.format('<p>Top level</p>')))
self.assertEqual(2, html.count(implicit_exc.format('<p>Second exception</p>')))
self.assertEqual(10, html.count('<p>Final exception</p>'))
text = reporter.get_traceback_text()
self.assertIn(explicit_exc.format('<p>Top level</p>'), text)
self.assertIn(implicit_exc.format('<p>Second exception</p>'), text)
self.assertEqual(3, text.count('<p>Final exception</p>'))
def test_reporting_frames_without_source(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, 'generated', 'exec')
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame['context_line'], '<source code not available>')
self.assertEqual(last_frame['filename'], 'generated')
self.assertEqual(last_frame['function'], 'funcName')
self.assertEqual(last_frame['lineno'], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
text,
)
def test_reporting_frames_source_not_match(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, 'generated', 'exec')
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
with mock.patch(
'django.views.debug.ExceptionReporter._get_source',
return_value=['wrong source'],
):
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame['context_line'], '<source code not available>')
self.assertEqual(last_frame['filename'], 'generated')
self.assertEqual(last_frame['function'], 'funcName')
self.assertEqual(last_frame['lineno'], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
text,
)
def test_reporting_frames_for_cyclic_reference(self):
try:
def test_func():
try:
raise RuntimeError('outer') from RuntimeError('inner')
except RuntimeError as exc:
raise exc.__cause__
test_func()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
def generate_traceback_frames(*args, **kwargs):
nonlocal tb_frames
tb_frames = reporter.get_traceback_frames()
tb_frames = None
tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True)
tb_generator.start()
tb_generator.join(timeout=5)
if tb_generator.is_alive():
# tb_generator is a daemon that runs until the main thread/process
# exits. This is resource heavy when running the full test suite.
# Setting the following values to None makes
# reporter.get_traceback_frames() exit early.
exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None
tb_generator.join()
self.fail('Cyclic reference in Exception Reporter.get_traceback_frames()')
if tb_frames is None:
            # This can happen if the thread generating the traceback was
            # killed, or an exception occurred while generating it.
self.fail('Traceback generation failed')
last_frame = tb_frames[-1]
self.assertIn('raise exc.__cause__', last_frame['context_line'])
self.assertEqual(last_frame['filename'], __file__)
self.assertEqual(last_frame['function'], 'test_func')
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_non_utf8_values_handling(self):
"Non-UTF-8 exceptions/values should not make the output generation choke."
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b'EXC\xe9EXC'
somevar = b'VAL\xe9VAL' # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('VAL\\xe9VAL', html)
self.assertIn('EXC\\xe9EXC', html)
def test_local_variable_escaping(self):
"""Safe strings in local variables are escaped."""
try:
local = mark_safe('<p>Local variable</p>')
raise ValueError(local)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html()
self.assertIn('<td class="code"><pre>'<p>Local variable</p>'</pre></td>', html)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput:
def __repr__(self):
raise MemoryError('OOM')
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(''))
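        # repr() wraps strings in quotes, so the trimmed-byte count reported
        # below includes those extra characters on top of `large`.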
try:
class LargeOutput:
def __repr__(self):
return repr('A' * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        self.assertEqual(len(html) // 1024 // 128, 0)  # still fits within 128 KB
self.assertIn('<trimmed %d bytes string>' % (large + repr_of_str_adds,), html)
def test_encoding_error(self):
"""
A UnicodeError displays a portion of the problematic string. HTML in
safe strings is escaped.
"""
try:
mark_safe('abcdefghijkl<p>mnὀp</p>qrstuwxyz').encode('ascii')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h2>Unicode error hint</h2>', html)
self.assertIn('The string that could not be encoded/decoded was: ', html)
self.assertIn('<strong><p>mnὀp</p></strong>', html)
def test_unfrozen_importlib(self):
"""
        importlib is not a frozen app, but its loader thinks it's frozen,
        which results in an ImportError. Refs #21443.
"""
try:
request = self.rf.get('/test_view/')
importlib.import_module('abc.def.invalid.name')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ModuleNotFoundError at /test_view/</h1>', html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get('/test_view/')
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
self.assertIn(
"BrokenEvaluation",
ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(),
"Evaluation exception reason not mentioned in traceback"
)
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
value = '<td>items</td><td class="code"><pre>'Oops'</pre></td>'
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre><InMemoryUploadedFile: '
'items (application/octet-stream)></pre></td>',
html
)
# COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML('<td>items</td><td class="code"><pre>'Oops'</pre></td>', html)
def test_exception_fetching_user(self):
"""
The error page can be rendered if the current user can't be retrieved
(such as when the database is unavailable).
"""
class ExceptionUser:
def __str__(self):
raise Exception()
request = self.rf.get('/test_view/')
request.user = ExceptionUser()
try:
raise ValueError('Oops')
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<p>[unable to retrieve the current user]</p>', html)
text = reporter.get_traceback_text()
self.assertIn('USER: [unable to retrieve the current user]', text)
def test_template_encoding(self):
"""
        The templates are loaded directly, not via a template loader, and
        should be opened with the utf-8 charset, which is the default
        specified for template engines.
"""
reporter = ExceptionReporter(None, None, None, None)
with mock.patch.object(DebugPath, 'open') as m:
reporter.get_traceback_html()
m.assert_called_once_with(encoding='utf-8')
m.reset_mock()
reporter.get_traceback_text()
m.assert_called_once_with(encoding='utf-8')
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('USER: jacob', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback (most recent call last):', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertNotIn('USER:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback (most recent call last):', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(DEBUG=True)
def test_template_exception(self):
request = self.rf.get('/test_view/')
try:
render(request, 'debug/template_error.html')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
templ_path = Path(Path(__file__).parents[1], 'templates', 'debug', 'template_error.html')
self.assertIn(
'Template error:\n'
'In template %(path)s, error at line 2\n'
' \'cycle\' tag requires at least two arguments\n'
' 1 : Template with error:\n'
' 2 : {%% cycle %%} \n'
' 3 : ' % {'path': templ_path},
text
)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
# GET
request = self.rf.get('/test_view/?items=Oops')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# POST
request = self.rf.post('/test_view/', data={'items': 'Oops'})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# FILES
fp = StringIO('filecontent')
request = self.rf.post('/test_view/', data={'name': 'filename', 'items': fp})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn('items = <InMemoryUploadedFile:', text)
# COOKIES
rf = RequestFactory()
rf.cookies['items'] = 'Oops'
request = rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin:
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {
'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value',
}
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variable or POST parameter values are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
Asserts that potentially sensitive info is displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frame vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
Asserts that certain sensitive info is not displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frame vars are shown in html email reports.
body_html = str(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertNotIn('worcestershire', body_html)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', body_plain)
self.assertIn('hash-brown-value', body_plain)
self.assertIn('baked-beans-value', body_html)
self.assertIn('hash-brown-value', body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain)
self.assertNotIn('bacon-value', body_plain)
self.assertNotIn('sausage-value', body_html)
self.assertNotIn('bacon-value', body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no frame variables or POST parameter values are displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body = str(email.body)
self.assertNotIn('cooked_eggs', body)
self.assertNotIn('scrambled', body)
self.assertNotIn('sauce', body)
self.assertNotIn('worcestershire', body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF='view_tests.urls')
class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports (#14614).
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
Everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
No POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Sensitive POST parameters cannot be seen in the
error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
The sensitive_variables decorator works with object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view, check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view, check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as arguments to the decorated
function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False)
def test_sensitive_function_keyword_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as keyword arguments to the
decorated function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
Callable settings that forbid setting attributes should not break
the debug page (#23070).
"""
class CallableSettingWithSlots:
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get('/raises500/')
self.assertContains(response, 'FOOBAR', status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
'recursive': {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_cleanse_setting_basic(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(reporter_filter.cleanse_setting('TEST', 'TEST'), 'TEST')
self.assertEqual(
reporter_filter.cleanse_setting('PASSWORD', 'super_secret'),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_ignore_case(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(
reporter_filter.cleanse_setting('password', 'super_secret'),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_recurses_in_dictionary(self):
reporter_filter = SafeExceptionReporterFilter()
initial = {'login': 'cooper', 'password': 'secret'}
self.assertEqual(
reporter_filter.cleanse_setting('SETTING_NAME', initial),
{'login': 'cooper', 'password': reporter_filter.cleansed_substitute},
)
def test_request_meta_filtering(self):
request = self.rf.get('/', HTTP_SECRET_HEADER='super_secret')
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(
reporter_filter.get_safe_request_meta(request)['HTTP_SECRET_HEADER'],
reporter_filter.cleansed_substitute,
)
def test_exception_report_uses_meta_filtering(self):
response = self.client.get('/raises500/', HTTP_SECRET_HEADER='super_secret')
self.assertNotIn(b'super_secret', response.content)
response = self.client.get(
'/raises500/',
HTTP_SECRET_HEADER='super_secret',
HTTP_ACCEPT='application/json',
)
self.assertNotIn(b'super_secret', response.content)
class CustomExceptionReporterFilter(SafeExceptionReporterFilter):
cleansed_substitute = 'XXXXXXXXXXXXXXXXXXXX'
hidden_settings = _lazy_re_compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE|DATABASE_URL', flags=re.I)
@override_settings(
ROOT_URLCONF='view_tests.urls',
DEFAULT_EXCEPTION_REPORTER_FILTER='%s.CustomExceptionReporterFilter' % __name__,
)
class CustomExceptionReporterFilterTests(SimpleTestCase):
def setUp(self):
get_default_exception_reporter_filter.cache_clear()
def tearDown(self):
get_default_exception_reporter_filter.cache_clear()
def test_setting_allows_custom_subclass(self):
self.assertIsInstance(
get_default_exception_reporter_filter(),
CustomExceptionReporterFilter,
)
def test_cleansed_substitute_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting('password', 'super_secret'),
reporter_filter.cleansed_substitute,
)
def test_hidden_settings_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting('database_url', 'super_secret'),
reporter_filter.cleansed_substitute,
)
class NonHTMLResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Sensitive information can be filtered out of error reports.
The plain text 500 debug-only error page is served when it has been
detected that the request doesn't accept HTML content. Don't check for
(non)existence of frame vars in the traceback information section of the
response content because they're not included in these error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_ACCEPT='application/json')
def test_non_sensitive_request(self):
"""
Request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
No POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view, check_for_vars=False)
@override_settings(DEBUG=True, ROOT_URLCONF='view_tests.urls')
def test_non_html_response_encoding(self):
response = self.client.get('/raises500/', HTTP_ACCEPT='application/json')
self.assertEqual(response['Content-Type'], 'text/plain; charset=utf-8')
class DecoratorsTests(SimpleTestCase):
def test_sensitive_variables_not_called(self):
msg = (
'sensitive_variables() must be called to use it as a decorator, '
'e.g., use @sensitive_variables(), not @sensitive_variables.'
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_variables
def test_func(password):
pass
def test_sensitive_post_parameters_not_called(self):
msg = (
'sensitive_post_parameters() must be called to use it as a '
'decorator, e.g., use @sensitive_post_parameters(), not '
'@sensitive_post_parameters.'
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_post_parameters
def test_func(request):
return index_page(request)
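# Illustrative sketch (not part of Django's test suite): the decorators tested
# above must be *called* when applied, and sensitive_variables must be the
# outermost of the two. The view name and field names below are hypothetical.
def _example_sensitive_decorators():
    from django.views.decorators.debug import (
        sensitive_post_parameters, sensitive_variables,
    )

    @sensitive_variables('password')
    @sensitive_post_parameters('password')
    def signup_view(request):
        # 'password' is replaced by a cleansed substitute in error reports.
        password = request.POST['password']
        return password

    return signup_view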
|
badb874b7a47458dc972f129dace4470706229d092d2bda7c9d9d035c4f937c5 | import decimal
from django.db import NotSupportedError, connection
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models import DurationField
from django.test import (
SimpleTestCase, TestCase, override_settings, skipIfDBFeature,
)
from django.utils import timezone
class SimpleDatabaseOperationTests(SimpleTestCase):
may_require_msg = 'subclasses of BaseDatabaseOperations may require a %s() method'
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
def test_deferrable_sql(self):
self.assertEqual(self.ops.deferrable_sql(), '')
def test_end_transaction_rollback(self):
self.assertEqual(self.ops.end_transaction_sql(success=False), 'ROLLBACK;')
def test_no_limit_value(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'no_limit_value'):
self.ops.no_limit_value()
def test_quote_name(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'quote_name'):
self.ops.quote_name('a')
def test_regex_lookup(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'regex_lookup'):
self.ops.regex_lookup(lookup_type='regex')
def test_set_time_zone_sql(self):
self.assertEqual(self.ops.set_time_zone_sql(), '')
def test_sql_flush(self):
msg = 'subclasses of BaseDatabaseOperations must provide a sql_flush() method'
with self.assertRaisesMessage(NotImplementedError, msg):
self.ops.sql_flush(None, None, None)
def test_pk_default_value(self):
self.assertEqual(self.ops.pk_default_value(), 'DEFAULT')
def test_tablespace_sql(self):
self.assertEqual(self.ops.tablespace_sql(None), '')
def test_sequence_reset_by_name_sql(self):
self.assertEqual(self.ops.sequence_reset_by_name_sql(None, []), [])
def test_adapt_unknown_value_decimal(self):
value = decimal.Decimal('3.14')
self.assertEqual(
self.ops.adapt_unknown_value(value),
self.ops.adapt_decimalfield_value(value)
)
def test_adapt_unknown_value_date(self):
value = timezone.now().date()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_datefield_value(value))
def test_adapt_unknown_value_time(self):
value = timezone.now().time()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_timefield_value(value))
def test_adapt_timefield_value_none(self):
self.assertIsNone(self.ops.adapt_timefield_value(None))
def test_adapt_datetimefield_value(self):
self.assertIsNone(self.ops.adapt_datetimefield_value(None))
def test_adapt_timefield_value(self):
msg = 'Django does not support timezone-aware times.'
with self.assertRaisesMessage(ValueError, msg):
self.ops.adapt_timefield_value(timezone.make_aware(timezone.now()))
@override_settings(USE_TZ=False)
def test_adapt_timefield_value_unaware(self):
now = timezone.now()
self.assertEqual(self.ops.adapt_timefield_value(now), str(now))
def test_date_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.date_extract_sql(None, None)
def test_time_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.time_extract_sql(None, None)
def test_date_interval_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_interval_sql'):
self.ops.date_interval_sql(None)
def test_date_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_trunc_sql'):
self.ops.date_trunc_sql(None, None)
def test_time_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'time_trunc_sql'):
self.ops.time_trunc_sql(None, None)
def test_datetime_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_trunc_sql'):
self.ops.datetime_trunc_sql(None, None, None)
def test_datetime_cast_date_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_date_sql'):
self.ops.datetime_cast_date_sql(None, None)
def test_datetime_cast_time_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_time_sql'):
self.ops.datetime_cast_time_sql(None, None)
def test_datetime_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_extract_sql'):
self.ops.datetime_extract_sql(None, None, None)
class DatabaseOperationTests(TestCase):
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
@skipIfDBFeature('supports_over_clause')
def test_window_frame_raise_not_supported_error(self):
msg = 'This backend does not support window expressions.'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.window_frame_rows_start_end()
@skipIfDBFeature('can_distinct_on_fields')
def test_distinct_on_fields(self):
msg = 'DISTINCT ON fields is not supported by this database backend'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.distinct_sql(['a', 'b'], None)
@skipIfDBFeature('supports_temporal_subtraction')
def test_subtract_temporals(self):
duration_field = DurationField()
duration_field_internal_type = duration_field.get_internal_type()
msg = (
'This backend does not support %s subtraction.' %
duration_field_internal_type
)
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.subtract_temporals(duration_field_internal_type, None, None)
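# Illustrative sketch (not part of Django's test suite): a minimal third-party
# backend would subclass BaseDatabaseOperations and supply the methods the
# tests above treat as required; the class and behavior below are hypothetical.
def _example_operations_subclass():
    from django.db.backends.base.operations import BaseDatabaseOperations

    class MyBackendOperations(BaseDatabaseOperations):
        def quote_name(self, name):
            # Quote identifiers the way the target database expects.
            return name if name.startswith('"') else '"%s"' % name

        def no_limit_value(self):
            # Value used when a query has an offset but no limit.
            return None

    return MyBackendOperations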
|
1434501baa9e8809663e73c0a433fff50eaf7f32042d5ec90e5f18c283747f74 | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import operator
import warnings
from collections import namedtuple
from functools import lru_cache
from itertools import chain
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY, IntegrityError, NotSupportedError, connections,
router, transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, sql
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, Expression, F, Value, When
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE
from django.db.models.utils import resolve_callables
from django.utils import timezone
from django.utils.functional import cached_property, partition
from django.utils.version import get_version
# The maximum number of results to fetch in a get() query.
MAX_GET_RESULTS = 21
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
class BaseIterable:
def __init__(self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
class ModelIterable(BaseIterable):
"""Iterable that yields a model instance for each row."""
def __iter__(self):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info,
compiler.annotation_col_map)
model_cls = klass_info['model']
select_fields = klass_info['select_fields']
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
init_list = [f[0].target.attname
for f in select[model_fields_start:model_fields_end]]
related_populators = get_related_populators(klass_info, select, db)
known_related_objects = [
(field, related_objs, operator.attrgetter(*[
field.attname
if from_field == 'self' else
queryset.model._meta.get_field(from_field).attname
for from_field in field.from_fields
])) for field, related_objs in queryset._known_related_objects.items()
]
for row in compiler.results_iter(results):
obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
for attr_name, col_pos in annotation_col_map.items():
setattr(obj, attr_name, row[col_pos])
# Add the known related objects to the model.
for field, rel_objs, rel_getter in known_related_objects:
# Avoid overwriting objects loaded by, e.g., select_related().
if field.is_cached(obj):
continue
rel_obj_id = rel_getter(obj)
try:
rel_obj = rel_objs[rel_obj_id]
except KeyError:
pass # May happen in qs1 | qs2 scenarios.
else:
setattr(obj, field.name, rel_obj)
yield obj
class ValuesIterable(BaseIterable):
"""
Iterable returned by QuerySet.values() that yields a dict for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
indexes = range(len(names))
for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
yield {names[i]: row[i] for i in indexes}
class ValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
if queryset._fields:
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
fields = [*queryset._fields, *(f for f in query.annotation_select if f not in queryset._fields)]
if fields != names:
# Reorder according to fields.
index_map = {name: idx for idx, name in enumerate(names)}
rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
return map(
rowfactory,
compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
)
return compiler.results_iter(tuple_expected=True, chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
class NamedValuesListIterable(ValuesListIterable):
"""
Iterable returned by QuerySet.values_list(named=True) that yields a
namedtuple for each row.
"""
@staticmethod
@lru_cache()
def create_namedtuple_class(*names):
# Cache namedtuple() with @lru_cache() since it's too slow to be
# called for every QuerySet evaluation.
return namedtuple('Row', names)
def __iter__(self):
queryset = self.queryset
if queryset._fields:
names = queryset._fields
else:
query = queryset.query
names = [*query.extra_select, *query.values_select, *query.annotation_select]
tuple_class = self.create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
yield new(tuple_class, row)
class FlatValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=True) that yields single
values.
"""
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
yield row[0]
class QuerySet:
"""Represent a lazy database lookup for a set of objects."""
def __init__(self, model=None, query=None, using=None, hints=None):
self.model = model
self._db = using
self._hints = hints or {}
self._query = query or sql.Query(self.model)
self._result_cache = None
self._sticky_filter = False
self._for_write = False
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@property
def query(self):
if self._deferred_filter:
negate, args, kwargs = self._deferred_filter
self._filter_or_exclude_inplace(negate, *args, **kwargs)
self._deferred_filter = None
return self._query
@query.setter
def query(self, value):
self._query = value
def as_manager(cls):
# Address the circular dependency between `QuerySet` and `Manager`.
from django.db.models.manager import Manager
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == '_result_cache':
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
# Force the cache to be fully populated.
self._fetch_all()
return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: get_version()}
def __setstate__(self, state):
msg = None
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
current_version = get_version()
if current_version != pickled_version:
msg = (
"Pickled queryset instance's Django version %s does not "
"match the current version %s." % (pickled_version, current_version)
)
else:
msg = "Pickled queryset instance's Django version is not specified."
if msg:
warnings.warn(msg, RuntimeWarning, stacklevel=2)
self.__dict__.update(state)
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return '<%s %r>' % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler.execute_sql()
- Returns 100 rows at a time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql.compiler.results_iter()
- Returns one row at a time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location.
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
'QuerySet indices must be integers or slices, not %s.'
% type(k).__name__
)
assert ((not isinstance(k, slice) and (k >= 0)) or
(isinstance(k, slice) and (k.start is None or k.start >= 0) and
(k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
if self._result_cache is not None:
return self._result_cache[k]
if isinstance(k, slice):
qs = self._chain()
if k.start is not None:
start = int(k.start)
else:
start = None
if k.stop is not None:
stop = int(k.stop)
else:
stop = None
qs.query.set_limits(start, stop)
return list(qs)[::k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
qs._fetch_all()
return qs._result_cache[0]
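# Illustrative sketch (not part of Django's source): slicing stays lazy unless
# a step is given, and integer indexing runs a single-row query. `Book` is a
# hypothetical model.
def _example_slicing(Book):
    qs = Book.objects.order_by('title')
    first_ten = qs[:10]       # lazy; adds LIMIT 10 when evaluated
    every_other = qs[:10:2]   # a step forces evaluation and returns a list
    third = qs[2]             # single-row query (LIMIT 1 OFFSET 2)
    return first_ten, every_other, third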
def __class_getitem__(cls, *args, **kwargs):
return cls
def __and__(self, other):
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._chain()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = self if self.query.can_filter() else self.model._base_manager.filter(pk__in=self.values('pk'))
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values('pk'))
combined.query.combine(other.query, sql.OR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
yield from self._iterable_class(self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size)
def iterator(self, chunk_size=2000):
"""
An iterator over the results from applying this QuerySet to the
database.
"""
if chunk_size <= 0:
raise ValueError('Chunk size must be strictly positive.')
use_chunked_fetch = not connections[self.db].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS')
return self._iterator(use_chunked_fetch, chunk_size)
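# Illustrative sketch (not part of Django's source): iterator() streams rows in
# chunks instead of filling the result cache, using a server-side cursor where
# the backend supports it. `Book` is a hypothetical model.
def _example_iterator(Book):
    total = 0
    for book in Book.objects.all().iterator(chunk_size=500):
        total += 1
    return total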
def aggregate(self, *args, **kwargs):
"""
Return a dictionary containing the calculations (aggregation)
over the current queryset.
If args is present the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
self._validate_values_are_expressions((*args, *kwargs.values()), method_name='aggregate')
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
# attribute.
try:
arg.default_alias
except (AttributeError, TypeError):
raise TypeError("Complex aggregates require an alias")
kwargs[arg.default_alias] = arg
query = self.query.chain()
for (alias, aggregate_expr) in kwargs.items():
query.add_annotation(aggregate_expr, alias, is_summary=True)
if not query.annotations[alias].contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
return query.get_aggregation(self.db, kwargs)
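# Illustrative sketch (not part of Django's source): positional aggregates use
# their default alias, keyword aggregates use the given name. `Book` is a
# hypothetical model with a `price` field.
def _example_aggregate(Book):
    from django.db.models import Avg, Max
    # Returns e.g. {'price__avg': ..., 'highest': ...}
    return Book.objects.aggregate(Avg('price'), highest=Max('price'))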
def count(self):
"""
Perform a SELECT COUNT() and return the number of records as an
integer.
If the QuerySet is already fully cached, return the length of the
cached results set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
def get(self, *args, **kwargs):
"""
Perform the query and return a single object matching the given
keyword arguments.
"""
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
if not clone.query.select_for_update or connections[clone.db].features.supports_select_for_update_with_limit:
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." %
self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
'get() returned more than one %s -- it returned %s!' % (
self.model._meta.object_name,
num if not limit or num < limit else 'more than %s' % (limit - 1),
)
)
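# Illustrative sketch (not part of Django's source): get() returns exactly one
# object or raises DoesNotExist / MultipleObjectsReturned. `Book` is a
# hypothetical model.
def _example_get(Book):
    try:
        return Book.objects.get(title='Dune')
    except Book.DoesNotExist:
        return None
    except Book.MultipleObjectsReturned:
        # More than one row matched the non-unique lookup.
        raise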
def create(self, **kwargs):
"""
Create a new object with the given kwargs, saving it to the database
and returning the created object.
"""
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
return obj
def _populate_pk_values(self, objs):
for obj in objs:
if obj.pk is None:
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
"""
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
Multi-table models are not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
# you can't insert into the child tables which reference this. There
# are two workarounds:
# 1) This could be implemented if you didn't have an autoincrement pk
# 2) You could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back and then doing a single bulk
# insert into the childmost table.
# We currently set the primary keys on the objects when using
# PostgreSQL via the RETURNING ID clause. It should be possible for
# Oracle as well, but the semantics for extracting the primary keys is
# trickier so it's not done yet.
assert batch_size is None or batch_size > 0
# Check that the parents share the same concrete model with our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
# would not identify that case as involving multiple tables.
for parent in self.model._meta.get_parent_list():
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
if not objs:
return objs
self._for_write = True
connection = connections[self.db]
opts = self.model._meta
fields = opts.concrete_fields
objs = list(objs)
self._populate_pk_values(objs)
with transaction.atomic(using=self.db, savepoint=False):
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
returned_columns = self._batched_insert(
objs_with_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
if field != opts.pk:
setattr(obj_with_pk, field.attname, result)
for obj_with_pk in objs_with_pk:
obj_with_pk._state.adding = False
obj_with_pk._state.db = self.db
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
)
if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_without_pk, field.attname, result)
obj_without_pk._state.adding = False
obj_without_pk._state.db = self.db
return objs
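# Illustrative sketch (not part of Django's source): bulk_create() inserts many
# rows in a handful of queries and skips save() and the pre/post_save signals.
# `Book` is a hypothetical model.
def _example_bulk_create(Book):
    books = [Book(title='Vol. %d' % i) for i in range(1000)]
    return Book.objects.bulk_create(books, batch_size=100, ignore_conflicts=True)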
def bulk_update(self, objs, fields, batch_size=None):
"""
Update the given fields in each of the given objects in the database.
"""
if batch_size is not None and batch_size < 0:
raise ValueError('Batch size must be a positive integer.')
if not fields:
raise ValueError('Field names must be given to bulk_update().')
objs = tuple(objs)
if any(obj.pk is None for obj in objs):
raise ValueError('All bulk_update() objects must have a primary key set.')
fields = [self.model._meta.get_field(name) for name in fields]
if any(not f.concrete or f.many_to_many for f in fields):
raise ValueError('bulk_update() can only be used with concrete fields.')
if any(f.primary_key for f in fields):
raise ValueError('bulk_update() cannot be used with primary key fields.')
if not objs:
return
# PK is used twice in the resulting update query, once in the filter
# and once in the WHEN. Each field will also have one CAST.
max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
requires_casting = connections[self.db].features.requires_casted_case_in_updates
batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
updates = []
for batch_objs in batches:
update_kwargs = {}
for field in fields:
when_statements = []
for obj in batch_objs:
attr = getattr(obj, field.attname)
if not isinstance(attr, Expression):
attr = Value(attr, output_field=field)
when_statements.append(When(pk=obj.pk, then=attr))
case_statement = Case(*when_statements, output_field=field)
if requires_casting:
case_statement = Cast(case_statement, output_field=field)
update_kwargs[field.attname] = case_statement
updates.append(([obj.pk for obj in batch_objs], update_kwargs))
with transaction.atomic(using=self.db, savepoint=False):
for pks, update_kwargs in updates:
self.filter(pk__in=pks).update(**update_kwargs)
bulk_update.alters_data = True
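# Illustrative sketch (not part of Django's source): bulk_update() writes the
# changed fields with CASE ... WHEN expressions keyed on the primary key.
# `Book` is a hypothetical model with a nullable `price` field.
def _example_bulk_update(Book):
    books = list(Book.objects.filter(price__isnull=True))
    for book in books:
        book.price = 0
    Book.objects.bulk_update(books, ['price'], batch_size=100)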
def get_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, creating one if necessary.
Return a tuple of (object, created), where created is a boolean
specifying whether an object was created.
"""
# The get() needs to be targeted at the write database in order
# to avoid potential transaction consistency problems.
self._for_write = True
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
return self._create_object_from_params(kwargs, params)
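# Illustrative sketch (not part of Django's source): get_or_create() looks the
# object up by the keyword arguments and only uses `defaults` when it has to
# create a new row. `Book` is a hypothetical model.
def _example_get_or_create(Book):
    book, created = Book.objects.get_or_create(
        title='Dune', defaults={'price': 10},
    )
    return book, created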
def update_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, updating one with defaults
if it exists, otherwise create a new one.
Return a tuple (object, created), where created is a boolean
specifying whether an object was created.
"""
defaults = defaults or {}
self._for_write = True
with transaction.atomic(using=self.db):
try:
obj = self.select_for_update().get(**kwargs)
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
# Lock the row so that a concurrent update is blocked until
# after update_or_create() has performed its save.
obj, created = self._create_object_from_params(kwargs, params, lock=True)
if created:
return obj, created
for k, v in resolve_callables(defaults):
setattr(obj, k, v)
obj.save(using=self.db)
return obj, False
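# Illustrative sketch (not part of Django's source): update_or_create() applies
# `defaults` to an existing row (fetched with select_for_update()) or creates a
# new one. `Book` is a hypothetical model.
def _example_update_or_create(Book):
    book, created = Book.objects.update_or_create(
        title='Dune', defaults={'price': 12},
    )
    return book, created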
def _create_object_from_params(self, lookup, params, lock=False):
"""
Try to create an object using passed params. Used by get_or_create()
and update_or_create().
"""
try:
with transaction.atomic(using=self.db):
params = dict(resolve_callables(params))
obj = self.create(**params)
return obj, True
except IntegrityError:
try:
qs = self.select_for_update() if lock else self
return qs.get(**lookup), False
except self.model.DoesNotExist:
pass
raise
def _extract_model_params(self, defaults, **kwargs):
"""
Prepare `params` for creating a model instance based on the given
kwargs; for use by get_or_create() and update_or_create().
"""
defaults = defaults or {}
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
params.update(defaults)
property_names = self.model._meta._property_names
invalid_params = []
for param in params:
try:
self.model._meta.get_field(param)
except exceptions.FieldDoesNotExist:
# It's okay to use a model's property if it has a setter.
if not (param in property_names and getattr(self.model, param).fset):
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
"Invalid field name(s) for model %s: '%s'." % (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
))
return params
def _earliest(self, *fields):
"""
Return the earliest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if fields:
order_by = fields
else:
order_by = getattr(self.model._meta, 'get_latest_by')
if order_by and not isinstance(order_by, (tuple, list)):
order_by = (order_by,)
if order_by is None:
raise ValueError(
"earliest() and latest() require either fields as positional "
"arguments or 'get_latest_by' in the model's Meta."
)
assert not self.query.is_sliced, \
"Cannot change a query once a slice has been taken."
obj = self._chain()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force_empty=True)
obj.query.add_ordering(*order_by)
return obj.get()
def earliest(self, *fields):
return self._earliest(*fields)
def latest(self, *fields):
return self.reverse()._earliest(*fields)
def first(self):
"""Return the first object of a query or None if no match is found."""
for obj in (self if self.ordered else self.order_by('pk'))[:1]:
return obj
def last(self):
"""Return the last object of a query or None if no match is found."""
for obj in (self.reverse() if self.ordered else self.order_by('-pk'))[:1]:
return obj
def in_bulk(self, id_list=None, *, field_name='pk'):
"""
Return a dictionary mapping each of the given IDs to the object with
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
"""
assert not self.query.is_sliced, \
"Cannot use 'limit' or 'offset' with in_bulk"
if field_name != 'pk' and not self.model._meta.get_field(field_name).unique:
raise ValueError("in_bulk()'s field_name must be a unique field but %r isn't." % field_name)
if id_list is not None:
if not id_list:
return {}
filter_key = '{}__in'.format(field_name)
batch_size = connections[self.db].features.max_query_params
id_list = tuple(id_list)
# If the database has a limit on the number of query parameters
# (e.g. SQLite), retrieve objects in batches if necessary.
if batch_size and batch_size < len(id_list):
qs = ()
for offset in range(0, len(id_list), batch_size):
batch = id_list[offset:offset + batch_size]
qs += tuple(self.filter(**{filter_key: batch}).order_by())
else:
qs = self.filter(**{filter_key: id_list}).order_by()
else:
qs = self._chain()
return {getattr(obj, field_name): obj for obj in qs}
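# Illustrative sketch (not part of Django's source): in_bulk() maps values of a
# unique field to instances. `Book` is a hypothetical model with a unique
# `isbn` field.
def _example_in_bulk(Book):
    by_pk = Book.objects.in_bulk([1, 2, 3])            # {1: <Book>, 2: ..., 3: ...}
    by_isbn = Book.objects.in_bulk(['0441013597'], field_name='isbn')
    return by_pk, by_isbn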
def delete(self):
"""Delete the records in the current QuerySet."""
self._not_support_combined_queries('delete')
assert not self.query.is_sliced, \
"Cannot use 'limit' or 'offset' with delete."
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
del_query = self._chain()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force_empty=True)
collector = Collector(using=del_query.db)
collector.collect(del_query)
deleted, _rows_count = collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
return deleted, _rows_count
delete.alters_data = True
delete.queryset_only = True
def _raw_delete(self, using):
"""
Delete objects found from the given queryset in a single direct SQL
query. No signals are sent and there is no protection for cascades.
"""
query = self.query.clone()
query.__class__ = sql.DeleteQuery
cursor = query.get_compiler(using).execute_sql(CURSOR)
if cursor:
with cursor:
return cursor.rowcount
return 0
_raw_delete.alters_data = True
def update(self, **kwargs):
"""
Update all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
self._not_support_combined_queries('update')
assert not self.query.is_sliced, \
"Cannot update a query once a slice has been taken."
self._for_write = True
query = self.query.chain(sql.UpdateQuery)
query.add_update_values(kwargs)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
with transaction.mark_for_rollback_on_error(using=self.db):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
update.alters_data = True
def _update(self, values):
"""
A version of update() that accepts field objects instead of field names.
Used primarily for model saving and not intended for use by general
code (it requires too much poking around at model internals to be
useful at that level).
"""
assert not self.query.is_sliced, \
"Cannot update a query once a slice has been taken."
query = self.query.chain(sql.UpdateQuery)
query.add_update_fields(values)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
_update.alters_data = True
_update.queryset_only = False
def exists(self):
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
def _prefetch_related_objects(self):
# This method can only be called once the result cache has been filled.
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def explain(self, *, format=None, **options):
return self.query.explain(using=self.db, format=format, **options)
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=None, translations=None, using=None):
if using is None:
using = self.db
qs = RawQuerySet(raw_query, model=self.model, params=params, translations=translations, using=using)
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
def _values(self, *fields, **expressions):
clone = self._chain()
if expressions:
clone = clone.annotate(**expressions)
clone._fields = fields
clone.query.set_values(fields)
return clone
def values(self, *fields, **expressions):
fields += tuple(expressions)
clone = self._values(*fields, **expressions)
clone._iterable_class = ValuesIterable
return clone
def values_list(self, *fields, flat=False, named=False):
if flat and named:
raise TypeError("'flat' and 'named' can't be used together.")
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
field_names = {f for f in fields if not hasattr(f, 'resolve_expression')}
_fields = []
expressions = {}
counter = 1
for field in fields:
if hasattr(field, 'resolve_expression'):
field_id_prefix = getattr(field, 'default_alias', field.__class__.__name__.lower())
while True:
field_id = field_id_prefix + str(counter)
counter += 1
if field_id not in field_names:
break
expressions[field_id] = field
_fields.append(field_id)
else:
_fields.append(field)
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
NamedValuesListIterable if named
else FlatValuesListIterable if flat
else ValuesListIterable
)
return clone
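# Illustrative sketch (not part of Django's source): flat=True yields bare
# values and named=True yields namedtuples. `Book` is a hypothetical model.
def _example_values_list(Book):
    titles = list(Book.objects.values_list('title', flat=True))
    rows = list(Book.objects.values_list('title', 'price', named=True))
    return titles, rows[0].title if rows else None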
def dates(self, field_name, kind, order='ASC'):
"""
Return a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
assert kind in ('year', 'month', 'week', 'day'), \
"'kind' must be one of 'year', 'month', 'week', or 'day'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
return self.annotate(
datefield=Trunc(field_name, kind, output_field=DateField()),
plain_field=F(field_name)
).values_list(
'datefield', flat=True
).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datefield')
def datetimes(self, field_name, kind, order='ASC', tzinfo=None, is_dst=None):
"""
Return a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
assert kind in ('year', 'month', 'week', 'day', 'hour', 'minute', 'second'), \
"'kind' must be one of 'year', 'month', 'week', 'day', 'hour', 'minute', or 'second'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
return self.annotate(
datetimefield=Trunc(
field_name,
kind,
output_field=DateTimeField(),
tzinfo=tzinfo,
is_dst=is_dst,
),
plain_field=F(field_name)
).values_list(
'datetimefield', flat=True
).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield')
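# Illustrative sketch (not part of Django's source): dates() and datetimes()
# return the distinct truncated values present for a field. `Book` is a
# hypothetical model with a `pub_date` DateField.
def _example_dates(Book):
    months = Book.objects.dates('pub_date', 'month', order='DESC')
    return list(months)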
def none(self):
"""Return an empty QuerySet."""
clone = self._chain()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
def all(self):
"""
Return a new QuerySet that is a copy of the current one. This allows a
QuerySet to proxy for a model manager in some cases.
"""
return self._chain()
def filter(self, *args, **kwargs):
"""
Return a new QuerySet instance with the args ANDed to the existing
set.
"""
self._not_support_combined_queries('filter')
return self._filter_or_exclude(False, *args, **kwargs)
def exclude(self, *args, **kwargs):
"""
Return a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
self._not_support_combined_queries('exclude')
return self._filter_or_exclude(True, *args, **kwargs)
def _filter_or_exclude(self, negate, *args, **kwargs):
if args or kwargs:
assert not self.query.is_sliced, \
"Cannot filter a query once a slice has been taken."
clone = self._chain()
if self._defer_next_filter:
self._defer_next_filter = False
clone._deferred_filter = negate, args, kwargs
else:
clone._filter_or_exclude_inplace(negate, *args, **kwargs)
return clone
def _filter_or_exclude_inplace(self, negate, *args, **kwargs):
if negate:
self._query.add_q(~Q(*args, **kwargs))
else:
self._query.add_q(Q(*args, **kwargs))
def complex_filter(self, filter_obj):
"""
Return a new QuerySet instance with filter_obj added to the filters.
filter_obj can be a Q object or a dictionary of keyword lookup
arguments.
This exists to support framework features such as 'limit_choices_to',
and usually it will be more natural to use other methods.
"""
if isinstance(filter_obj, Q):
clone = self._chain()
clone.query.add_q(filter_obj)
return clone
else:
return self._filter_or_exclude(False, **filter_obj)
def _combinator_query(self, combinator, *other_qs, all=False):
# Clone the query to inherit the select list and everything
clone = self._chain()
# Clear limits and ordering so they can be reapplied
clone.query.clear_ordering(True)
clone.query.clear_limits()
clone.query.combined_queries = (self.query,) + tuple(qs.query for qs in other_qs)
clone.query.combinator = combinator
clone.query.combinator_all = all
return clone
def union(self, *other_qs, all=False):
# If the query is an EmptyQuerySet, combine all nonempty querysets.
if isinstance(self, EmptyQuerySet):
qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
return qs[0]._combinator_query('union', *qs[1:], all=all) if qs else self
return self._combinator_query('union', *other_qs, all=all)
def intersection(self, *other_qs):
# If any query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
for other in other_qs:
if isinstance(other, EmptyQuerySet):
return other
return self._combinator_query('intersection', *other_qs)
def difference(self, *other_qs):
# If the query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
return self._combinator_query('difference', *other_qs)
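# Illustrative combinator sketch (hypothetical querysets qs1/qs2):
#
#     qs1.union(qs2)               # SELECT ... UNION SELECT ...
#     qs1.union(qs2, all=True)     # UNION ALL keeps duplicates
#     qs1.intersection(qs2)        # INTERSECT
#     qs1.difference(qs2)          # EXCEPT
#
# Note the EmptyQuerySet handling above: union() called on an empty queryset
# combines only the nonempty operands, while intersection() short-circuits
# to the empty queryset.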
def select_for_update(self, nowait=False, skip_locked=False, of=()):
"""
Return a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
if nowait and skip_locked:
raise ValueError('The nowait option cannot be used with skip_locked.')
obj = self._chain()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
obj.query.select_for_update_skip_locked = skip_locked
obj.query.select_for_update_of = of
return obj
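# Hedged usage sketch (hypothetical Entry model; most backends require an
# open transaction when the locked queryset is evaluated):
#
#     with transaction.atomic():
#         obj = Entry.objects.select_for_update(skip_locked=True).get(pk=1)
#
# nowait and skip_locked are mutually exclusive, as enforced above.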
def select_related(self, *fields):
"""
Return a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, clear the list.
"""
self._not_support_combined_queries('select_related')
if self._fields is not None:
raise TypeError("Cannot call select_related() after .values() or .values_list()")
obj = self._chain()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
"""
Return a new QuerySet instance that will prefetch the specified
Many-To-One and Many-To-Many related objects when the QuerySet is
evaluated.
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
self._not_support_combined_queries('prefetch_related')
clone = self._chain()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
for lookup in lookups:
if isinstance(lookup, Prefetch):
lookup = lookup.prefetch_to
lookup = lookup.split(LOOKUP_SEP, 1)[0]
if lookup in self.query._filtered_relations:
raise ValueError('prefetch_related() is not supported with FilteredRelation.')
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
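# Hedged usage sketch (assumes hypothetical Author/Book models):
#
#     Author.objects.prefetch_related('books')
#     Author.objects.prefetch_related(
#         Prefetch('books', queryset=Book.objects.filter(published=True),
#                  to_attr='published_books'),
#     )
#     qs.prefetch_related(None)    # clears previously added lookups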
def annotate(self, *args, **kwargs):
"""
Return a query set in which the returned objects have been annotated
with extra data or aggregations.
"""
self._not_support_combined_queries('annotate')
self._validate_values_are_expressions(args + tuple(kwargs.values()), method_name='annotate')
annotations = {}
for arg in args:
# The default_alias property may raise a TypeError.
try:
if arg.default_alias in kwargs:
raise ValueError("The named annotation '%s' conflicts with the "
"default name for another annotation."
% arg.default_alias)
except TypeError:
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
annotations.update(kwargs)
clone = self._chain()
names = self._fields
if names is None:
names = set(chain.from_iterable(
(field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
for field in self.model._meta.get_fields()
))
for alias, annotation in annotations.items():
if alias in names:
raise ValueError("The annotation '%s' conflicts with a field on "
"the model." % alias)
if isinstance(annotation, FilteredRelation):
clone.query.add_filtered_relation(annotation, alias)
else:
clone.query.add_annotation(annotation, alias, is_summary=False)
for alias, annotation in clone.query.annotations.items():
if alias in annotations and annotation.contains_aggregate:
if clone._fields is None:
clone.query.group_by = True
else:
clone.query.set_group_by()
break
return clone
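# Illustrative annotate() sketch (assumed Author model with a `books`
# reverse relation; Count is django.db.models.Count):
#
#     Author.objects.annotate(num_books=Count('books'))
#     Author.objects.annotate(Count('books'))   # alias defaults to 'books__count'
#
# An aggregate annotation switches the query into GROUP BY mode, as the
# loop over clone.query.annotations above shows.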
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
assert not self.query.is_sliced, \
"Cannot reorder a query once a slice has been taken."
obj = self._chain()
obj.query.clear_ordering(force_empty=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Return a new QuerySet instance that will select only distinct results.
"""
assert not self.query.is_sliced, \
"Cannot create distinct fields once a slice has been taken."
obj = self._chain()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(self, select=None, where=None, params=None, tables=None,
order_by=None, select_params=None):
"""Add extra SQL fragments to the query."""
self._not_support_combined_queries('extra')
assert not self.query.is_sliced, \
"Cannot change a query once a slice has been taken"
clone = self._chain()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""Reverse the ordering of the QuerySet."""
if self.query.is_sliced:
raise TypeError('Cannot reverse a query once a slice has been taken.')
clone = self._chain()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defer the loading of data for certain fields until they are accessed.
Add the set of deferred fields to any existing set of deferred fields.
The only exception to this is if None is passed in as the only
parameter, in which case all deferrals are removed.
"""
self._not_support_combined_queries('defer')
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._chain()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer(). Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
self._not_support_combined_queries('only')
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
# Can only pass None to defer(), not only(), as the rest option.
# That won't stop people trying to do this, so let's be explicit.
raise TypeError("Cannot pass None as an argument to only().")
for field in fields:
field = field.split(LOOKUP_SEP, 1)[0]
if field in self.query._filtered_relations:
raise ValueError('only() is not supported with FilteredRelation.')
clone = self._chain()
clone.query.add_immediate_loading(fields)
return clone
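# Sketch of the defer()/only() interaction (hypothetical Entry model):
#
#     Entry.objects.defer('body')            # load everything except 'body'
#     Entry.objects.only('headline')         # load only 'headline' (plus the pk)
#     Entry.objects.defer(None)              # remove all deferrals
#
# Deferred fields remain accessible; accessing one simply triggers an
# additional query.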
def using(self, alias):
"""Select which database this QuerySet should execute against."""
clone = self._chain()
clone._db = alias
return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@property
def ordered(self):
"""
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model (or is empty).
"""
if isinstance(self, EmptyQuerySet):
return True
if self.query.extra_order_by or self.query.order_by:
return True
elif self.query.default_ordering and self.query.get_meta().ordering:
return True
else:
return False
@property
def db(self):
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
def _insert(self, objs, fields, returning_fields=None, raw=False, using=None, ignore_conflicts=False):
"""
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
if using is None:
using = self.db
query = sql.InsertQuery(self.model, ignore_conflicts=ignore_conflicts)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(returning_fields)
_insert.alters_data = True
_insert.queryset_only = False
def _batched_insert(self, objs, fields, batch_size, ignore_conflicts=False):
"""
Helper method for bulk_create() to insert objs one batch at a time.
"""
if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
raise NotSupportedError('This database backend does not support ignoring conflicts.')
ops = connections[self.db].ops
max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
inserted_rows = []
bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
if bulk_return and not ignore_conflicts:
inserted_rows.extend(self._insert(
item, fields=fields, using=self.db,
returning_fields=self.model._meta.db_returning_fields,
ignore_conflicts=ignore_conflicts,
))
else:
self._insert(item, fields=fields, using=self.db, ignore_conflicts=ignore_conflicts)
return inserted_rows
def _chain(self, **kwargs):
"""
Return a copy of the current QuerySet that's ready for another
operation.
"""
obj = self._clone()
if obj._sticky_filter:
obj.query.filter_is_sticky = True
obj._sticky_filter = False
obj.__dict__.update(kwargs)
return obj
def _clone(self):
"""
Return a copy of the current QuerySet. A lightweight alternative
to deepcopy().
"""
c = self.__class__(model=self.model, query=self.query.chain(), using=self._db, hints=self._hints)
c._sticky_filter = self._sticky_filter
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c._iterable_class = self._iterable_class
c._fields = self._fields
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self._iterable_class(self))
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def _next_is_sticky(self):
"""
Indicate that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
This doesn't return a clone of the current QuerySet (it returns
"self"). The method is only used internally and should be immediately
followed by a filter() that does create a clone.
"""
self._sticky_filter = True
return self
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select) or
set(self.query.extra_select) != set(other.query.extra_select) or
set(self.query.annotation_select) != set(other.query.annotation_select)):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
)
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def resolve_expression(self, *args, **kwargs):
if self._fields and len(self._fields) > 1:
# A values() queryset can only be used as a nested query
# if it's set up to select only a single field.
raise TypeError('Cannot use multi-field values as a filter value.')
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
resolve_expression.queryset_only = True
def _add_hints(self, **hints):
"""
Update hinting information for use by routers. Add new key/values or
overwrite existing key/values.
"""
self._hints.update(hints)
def _has_filters(self):
"""
Check if this QuerySet has any filtering going on. This isn't
equivalent to checking if all objects are present in results; for
example, qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()
@staticmethod
def _validate_values_are_expressions(values, method_name):
invalid_args = sorted(str(arg) for arg in values if not hasattr(arg, 'resolve_expression'))
if invalid_args:
raise TypeError(
'QuerySet.%s() received non-expression(s): %s.' % (
method_name,
', '.join(invalid_args),
)
)
def _not_support_combined_queries(self, operation_name):
if self.query.combinator:
raise NotSupportedError(
'Calling QuerySet.%s() after %s() is not supported.'
% (operation_name, self.query.combinator)
)
class InstanceCheckMeta(type):
def __instancecheck__(self, instance):
return isinstance(instance, QuerySet) and instance.query.is_empty()
class EmptyQuerySet(metaclass=InstanceCheckMeta):
"""
Marker class for checking if a queryset is empty by .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""
def __init__(self, *args, **kwargs):
raise TypeError("EmptyQuerySet can't be instantiated")
class RawQuerySet:
"""
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
def __init__(self, raw_query, model=None, query=None, params=None,
translations=None, using=None, hints=None):
self.raw_query = raw_query
self.model = model
self._db = using
self._hints = hints or {}
self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
self.params = params or ()
self.translations = translations or {}
self._result_cache = None
self._prefetch_related_lookups = ()
self._prefetch_done = False
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.identifier_converter
model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
if column not in self.model_fields]
model_init_order = [self.columns.index(converter(f.column)) for f in model_init_fields]
model_init_names = [f.attname for f in model_init_fields]
return model_init_names, model_init_order, annotation_fields
def prefetch_related(self, *lookups):
"""Same as QuerySet.prefetch_related()"""
clone = self._clone()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
def _prefetch_related_objects(self):
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def _clone(self):
"""Same as QuerySet._clone()"""
c = self.__class__(
self.raw_query, model=self.model, query=self.query, params=self.params,
translations=self.translations, using=self._db, hints=self._hints
)
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self.iterator())
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __iter__(self):
self._fetch_all()
return iter(self._result_cache)
def iterator(self):
# Cache some things for performance reasons outside the loop.
db = self.db
compiler = connections[db].ops.compiler('SQLCompiler')(
self.query, connections[db], db
)
query = iter(self.query)
try:
model_init_names, model_init_pos, annotation_fields = self.resolve_model_init_order()
if self.model._meta.pk.attname not in model_init_names:
raise exceptions.FieldDoesNotExist(
'Raw query must include the primary key'
)
model_cls = self.model
fields = [self.model_fields.get(c) for c in self.columns]
converters = compiler.get_converters([
f.get_col(f.model._meta.db_table) if f else None for f in fields
])
if converters:
query = compiler.apply_converters(query, converters)
for values in query:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(db, model_init_names, model_init_values)
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
if hasattr(self.query, 'cursor') and self.query.cursor:
self.query.cursor.close()
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.query)
def __getitem__(self, k):
return list(self)[k]
@property
def db(self):
"""Return the database used if this query is executed now."""
return self._db or router.db_for_read(self.model, **self._hints)
def using(self, alias):
"""Select the database this RawQuerySet should execute against."""
return RawQuerySet(
self.raw_query, model=self.model,
query=self.query.chain(using=alias),
params=self.params, translations=self.translations,
using=alias,
)
@cached_property
def columns(self):
"""
A list of model field names in the order they'll appear in the
query results.
"""
columns = self.query.get_columns()
# Adjust any column names which don't match field names
for (query_name, model_name) in self.translations.items():
# Ignore translations for nonexistent column names
try:
index = columns.index(query_name)
except ValueError:
pass
else:
columns[index] = model_name
return columns
@cached_property
def model_fields(self):
"""A dict mapping column names to model field names."""
converter = connections[self.db].introspection.identifier_converter
model_fields = {}
for field in self.model._meta.fields:
name, column = field.get_attname_column()
model_fields[converter(column)] = field
return model_fields
class Prefetch:
def __init__(self, lookup, queryset=None, to_attr=None):
# `prefetch_through` is the path we traverse to perform the prefetch.
self.prefetch_through = lookup
# `prefetch_to` is the path to the attribute that stores the result.
self.prefetch_to = lookup
if queryset is not None and (
isinstance(queryset, RawQuerySet) or (
hasattr(queryset, '_iterable_class') and
not issubclass(queryset._iterable_class, ModelIterable)
)
):
raise ValueError(
'Prefetch querysets cannot use raw(), values(), and '
'values_list().'
)
if to_attr:
self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])
self.queryset = queryset
self.to_attr = to_attr
def __getstate__(self):
obj_dict = self.__dict__.copy()
if self.queryset is not None:
# Prevent the QuerySet from being evaluated
obj_dict['queryset'] = self.queryset._chain(
_result_cache=[],
_prefetch_done=True,
)
return obj_dict
def add_prefix(self, prefix):
self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
def get_current_prefetch_to(self, level):
return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1])
def get_current_to_attr(self, level):
parts = self.prefetch_to.split(LOOKUP_SEP)
to_attr = parts[level]
as_attr = self.to_attr and level == len(parts) - 1
return to_attr, as_attr
def get_current_queryset(self, level):
if self.get_current_prefetch_to(level) == self.prefetch_to:
return self.queryset
return None
def __eq__(self, other):
if not isinstance(other, Prefetch):
return NotImplemented
return self.prefetch_to == other.prefetch_to
def __hash__(self):
return hash((self.__class__, self.prefetch_to))
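# Illustrative sketch of how Prefetch splits its lookup (assumed names):
#
#     p = Prefetch('books__chapters', to_attr='all_chapters')
#     p.prefetch_through             # 'books__chapters' - path traversed
#     p.prefetch_to                  # 'books__all_chapters' - where results land
#     p.get_current_prefetch_to(0)   # 'books'
#     p.get_current_to_attr(1)       # ('all_chapters', True)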
def normalize_prefetch_lookups(lookups, prefix=None):
"""Normalize lookups into Prefetch objects."""
ret = []
for lookup in lookups:
if not isinstance(lookup, Prefetch):
lookup = Prefetch(lookup)
if prefix:
lookup.add_prefix(prefix)
ret.append(lookup)
return ret
def prefetch_related_objects(model_instances, *related_lookups):
"""
Populate prefetched object caches for a list of model instances based on
the lookups/Prefetch instances given.
"""
if not model_instances:
return # nothing to do
# We need to be able to dynamically add to the list of prefetch_related
# lookups that we look up (see below). So we need some book keeping to
# ensure we don't do duplicate work.
done_queries = {} # dictionary of things like 'foo__bar': [results]
auto_lookups = set() # we add to this as we go through.
followed_descriptors = set() # recursion protection
all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
while all_lookups:
lookup = all_lookups.pop()
if lookup.prefetch_to in done_queries:
if lookup.queryset is not None:
raise ValueError("'%s' lookup was already seen with a different queryset. "
"You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
continue
# At the top level, the list of objects to decorate is the result cache
# from the primary QuerySet. It won't be for deeper levels.
obj_list = model_instances
through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
for level, through_attr in enumerate(through_attrs):
# Prepare main instances
if not obj_list:
break
prefetch_to = lookup.get_current_prefetch_to(level)
if prefetch_to in done_queries:
# Skip any prefetching, and any object preparation
obj_list = done_queries[prefetch_to]
continue
# Prepare objects:
good_objects = True
for obj in obj_list:
# Since prefetching can re-use instances, it is possible to have
# the same instance multiple times in obj_list, so obj might
# already be prepared.
if not hasattr(obj, '_prefetched_objects_cache'):
try:
obj._prefetched_objects_cache = {}
except (AttributeError, TypeError):
# Must be an immutable object from
# values_list(flat=True), for example (TypeError) or
# a QuerySet subclass that isn't returning Model
# instances (AttributeError), either in Django or a 3rd
# party. prefetch_related() doesn't make sense, so quit.
good_objects = False
break
if not good_objects:
break
# Descend down tree
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to first object applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr, to_attr)
if not attr_found:
raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
"parameter to prefetch_related()" %
(through_attr, first_obj.__class__.__name__, lookup.prefetch_through))
if level == len(through_attrs) - 1 and prefetcher is None:
# Last one, this *must* resolve to something that supports
# prefetching, otherwise there is no point adding it and the
# developer asking for it has made a mistake.
raise ValueError("'%s' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to "
"prefetch_related()." % lookup.prefetch_through)
if prefetcher is not None and not is_fetched:
obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
# We need to ensure we don't keep adding lookups from the
# same relationships to stop infinite recursion. So, if we
# are already on an automatically added lookup, don't add
# the new lookups from relationships we've seen already.
if not (prefetch_to in done_queries and lookup in auto_lookups and descriptor in followed_descriptors):
done_queries[prefetch_to] = obj_list
new_lookups = normalize_prefetch_lookups(reversed(additional_lookups), prefetch_to)
auto_lookups.update(new_lookups)
all_lookups.extend(new_lookups)
followed_descriptors.add(descriptor)
else:
# Either a singly related object that has already been fetched
# (e.g. via select_related), or hopefully some other property
# that doesn't support prefetching but needs to be traversed.
# We replace the current list of parent objects with the list
# of related objects, filtering out empty or missing values so
# that we can continue with nullable or reverse relations.
new_obj_list = []
for obj in obj_list:
if through_attr in getattr(obj, '_prefetched_objects_cache', ()):
# If related objects have been prefetched, use the
# cache rather than the object's through_attr.
new_obj = list(obj._prefetched_objects_cache.get(through_attr))
else:
try:
new_obj = getattr(obj, through_attr)
except exceptions.ObjectDoesNotExist:
continue
if new_obj is None:
continue
# We special-case `list` rather than something more generic
# like `Iterable` because we don't want to accidentally match
# user models that define __iter__.
if isinstance(new_obj, list):
new_obj_list.extend(new_obj)
else:
new_obj_list.append(new_obj)
obj_list = new_obj_list
def get_prefetcher(instance, through_attr, to_attr):
"""
For the attribute 'through_attr' on the given instance, find
an object that has a get_prefetch_queryset().
Return a 4 tuple containing:
(the object with get_prefetch_queryset (or None),
the descriptor object representing this relationship (or None),
a boolean that is False if the attribute was not found at all,
a boolean that is True if the attribute has already been fetched)
"""
prefetcher = None
is_fetched = False
# For singly related objects, we have to avoid getting the attribute
# from the object, as this will trigger the query. So we first try
# on the class, in order to get the descriptor object.
rel_obj_descriptor = getattr(instance.__class__, through_attr, None)
if rel_obj_descriptor is None:
attr_found = hasattr(instance, through_attr)
else:
attr_found = True
if rel_obj_descriptor:
# singly related object, descriptor object has the
# get_prefetch_queryset() method.
if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
prefetcher = rel_obj_descriptor
if rel_obj_descriptor.is_cached(instance):
is_fetched = True
else:
# descriptor doesn't support prefetching, so we go ahead and get
# the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
if hasattr(rel_obj, 'get_prefetch_queryset'):
prefetcher = rel_obj
if through_attr != to_attr:
# Special case cached_property instances because hasattr
# triggers attribute computation and assignment.
if isinstance(getattr(instance.__class__, to_attr, None), cached_property):
is_fetched = to_attr in instance.__dict__
else:
is_fetched = hasattr(instance, to_attr)
else:
is_fetched = through_attr in instance._prefetched_objects_cache
return prefetcher, rel_obj_descriptor, attr_found, is_fetched
def prefetch_one_level(instances, prefetcher, lookup, level):
"""
Helper function for prefetch_related_objects().
Run prefetches on all instances using the prefetcher object,
assigning results to relevant caches in instance.
Return the prefetched objects along with any additional prefetches that
must be done due to prefetch_related lookups found from default managers.
"""
# prefetcher must have a method get_prefetch_queryset() which takes a list
# of instances, and returns a tuple:
# (queryset of instances of self.model that are related to passed in instances,
# callable that gets value to be matched for returned instances,
# callable that gets value to be matched for passed in instances,
# boolean that is True for singly related objects,
# cache or field name to assign to,
# boolean that is True when the previous argument is a cache name vs a field name).
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
rel_qs, rel_obj_attr, instance_attr, single, cache_name, is_descriptor = (
prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
# We have to handle the possibility that the QuerySet we just got back
# contains some prefetch_related lookups. We don't want to trigger the
# prefetch_related functionality by evaluating the query. Rather, we need
# to merge in the prefetch_related lookups.
# Copy the lookups in case it is a Prefetch object which could be reused
# later (happens in nested prefetch_related).
additional_lookups = [
copy.copy(additional_lookup) for additional_lookup
in getattr(rel_qs, '_prefetch_related_lookups', ())
]
if additional_lookups:
# Don't need to clone because the manager should have given us a fresh
# instance, so we access an internal attribute instead of using the
# public interface, for performance reasons.
rel_qs._prefetch_related_lookups = ()
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
to_attr, as_attr = lookup.get_current_to_attr(level)
# Make sure `to_attr` does not conflict with a field.
if as_attr and instances:
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to first object applies to all.
model = instances[0].__class__
try:
model._meta.get_field(to_attr)
except exceptions.FieldDoesNotExist:
pass
else:
msg = 'to_attr={} conflicts with a field on the {} model.'
raise ValueError(msg.format(to_attr, model.__name__))
# Whether or not we're prefetching the last part of the lookup.
leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
if single:
val = vals[0] if vals else None
if as_attr:
# A to_attr has been given for the prefetch.
setattr(obj, to_attr, val)
elif is_descriptor:
# cache_name points to a field name in obj.
# This field is a descriptor for a related object.
setattr(obj, cache_name, val)
else:
# No to_attr has been given for this prefetch operation and the
# cache_name does not point to a descriptor. Store the value of
# the field in the object's field cache.
obj._state.fields_cache[cache_name] = val
else:
if as_attr:
setattr(obj, to_attr, vals)
else:
manager = getattr(obj, to_attr)
if leaf and lookup.queryset is not None:
qs = manager._apply_rel_filters(lookup.queryset)
else:
qs = manager.get_queryset()
qs._result_cache = vals
# We don't want the individual qs doing prefetch_related now,
# since we have merged this into the current work.
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs
return all_related_objects, additional_lookups
class RelatedPopulator:
"""
RelatedPopulator is used for select_related() object instantiation.
The idea is that each select_related() model will be populated by a
different RelatedPopulator instance. The RelatedPopulator instances get
klass_info and select (computed in SQLCompiler) plus the used db as
input for initialization. That data is used to compute which columns
to use, how to instantiate the model, and how to populate the links
between the objects.
The actual creation of the objects is done in populate() method. This
method gets row and from_obj as input and populates the select_related()
model instance.
"""
def __init__(self, klass_info, select, db):
self.db = db
# Pre-compute needed attributes. The attributes are:
# - model_cls: the possibly deferred model class to instantiate
# - either:
# - cols_start, cols_end: usually the columns in the row are
# in the same order model_cls.__init__ expects them, so we
# can instantiate by model_cls(*row[cols_start:cols_end])
# - reorder_for_init: When select_related descends to a child
# class, then we want to reuse the already selected parent
# data. However, in this case the parent data isn't necessarily
# in the same order that Model.__init__ expects it to be, so
# we have to reorder the parent data. The reorder_for_init
# attribute contains a function used to reorder the field data
# in the order __init__ expects it.
# - pk_idx: the index of the primary key field in the reordered
# model data. Used to check if a related object exists at all.
# - init_list: the field attnames fetched from the database. For
# deferred models this isn't the same as all attnames of the
# model's fields.
# - related_populators: a list of RelatedPopulator instances if
# select_related() descends to related models from this model.
# - local_setter, remote_setter: Methods to set cached values on
# the object being populated and on the remote object. Usually
# these are Field.set_cached_value() methods.
select_fields = klass_info['select_fields']
from_parent = klass_info['from_parent']
if not from_parent:
self.cols_start = select_fields[0]
self.cols_end = select_fields[-1] + 1
self.init_list = [
f[0].target.attname for f in select[self.cols_start:self.cols_end]
]
self.reorder_for_init = None
else:
attname_indexes = {select[idx][0].target.attname: idx for idx in select_fields}
model_init_attnames = (f.attname for f in klass_info['model']._meta.concrete_fields)
self.init_list = [attname for attname in model_init_attnames if attname in attname_indexes]
self.reorder_for_init = operator.itemgetter(*[attname_indexes[attname] for attname in self.init_list])
self.model_cls = klass_info['model']
self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
self.local_setter = klass_info['local_setter']
self.remote_setter = klass_info['remote_setter']
def populate(self, row, from_obj):
if self.reorder_for_init:
obj_data = self.reorder_for_init(row)
else:
obj_data = row[self.cols_start:self.cols_end]
if obj_data[self.pk_idx] is None:
obj = None
else:
obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
for rel_iter in self.related_populators:
rel_iter.populate(row, obj)
self.local_setter(from_obj, obj)
if obj is not None:
self.remote_setter(obj, from_obj)
def get_related_populators(klass_info, select, db):
iterators = []
related_klass_infos = klass_info.get('related_klass_infos', [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
iterators.append(rel_cls)
return iterators
|
2deaa8e695b267f18343271e9ed7f9f344c5e7705fc71acc3622d91524efd6e8 | import datetime
import decimal
from importlib import import_module
import sqlparse
from django.conf import settings
from django.db import NotSupportedError, transaction
from django.db.backends import utils
from django.utils import timezone
from django.utils.encoding import force_str
class BaseDatabaseOperations:
"""
Encapsulate backend-specific differences, such as the way a backend
performs ordering or calculates the ID of a recently-inserted row.
"""
compiler_module = "django.db.models.sql.compiler"
# Integer field safe ranges by `internal_type` as documented
# in docs/ref/models/fields.txt.
integer_field_ranges = {
'SmallIntegerField': (-32768, 32767),
'IntegerField': (-2147483648, 2147483647),
'BigIntegerField': (-9223372036854775808, 9223372036854775807),
'PositiveBigIntegerField': (0, 9223372036854775807),
'PositiveSmallIntegerField': (0, 32767),
'PositiveIntegerField': (0, 2147483647),
'SmallAutoField': (-32768, 32767),
'AutoField': (-2147483648, 2147483647),
'BigAutoField': (-9223372036854775808, 9223372036854775807),
}
set_operators = {
'union': 'UNION',
'intersection': 'INTERSECT',
'difference': 'EXCEPT',
}
# Mapping of Field.get_internal_type() (typically the model field's class
# name) to the data type to use for the Cast() function, if different from
# DatabaseWrapper.data_types.
cast_data_types = {}
# CharField data type if the max_length argument isn't provided.
cast_char_field_without_max_length = None
# Start and end points for window expressions.
PRECEDING = 'PRECEDING'
FOLLOWING = 'FOLLOWING'
UNBOUNDED_PRECEDING = 'UNBOUNDED ' + PRECEDING
UNBOUNDED_FOLLOWING = 'UNBOUNDED ' + FOLLOWING
CURRENT_ROW = 'CURRENT ROW'
# Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
explain_prefix = None
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs)
def cache_key_culling_sql(self):
"""
Return an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
def unification_cast_sql(self, output_field):
"""
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
"""
return '%s'
def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')
def date_interval_sql(self, timedelta):
"""
Implement the date interval functionality for expressions.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')
def date_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date field field_name to a date object with only
the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method.')
def datetime_cast_date_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to date value.
"""
raise NotImplementedError(
'subclasses of BaseDatabaseOperations may require a '
'datetime_cast_date_sql() method.'
)
def datetime_cast_time_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to time value.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method')
def datetime_extract_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')
def time_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time field field_name to a time object with
only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a time_trunc_sql() method')
def time_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
"""
return self.date_extract_sql(lookup_type, field_name)
def deferrable_sql(self):
"""
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
"""
return ''
def distinct_sql(self, fields, params):
"""
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
"""
if fields:
raise NotSupportedError('DISTINCT ON fields is not supported by this database backend')
else:
return ['DISTINCT'], []
def fetch_returned_insert_columns(self, cursor, returning_params):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
"""
return cursor.fetchone()
def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return '%s'
def force_no_ordering(self):
"""
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
"""
return []
def for_update_sql(self, nowait=False, skip_locked=False, of=()):
"""
Return the FOR UPDATE SQL clause to lock rows for an update operation.
"""
return 'FOR UPDATE%s%s%s' % (
' OF %s' % ', '.join(of) if of else '',
' NOWAIT' if nowait else '',
' SKIP LOCKED' if skip_locked else '',
)
def _get_limit_offset_params(self, low_mark, high_mark):
offset = low_mark or 0
if high_mark is not None:
return (high_mark - offset), offset
elif offset:
return self.connection.ops.no_limit_value(), offset
return None, offset
def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return ' '.join(sql for sql in (
('LIMIT %d' % limit) if limit else None,
('OFFSET %d' % offset) if offset else None,
) if sql)
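# Worked example of the slice -> SQL mapping above (illustrative only):
#
#     low_mark=5, high_mark=15   -> limit=10, offset=5 -> 'LIMIT 10 OFFSET 5'
#     low_mark=5, high_mark=None -> limit=no_limit_value(), offset=5
#     low_mark=0, high_mark=None -> no LIMIT/OFFSET clause at all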
def last_executed_query(self, cursor, sql, params):
"""
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain string values.
def to_string(s):
return force_str(s, strings_only=True, errors='replace')
if isinstance(params, (list, tuple)):
u_params = tuple(to_string(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_string(k): to_string(v) for k, v in params.items()}
return "QUERY = %r - PARAMS = %r" % (sql, u_params)
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type, internal_type=None):
"""
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Return the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
Return the value to use for the LIMIT when we want "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')
def pk_default_value(self):
"""
Return the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return 'DEFAULT'
def prepare_sql_script(self, sql):
"""
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
return [
sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql) if statement
]
def process_clob(self, value):
"""
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_columns(self, fields):
"""
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')
def random_function_sql(self):
"""Return an SQL expression that returns a random value."""
return 'RANDOM()'
def regex_lookup(self, lookup_type):
"""
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')
def savepoint_create_sql(self, sid):
"""
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid)
def savepoint_commit_sql(self, sid):
"""
Return the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
"""
Return the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
def set_time_zone_sql(self):
"""
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
"""
return ''
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
"""
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
to tables with foreign keys pointing to the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations must provide a sql_flush() method')
def execute_sql_flush(self, using, sql_list):
"""Execute a list of SQL statements to flush the database."""
with transaction.atomic(using=using, savepoint=self.connection.features.can_rollback_ddl):
with self.connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def sequence_reset_by_name_sql(self, style, sequences):
"""
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""Return the SQL statement required to start a transaction."""
return "BEGIN;"
def end_transaction_sql(self, success=True):
"""Return the SQL statement required to end a transaction."""
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
"""
return ''
def prep_for_like_query(self, x):
"""Prepare a value for use in a LIKE query."""
return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
"""
return value
def adapt_unknown_value(self, value):
"""
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return str(value)
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
return str(value)
def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return str(value)
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places)
def adapt_ipaddressfield_value(self, value):
"""
Transform a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None
def year_lookup_bounds_for_date_field(self, value):
"""
Return a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second]
def year_lookup_bounds_for_datetime_field(self, value):
"""
Return a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second]
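# Worked example of the bounds above (illustrative, with USE_TZ=False and
# the base adapt_datetimefield_value(), which stringifies the datetime):
#
#     value=2020 -> ['2020-01-01 00:00:00', '2020-12-31 23:59:59.999999']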
def get_db_converters(self, expression):
"""
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
format; this is the hook for converter functions.
"""
return []
def convert_durationfield_value(self, value, expression, connection):
if value is not None:
return datetime.timedelta(0, 0, value)
def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
"""
pass
def conditional_expression_supported_in_where_clause(self, expression):
"""
Return True if the conditional expression is supported in the WHERE
clause.
"""
return True
def combine_expression(self, connector, sub_expressions):
"""
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
"""
conn = ' %s ' % connector
return conn.join(sub_expressions)
def combine_duration_expression(self, connector, sub_expressions):
return self.combine_expression(connector, sub_expressions)
def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return '%s'
def modify_insert_params(self, placeholder, params):
"""
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type]
def subtract_temporals(self, internal_type, lhs, rhs):
if self.connection.features.supports_temporal_subtraction:
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return '(%s - %s)' % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params)
raise NotSupportedError("This backend does not support %s subtraction." % internal_type)
def window_frame_start(self, start):
if isinstance(start, int):
if start < 0:
return '%d %s' % (abs(start), self.PRECEDING)
elif start == 0:
return self.CURRENT_ROW
elif start is None:
return self.UNBOUNDED_PRECEDING
raise ValueError("start argument must be a negative integer, zero, or None, but got '%s'." % start)
def window_frame_end(self, end):
if isinstance(end, int):
if end == 0:
return self.CURRENT_ROW
elif end > 0:
return '%d %s' % (end, self.FOLLOWING)
elif end is None:
return self.UNBOUNDED_FOLLOWING
raise ValueError("end argument must be a positive integer, zero, or None, but got '%s'." % end)
def window_frame_rows_start_end(self, start=None, end=None):
"""
Return SQL for start and end points in an OVER clause window frame.
"""
if not self.connection.features.supports_over_clause:
raise NotSupportedError('This backend does not support window expressions.')
return self.window_frame_start(start), self.window_frame_end(end)
def window_frame_range_start_end(self, start=None, end=None):
start_, end_ = self.window_frame_rows_start_end(start, end)
if (
self.connection.features.only_supports_unbounded_with_preceding_and_following and
((start and start < 0) or (end and end > 0))
):
raise NotSupportedError(
'%s only supports UNBOUNDED together with PRECEDING and '
'FOLLOWING.' % self.connection.display_name
)
return start_, end_
def explain_query_prefix(self, format=None, **options):
if not self.connection.features.supports_explaining_query_execution:
raise NotSupportedError('This backend does not support explaining query execution.')
if format:
supported_formats = self.connection.features.supported_explain_formats
normalized_format = format.upper()
if normalized_format not in supported_formats:
msg = '%s is not a recognized format.' % normalized_format
if supported_formats:
msg += ' Allowed formats: %s' % ', '.join(sorted(supported_formats))
raise ValueError(msg)
if options:
raise ValueError('Unknown options: %s' % ', '.join(sorted(options.keys())))
return self.explain_prefix
def insert_statement(self, ignore_conflicts=False):
return 'INSERT INTO'
def ignore_conflicts_suffix_sql(self, ignore_conflicts=None):
return ''
|
1e8f2f83791522e88741028cde1fe574d16976cb7057aea1cad262e3a0fd65b6 | import datetime
import decimal
import uuid
from functools import lru_cache
from itertools import chain
from django.conf import settings
from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, models
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models.expressions import Col
from django.utils import timezone
from django.utils.dateparse import parse_date, parse_datetime, parse_time
from django.utils.duration import duration_microseconds
from django.utils.functional import cached_property
class DatabaseOperations(BaseDatabaseOperations):
cast_char_field_without_max_length = 'text'
cast_data_types = {
'DateField': 'TEXT',
'DateTimeField': 'TEXT',
}
explain_prefix = 'EXPLAIN QUERY PLAN'
def bulk_batch_size(self, fields, objs):
"""
SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
999 variables per query.
If there's only a single field to insert, the limit is 500
(SQLITE_MAX_COMPOUND_SELECT).
"""
if len(fields) == 1:
return 500
elif len(fields) > 1:
return self.connection.features.max_query_params // len(fields)
else:
return len(objs)
def check_expression_support(self, expression):
bad_fields = (models.DateField, models.DateTimeField, models.TimeField)
bad_aggregates = (models.Sum, models.Avg, models.Variance, models.StdDev)
if isinstance(expression, bad_aggregates):
for expr in expression.get_source_expressions():
try:
output_field = expr.output_field
except (AttributeError, FieldError):
                    # Not every subexpression has an output_field, which is
                    # fine to ignore.
pass
else:
if isinstance(output_field, bad_fields):
raise NotSupportedError(
'You cannot use Sum, Avg, StdDev, and Variance '
'aggregations on date/time fields in sqlite3 '
'since date/time is saved as text.'
)
if (
isinstance(expression, models.Aggregate) and
expression.distinct and
len(expression.source_expressions) > 1
):
raise NotSupportedError(
"SQLite doesn't support DISTINCT on aggregate functions "
"accepting multiple arguments."
)
def date_extract_sql(self, lookup_type, field_name):
"""
Support EXTRACT with a user-defined function django_date_extract()
that's registered in connect(). Use single quotes because this is a
string and could otherwise cause a collision with a field name.
"""
return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)
def date_interval_sql(self, timedelta):
return str(duration_microseconds(timedelta))
def format_for_duration_arithmetic(self, sql):
"""Do nothing since formatting is handled in the custom function."""
return sql
def date_trunc_sql(self, lookup_type, field_name):
return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name)
def time_trunc_sql(self, lookup_type, field_name):
return "django_time_trunc('%s', %s)" % (lookup_type.lower(), field_name)
def _convert_tznames_to_sql(self, tzname):
if settings.USE_TZ:
return "'%s'" % tzname, "'%s'" % self.connection.timezone_name
return 'NULL', 'NULL'
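    # Illustrative note (not in the original source): with USE_TZ enabled and,
    # assuming a UTC database time zone, _convert_tznames_to_sql('Europe/Paris')
    # returns ("'Europe/Paris'", "'UTC'"); the pair is passed to the custom
    # django_datetime_* SQL functions below.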
def datetime_cast_date_sql(self, field_name, tzname):
return 'django_datetime_cast_date(%s, %s, %s)' % (
field_name, *self._convert_tznames_to_sql(tzname),
)
def datetime_cast_time_sql(self, field_name, tzname):
return 'django_datetime_cast_time(%s, %s, %s)' % (
field_name, *self._convert_tznames_to_sql(tzname),
)
def datetime_extract_sql(self, lookup_type, field_name, tzname):
return "django_datetime_extract('%s', %s, %s, %s)" % (
lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname),
)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
return "django_datetime_trunc('%s', %s, %s, %s)" % (
lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname),
)
def time_extract_sql(self, lookup_type, field_name):
return "django_time_extract('%s', %s)" % (lookup_type.lower(), field_name)
def pk_default_value(self):
return "NULL"
def _quote_params_for_last_executed_query(self, params):
"""
Only for last_executed_query! Don't use this to execute SQL queries!
"""
# This function is limited both by SQLITE_LIMIT_VARIABLE_NUMBER (the
# number of parameters, default = 999) and SQLITE_MAX_COLUMN (the
# number of return values, default = 2000). Since Python's sqlite3
# module doesn't expose the get_limit() C API, assume the default
# limits are in effect and split the work in batches if needed.
BATCH_SIZE = 999
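        # Illustrative example (not in the original source): 2,500 parameters
        # are quoted in three recursive batches of 999, 999 and 502.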
if len(params) > BATCH_SIZE:
results = ()
for index in range(0, len(params), BATCH_SIZE):
chunk = params[index:index + BATCH_SIZE]
results += self._quote_params_for_last_executed_query(chunk)
return results
sql = 'SELECT ' + ', '.join(['QUOTE(?)'] * len(params))
# Bypass Django's wrappers and use the underlying sqlite3 connection
# to avoid logging this query - it would trigger infinite recursion.
cursor = self.connection.connection.cursor()
# Native sqlite3 cursors cannot be used as context managers.
try:
return cursor.execute(sql, params).fetchone()
finally:
cursor.close()
def last_executed_query(self, cursor, sql, params):
# Python substitutes parameters in Modules/_sqlite/cursor.c with:
# pysqlite_statement_bind_parameters(self->statement, parameters, allow_8bit_chars);
# Unfortunately there is no way to reach self->statement from Python,
# so we quote and substitute parameters manually.
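        # Illustrative result (not in the original source): for
        # sql='SELECT %s' and params=(42,), this returns 'SELECT 42'.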
if params:
if isinstance(params, (list, tuple)):
params = self._quote_params_for_last_executed_query(params)
else:
values = tuple(params.values())
values = self._quote_params_for_last_executed_query(values)
params = dict(zip(params, values))
return sql % params
# For consistency with SQLiteCursorWrapper.execute(), just return sql
# when there are no parameters. See #13648 and #17158.
else:
return sql
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def no_limit_value(self):
return -1
def __references_graph(self, table_name):
query = """
WITH tables AS (
SELECT %s name
UNION
SELECT sqlite_master.name
FROM sqlite_master
JOIN tables ON (sql REGEXP %s || tables.name || %s)
) SELECT name FROM tables;
"""
params = (
table_name,
r'(?i)\s+references\s+("|\')?',
r'("|\')?\s*\(',
)
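        # Explanatory note (not in the original source): the two regex
        # fragments wrap the table name so the recursive CTE matches
        # definitions such as ... REFERENCES "some_table" ( ... in
        # sqlite_master.sql, collecting every table that references it.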
with self.connection.cursor() as cursor:
results = cursor.execute(query, params)
return [row[0] for row in results.fetchall()]
@cached_property
def _references_graph(self):
# 512 is large enough to fit the ~330 tables (as of this writing) in
# Django's test suite.
return lru_cache(maxsize=512)(self.__references_graph)
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
if tables and allow_cascade:
# Simulate TRUNCATE CASCADE by recursively collecting the tables
# referencing the tables to be flushed.
tables = set(chain.from_iterable(self._references_graph(table) for table in tables))
        # Note: No requirement for reset of auto-incremented indices (cf. other
        # sql_flush() implementations). Just return SQL at this point.
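        # Illustrative output (not in the original source): flushing a single
        # table named "backends_square" would produce
        # ['DELETE FROM "backends_square";'].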
return ['%s %s %s;' % (
style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table))
) for table in tables]
def adapt_datetimefield_value(self, value):
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
# SQLite doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = timezone.make_naive(value, self.connection.timezone)
else:
raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.")
return str(value)
def adapt_timefield_value(self, value):
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
# SQLite doesn't support tz-aware datetimes
if timezone.is_aware(value):
raise ValueError("SQLite backend does not support timezone-aware times.")
return str(value)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type == 'DateTimeField':
converters.append(self.convert_datetimefield_value)
elif internal_type == 'DateField':
converters.append(self.convert_datefield_value)
elif internal_type == 'TimeField':
converters.append(self.convert_timefield_value)
elif internal_type == 'DecimalField':
converters.append(self.get_decimalfield_converter(expression))
elif internal_type == 'UUIDField':
converters.append(self.convert_uuidfield_value)
elif internal_type in ('NullBooleanField', 'BooleanField'):
converters.append(self.convert_booleanfield_value)
return converters
def convert_datetimefield_value(self, value, expression, connection):
if value is not None:
if not isinstance(value, datetime.datetime):
value = parse_datetime(value)
if settings.USE_TZ and not timezone.is_aware(value):
value = timezone.make_aware(value, self.connection.timezone)
return value
def convert_datefield_value(self, value, expression, connection):
if value is not None:
if not isinstance(value, datetime.date):
value = parse_date(value)
return value
def convert_timefield_value(self, value, expression, connection):
if value is not None:
if not isinstance(value, datetime.time):
value = parse_time(value)
return value
def get_decimalfield_converter(self, expression):
# SQLite stores only 15 significant digits. Digits coming from
# float inaccuracy must be removed.
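        # Illustrative example (not in the original source): a stored REAL
        # value of 1.2345678901234567 read through a
        # DecimalField(decimal_places=2) column is converted to Decimal('1.23').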
create_decimal = decimal.Context(prec=15).create_decimal_from_float
if isinstance(expression, Col):
quantize_value = decimal.Decimal(1).scaleb(-expression.output_field.decimal_places)
def converter(value, expression, connection):
if value is not None:
return create_decimal(value).quantize(quantize_value, context=expression.output_field.context)
else:
def converter(value, expression, connection):
if value is not None:
return create_decimal(value)
return converter
def convert_uuidfield_value(self, value, expression, connection):
if value is not None:
value = uuid.UUID(value)
return value
def convert_booleanfield_value(self, value, expression, connection):
return bool(value) if value in (1, 0) else value
def bulk_insert_sql(self, fields, placeholder_rows):
return " UNION ALL ".join(
"SELECT %s" % ", ".join(row)
for row in placeholder_rows
)
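    # Illustrative output for bulk_insert_sql() above (not in the original
    # source): two rows of two placeholders each produce
    # 'SELECT %s, %s UNION ALL SELECT %s, %s'.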
def combine_expression(self, connector, sub_expressions):
# SQLite doesn't have a ^ operator, so use the user-defined POWER
# function that's registered in connect().
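        # Illustrative example (not in the original source):
        # combine_expression('^', ['"base"', '"exp"']) returns
        # 'POWER("base","exp")'.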
if connector == '^':
return 'POWER(%s)' % ','.join(sub_expressions)
elif connector == '#':
return 'BITXOR(%s)' % ','.join(sub_expressions)
return super().combine_expression(connector, sub_expressions)
def combine_duration_expression(self, connector, sub_expressions):
if connector not in ['+', '-']:
raise DatabaseError('Invalid connector for timedelta: %s.' % connector)
fn_params = ["'%s'" % connector] + sub_expressions
if len(fn_params) > 3:
raise ValueError('Too many params for timedelta operations.')
return "django_format_dtdelta(%s)" % ', '.join(fn_params)
def integer_field_range(self, internal_type):
# SQLite doesn't enforce any integer constraints
return (None, None)
def subtract_temporals(self, internal_type, lhs, rhs):
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
params = (*lhs_params, *rhs_params)
if internal_type == 'TimeField':
return 'django_time_diff(%s, %s)' % (lhs_sql, rhs_sql), params
return 'django_timestamp_diff(%s, %s)' % (lhs_sql, rhs_sql), params
def insert_statement(self, ignore_conflicts=False):
return 'INSERT OR IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts)
|
5033672ce511f4b6d94560effb269a8fc298be36425857d5c640db93184dcdad | """Tests related to django.db.backends that haven't been organized."""
import datetime
import threading
import unittest
import warnings
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper
from django.db.models.sql.constants import CURSOR
from django.test import (
TestCase, TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
Article, Object, ObjectReference, Person, Post, RawData, Reporter,
ReporterProxy, SchoolClass, Square,
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ,
)
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
        Test the custom ``django_date_trunc`` method, in particular against
        fields which clash with strings passed to it (e.g. 'year') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
years = SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
        Test the custom ``django_date_extract`` method, in particular against
        fields which clash with strings passed to it (e.g. 'day') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
classes = SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query_without_previous_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
with connection.cursor() as cursor:
connection.ops.last_executed_query(cursor, '', ())
def test_debug_sql(self):
list(Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]['sql'].lower()
self.assertIn("select", sql)
self.assertIn(Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""last_executed_query() returns a string."""
data = RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
sql, params = data.query.sql_with_params()
with data.query.get_compiler('default').execute_sql(CURSOR) as cursor:
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, str)
def test_last_executed_query(self):
        # last_executed_query() interpolates all parameters, so in most cases
        # it is not equal to QuerySet.query.
for qs in (
Article.objects.filter(pk=1),
Article.objects.filter(pk__in=(1, 2), reporter__pk=3),
Article.objects.filter(
pk=1,
reporter__pk=9,
).exclude(reporter__pk__in=[2, 1]),
):
sql, params = qs.query.sql_with_params()
with qs.query.get_compiler(DEFAULT_DB_ALIAS).execute_sql(CURSOR) as cursor:
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
str(qs.query),
)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_last_executed_query_dict(self):
square_opts = Square._meta
sql = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (
connection.introspection.identifier_converter(square_opts.db_table),
connection.ops.quote_name(square_opts.get_field('root').column),
connection.ops.quote_name(square_opts.get_field('square').column),
)
with connection.cursor() as cursor:
params = {'root': 2, 'square': 4}
cursor.execute(sql, params)
self.assertEqual(
cursor.db.ops.last_executed_query(cursor, sql, params),
sql % params,
)
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
with connection.cursor() as cursor:
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
with self.assertRaises(Exception):
cursor.executemany(query, [(1, 2, 3)])
with self.assertRaises(Exception):
cursor.executemany(query, [(1,)])
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
    check that it does. Refs #8901.
"""
available_apps = ['backends']
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""
An m2m save of a model with a long name and a long m2m field name
doesn't error (#8901).
"""
obj = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""
Sequence resetting as part of a flush with model with long name and
long pk name doesn't error (#8901).
"""
        # A full flush is expensive for the test suite, so dig into the
        # internals to generate the likely offending SQL and run it manually.
# Some convenience aliases
VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sql_list = connection.ops.sql_flush(no_style(), tables, reset_sequences=True)
connection.ops.execute_sql_flush(connection.alias, sql_list)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
Post.objects.create(id=10, name='1st post', text='hello world')
# Reset the sequences for the database
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [Post])
with connection.cursor() as cursor:
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = Post.objects.create(name='New post', text='goodbye world')
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
connection.close()
with connection.cursor():
pass
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
with connection.cursor():
pass
self.assertEqual(data, {})
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
def test_paramless_no_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], '%s')
def test_parameter_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
def create_squares(self, args, paramstyle, multiple):
opts = Square._meta
tbl = connection.introspection.identifier_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
with connection.cursor() as cursor:
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = ((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 5)
args = ((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = ({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 5)
args = ({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as unicode objects #6254
qn = connection.ops.quote_name
Person(first_name="John", last_name="Doe").save()
Person(first_name="Jane", last_name="Doe").save()
Person(first_name="Mary", last_name="Agnelline").save()
Person(first_name="Peter", last_name="Parker").save()
Person(first_name="Clark", last_name="Kent").save()
opts2 = Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
with connection.cursor() as cursor:
cursor.execute(
'SELECT %s, %s FROM %s ORDER BY %s' % (
qn(f3.column),
qn(f4.column),
connection.introspection.identifier_converter(opts2.db_table),
qn(f3.column),
)
)
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "françois"
try:
with connection.cursor():
pass
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail("Unexpected error raised with unicode password: %s" % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, 'ops'))
self.assertTrue(hasattr(connection.ops, 'connection'))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Creating an existing table returns a DatabaseError """
query = 'CREATE TABLE %s (id INTEGER);' % Article._meta.db_table
with connection.cursor() as cursor:
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
Cursors can be used as a context manager
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing a closed cursor (psycopg2 uses InterfaceError, the rest
        # seem to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(connection.vendor == 'postgresql',
"Psycopg2 specific cursor.closed attribute needed")
def test_cursor_contextmanager_closing(self):
# There isn't a generic way to test that cursors are closed, but
        # psycopg2 offers us a way to check that via the closed attribute,
        # so run this test only on psycopg2.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_is_usable_after_database_disconnects(self):
"""
is_usable() doesn't crash when the database disconnects (#21553).
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
sql = 'SELECT 1' + connection.features.bare_select_suffix
with connection.cursor() as cursor:
reset_queries()
cursor.execute(sql)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
self.assertEqual(connection.queries[0]['sql'], sql)
reset_queries()
self.assertEqual(0, len(connection.queries))
sql = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square'),
))
with connection.cursor() as cursor:
cursor.executemany(sql, [(1, 1), (2, 4)])
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertEqual(list(connection.queries[0]), ['sql', 'time'])
self.assertEqual(connection.queries[0]['sql'], '2 times: %s' % sql)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
The backend doesn't store an unlimited number of queries (#12581).
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connection = connection.copy()
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
msg = "Limit for query logging exceeded, only the last 3 queries will be returned."
with self.assertWarnsMessage(UserWarning, msg):
self.assertEqual(3, len(new_connection.queries))
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
def test_timezone_none_use_tz_false(self):
connection.ensure_connection()
with self.settings(TIME_ZONE=None, USE_TZ=False):
connection.init_connection_state()
# These tests aren't conditional because it would require differentiating
# between MySQL+InnoDB and MySQL+MYISAM (something we currently can't do).
class FkConstraintsTests(TransactionTestCase):
available_apps = ['backends']
def setUp(self):
# Create a Reporter.
self.r = Reporter.objects.create(first_name='John', last_name='Smith')
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced on proxy models (refs #17519).
a2 = Article(
headline='This is another test', reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30,
)
with self.assertRaises(IntegrityError):
a2.save()
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a1 = Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks, make sure
        # constraints are also enforced on proxy models (refs #17519).
# Create another article
r_proxy = ReporterProxy.objects.get(pk=self.r.pk)
Article.objects.create(
headline='Another article',
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r, reporter_proxy=r_proxy,
)
# Retrieve the second article from the DB
a2 = Article.objects.get(headline='Another article')
a2.reporter_proxy_id = 30
with self.assertRaises(IntegrityError):
a2.save()
def test_disable_constraint_checks_manually(self):
"""
        When constraint checks are disabled, it should be possible to write
        bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
        When constraint checks are disabled (using the context manager), it
        should be possible to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
with self.assertRaises(IntegrityError):
connection.check_constraints()
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ['backends']
def test_default_connection_thread_local(self):
"""
The default connection (i.e. django.db.connection) is different for
each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
with connection.cursor():
pass
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.inc_thread_sharing()
with connection.cursor():
pass
connections_dict[id(connection)] = connection
try:
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
            # Each created connection got a different inner connection.
self.assertEqual(len({conn.connection for conn in connections_dict.values()}), 3)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection:
if conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_connections_thread_local(self):
"""
The connections are different for each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.inc_thread_sharing()
connections_dict[id(conn)] = conn
try:
num_new_threads = 2
for x in range(num_new_threads):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEqual(
len(connections_dict),
len(connections.all()) * (num_new_threads + 1),
)
finally:
# Finish by closing the connections opened by the other threads
# (the connection opened in the main thread will automatically be
# closed on teardown).
for conn in connections_dict.values():
if conn is not connection:
if conn.allow_thread_sharing:
conn.close()
conn.dec_thread_sharing()
def test_pass_connection_between_threads(self):
"""
A connection can be passed from one thread to the other (#17258).
"""
Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching thread sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
connections['default'].close()
# After calling inc_thread_sharing() on the connection.
connections['default'].inc_thread_sharing()
try:
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
finally:
connections['default'].dec_thread_sharing()
def test_closing_non_shared_connections(self):
"""
A connection that is not explicitly shareable cannot be closed by
another thread (#17258).
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections['default'].inc_thread_sharing()
try:
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
finally:
connections['default'].dec_thread_sharing()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
def test_thread_sharing_count(self):
self.assertIs(connection.allow_thread_sharing, False)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.inc_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, True)
connection.dec_thread_sharing()
self.assertIs(connection.allow_thread_sharing, False)
msg = 'Cannot decrement the thread sharing count below zero.'
with self.assertRaisesMessage(RuntimeError, msg):
connection.dec_thread_sharing()
class MySQLPKZeroTests(TestCase):
"""
    Zero as the id for an AutoField should raise an exception in MySQL,
    because MySQL does not allow zero as an autoincrement primary key.
"""
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = Object.objects.create()
ref = ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(Object.objects.filter(id=12345).exists())
ref = ObjectReference.objects.create(obj_id=12345)
ref_new = ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = Object.objects.create()
obj.related_objects.create()
self.assertEqual(Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = Object._meta.get_field("related_objects").remote_field.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
|
5c1e14898d4a88373ab57d1253197ba917c10da0dddc7757061d1696602af5c7 | from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='secret')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInline(TestDataMixin, TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.holder = Holder.objects.create(dummy=13)
Inner.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.client.force_login(self.superuser)
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(self.holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author-book relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_inline_column_css_class(self):
"""
Field names are included in the context to output a field-specific
CSS class name in the column headers.
"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
text_field, call_me_field = list(response.context['inline_admin_formset'].fields())
# Editable field.
self.assertEqual(text_field['name'], 'text')
self.assertContains(response, '<th class="column-text required">')
# Read-only field.
self.assertEqual(call_me_field['name'], 'call_me')
self.assertContains(response, '<th class="column-call_me">')
def test_custom_form_tabular_inline_label(self):
"""
A model form with a form field specified (TitleForm.title1) should have
its label rendered in the tabular inline.
"""
response = self.client.get(reverse('admin:admin_inlines_titlecollection_add'))
self.assertContains(response, '<th class="column-title1 required">Title1</th>', html=True)
def test_custom_form_tabular_inline_extra_field_label(self):
response = self.client.get(reverse('admin:admin_inlines_outfititem_add'))
_, extra_field = list(response.context['inline_admin_formset'].fields())
self.assertEqual(extra_field['label'], 'Extra field')
def test_non_editable_custom_form_tabular_inline_extra_field_label(self):
response = self.client.get(reverse('admin:admin_inlines_chapter_add'))
_, extra_field = list(response.context['inline_admin_formset'].fields())
self.assertEqual(extra_field['label'], 'Extra field')
def test_custom_form_tabular_inline_overridden_label(self):
"""
SomeChildModelForm.__init__() overrides the label of a form field.
That label is displayed in the TabularInline.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
field = list(response.context['inline_admin_formset'].fields())[0]
self.assertEqual(field['label'], 'new label')
self.assertContains(response, '<th class="column-name required">New label</th>', html=True)
def test_tabular_non_field_errors(self):
"""
non_field_errors are displayed correctly, including the correct value
for colspan.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(
response,
'<tr class="row-form-errors"><td colspan="4"><ul class="errorlist nonfield">'
'<li>The two titles must be the same</li></ul></td></tr>'
)
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # An identically named callable isn't present in the parent ModelAdmin,
        # so rendering the add view shouldn't explode.
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
self.assertEqual(response.status_code, 200)
# View should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="chapter_set-group"'
)
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="question_set-group"'
)
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
The inlines' model field help texts are displayed when using both the
stacked and tabular layouts.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<div class="help">Awesome stacked help text is awesome.</div>', 4)
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Awesome tabular help text is awesome.)" '
'title="Awesome tabular help text is awesome.">',
1
)
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text for ReadOnlyInline)" '
'title="Help text for ReadOnlyInline">',
1
)
def test_tabular_model_form_meta_readonly_field(self):
"""
Tabular inlines use ModelForm.Meta.help_texts and labels for read-only
fields.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text from ModelForm.Meta)" '
'title="Help text from ModelForm.Meta">'
)
self.assertContains(response, 'Label from ModelForm.Meta')
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertInHTML(
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1">',
response.rendered_content,
)
def test_non_related_name_inline(self):
"""
Multiple inlines with related_name='+' have correct form prefixes.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response, '<input type="hidden" name="-1-0-id" id="id_-1-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-1-0-name" type="text" class="vTextField" name="-1-0-name" maxlength="100">',
html=True
)
self.assertContains(response, '<input type="hidden" name="-2-0-id" id="id_-2-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-2-0-name" type="text" class="vTextField" name="-2-0-name" maxlength="100">',
html=True
)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
The "View on Site" link is correct for locales that use thousand
separators.
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
The "View on Site" link is correct for models with a custom primary key
field.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
An object can be created with inlines when it inherits another class.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = (
'<input id="id_binarytree_set-MAX_NUM_FORMS" '
'name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d">'
)
# The total number of forms will remain the same in either case
total_forms_hidden = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="2">'
)
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertInHTML(max_forms_input % 3, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertInHTML(max_forms_input % 2, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
def test_min_num(self):
"""
min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2">'
)
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="5">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms, response.rendered_content)
self.assertInHTML(total_forms, response.rendered_content)
def test_custom_min_num(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d">'
)
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms % 2, response.rendered_content)
self.assertInHTML(total_forms % 5, response.rendered_content)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertInHTML(min_forms % 5, response.rendered_content)
self.assertInHTML(total_forms % 8, response.rendered_content)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbook_set-0-rand_pk" '
'name="nonautopkbook_set-0-rand_pk" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" '
'name="nonautopkbook_set-2-0-rand_pk" type="hidden">',
html=True
)
def test_inline_nonauto_noneditable_inherited_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-0-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-0-nonautopkbook_ptr" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-2-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-2-nonautopkbook_ptr" type="hidden">',
html=True
)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" '
'name="editablepkbook_set-0-manual_pk" type="number">',
html=True, count=1
)
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" '
'name="editablepkbook_set-2-0-manual_pk" type="number">',
html=True, count=1
)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_noneditable_inline_has_field_inputs(self):
"""Inlines without change permission shows field inputs on add form."""
response = self.client.get(reverse('admin:admin_inlines_novelreadonlychapter_add'))
self.assertContains(
response,
'<input type="text" name="chapter_set-0-name" '
'class="vTextField" maxlength="40" id="id_chapter_set-0-name">',
html=True
)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineMedia(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertEqual(
response.context['inline_admin_formsets'][0].media._js,
[
'admin/js/vendor/jquery/jquery.min.js',
'my_awesome_inline_scripts.js',
'custom_number.js',
'admin/js/jquery.init.js',
'admin/js/inlines.min.js',
]
)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
@classmethod
def setUpTestData(cls):
cls.user = User(username='admin', is_staff=True, is_active=True)
cls.user.set_password('secret')
cls.user.save()
cls.author_ct = ContentType.objects.get_for_model(Author)
cls.holder_ct = ContentType.objects.get_for_model(Holder2)
cls.book_ct = ContentType.objects.get_for_model(Book)
cls.inner_ct = ContentType.objects.get_for_model(Inner2)
        # The user always has permission to add and change Authors and
        # Holders, the main (parent) models of the inlines. Permissions on the
        # inlines vary per test.
permission = Permission.objects.get(codename='add_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
cls.book = author.books.create(name='The inline Book')
cls.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=cls.book)
cls.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
cls.holder = Holder2.objects.create(dummy=13)
cls.inner2 = Inner2.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(self.holder.id,))
self.client.force_login(self.user)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_view_only_perm(self):
permission = Permission.objects.get(codename='view_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# View-only inlines. (It could be nicer to hide the empty, non-editable
# inlines on the add page.)
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(
response,
'<input type="hidden" name="Author_books-TOTAL_FORMS" value="0" '
'id="id_Author_books-TOTAL_FORMS">',
html=True,
)
self.assertNotContains(response, 'Add another Author-Book Relationship')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS">', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_view_only_perm(self):
permission = Permission.objects.get(codename='view_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# View-only inlines.
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, False)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, False)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(
response,
'<input type="hidden" name="Author_books-TOTAL_FORMS" value="1" '
'id="id_Author_books-TOTAL_FORMS">',
html=True,
)
# The field in the inline is read-only.
self.assertContains(response, '<p>%s</p>' % self.book)
self.assertNotContains(
response,
'<input type="checkbox" name="Author_books-0-DELETE" id="id_Author_books-0-DELETE">',
html=True,
)
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertIs(response.context['inline_admin_formset'].has_view_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_add_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_change_permission, True)
self.assertIs(response.context['inline_admin_formset'].has_delete_permission, True)
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author-book relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS">', html=True)
self.assertContains(
response,
'<input type="hidden" id="id_Author_books-0-id" value="%i" '
'name="Author_books-0-id">' % self.author_book_auto_m2m_intermediate_id,
html=True
)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="3" '
'name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertNotContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
# Just the one form for existing instances
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
        # MAX_NUM_FORMS of 0 means new forms can't be added.
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" value="0" name="inner2_set-MAX_NUM_FORMS">',
html=True
)
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
        # One form for the existing instance and three extra forms for new ones
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestReadOnlyChangeViewInlinePermissions(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user('testing', password='password', is_staff=True)
cls.user.user_permissions.add(
Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll))
)
cls.user.user_permissions.add(
*Permission.objects.filter(
codename__endswith="question", content_type=ContentType.objects.get_for_model(Question)
).values_list('pk', flat=True)
)
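        # i.e. the add_question, change_question, delete_question, and
        # view_question permissions.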
cls.poll = Poll.objects.create(name="Survey")
cls.add_url = reverse('admin:admin_inlines_poll_add')
cls.change_url = reverse('admin:admin_inlines_poll_change', args=(cls.poll.id,))
def setUp(self):
self.client.force_login(self.user)
def test_add_url_not_allowed(self):
response = self.client.get(self.add_url)
self.assertEqual(response.status_code, 403)
response = self.client.post(self.add_url, {})
self.assertEqual(response.status_code, 403)
def test_post_to_change_url_not_allowed(self):
response = self.client.post(self.change_url, {})
self.assertEqual(response.status_code, 403)
def test_get_to_change_url_is_allowed(self):
response = self.client.get(self.change_url)
self.assertEqual(response.status_code, 200)
def test_main_model_is_rendered_as_read_only(self):
response = self.client.get(self.change_url)
self.assertContains(
response,
'<div class="readonly">%s</div>' % self.poll.name,
html=True
)
input = '<input type="text" name="name" value="%s" class="vTextField" maxlength="40" required id="id_name">'
self.assertNotContains(
response,
input % self.poll.name,
html=True
)
def test_inlines_are_rendered_as_read_only(self):
question = Question.objects.create(text="How will this be rendered?", poll=self.poll)
response = self.client.get(self.change_url)
self.assertContains(
response,
'<td class="field-text"><p>%s</p></td>' % question.text,
html=True
)
self.assertNotContains(response, 'id="id_question_set-0-text"')
self.assertNotContains(response, 'id="id_related_objs-0-DELETE"')
def test_submit_line_shows_only_close_button(self):
response = self.client.get(self.change_url)
self.assertContains(
response,
'<a href="/admin/admin_inlines/poll/" class="closelink">Close</a>',
html=True
)
delete_link = '<p class="deletelink-box"><a href="/admin/admin_inlines/poll/%s/delete/" class="deletelink">Delete</a></p>' # noqa
self.assertNotContains(
response,
delete_link % self.poll.id,
html=True
)
self.assertNotContains(response, '<input type="submit" value="Save and add another" name="_addanother">')
self.assertNotContains(response, '<input type="submit" value="Save and continue editing" name="_continue">')
def test_inline_delete_buttons_are_not_shown(self):
Question.objects.create(text="How will this be rendered?", poll=self.poll)
response = self.client.get(self.change_url)
self.assertNotContains(
response,
'<input type="checkbox" name="question_set-0-DELETE" id="id_question_set-0-DELETE">',
html=True
)
def test_extra_inlines_are_not_shown(self):
response = self.client.get(self.change_url)
self.assertNotContains(response, 'id="id_question_set-0-text"')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumTestCase.available_apps
def setUp(self):
User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def test_add_stackeds(self):
"""
The "Add another XXX" link correctly adds items to the stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector('%s .inline-deletelink' % inline_id):
delete_link.click()
with self.disable_implicit_wait():
self.assertEqual(rows_length(), 0)
def test_delete_invalid_stacked_inlines(self):
from selenium.common.exceptions import NoSuchElementException
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('#id_inner4stacked_set-4-dummy')), 1)
# Enter some data and click 'Save'.
self.selenium.find_element_by_name('dummy').send_keys('1')
self.selenium.find_element_by_name('inner4stacked_set-0-dummy').send_keys('100')
self.selenium.find_element_by_name('inner4stacked_set-1-dummy').send_keys('101')
self.selenium.find_element_by_name('inner4stacked_set-2-dummy').send_keys('222')
self.selenium.find_element_by_name('inner4stacked_set-3-dummy').send_keys('103')
self.selenium.find_element_by_name('inner4stacked_set-4-dummy').send_keys('222')
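        # Two rows use the same value (222); assuming a uniqueness constraint
        # on Inner4Stacked, saving should trigger the duplicate-value formset
        # error asserted below.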
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(rows_length(), 5, msg="sanity check")
errorlist = self.selenium.find_element_by_css_selector(
'%s .dynamic-inner4stacked_set .errorlist li' % inline_id
)
self.assertEqual('Please correct the duplicate values below.', errorlist.text)
delete_link = self.selenium.find_element_by_css_selector('#inner4stacked_set-4 .inline-deletelink')
delete_link.click()
self.assertEqual(rows_length(), 4)
with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_css_selector('%s .dynamic-inner4stacked_set .errorlist li' % inline_id)
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database.
self.assertEqual(Inner4Stacked.objects.all().count(), 4)
def test_delete_invalid_tabular_inlines(self):
from selenium.common.exceptions import NoSuchElementException
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4tabular_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4tabular_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 tabular')
add_button.click()
add_button.click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('#id_inner4tabular_set-4-dummy')), 1)
# Enter some data and click 'Save'.
self.selenium.find_element_by_name('dummy').send_keys('1')
self.selenium.find_element_by_name('inner4tabular_set-0-dummy').send_keys('100')
self.selenium.find_element_by_name('inner4tabular_set-1-dummy').send_keys('101')
self.selenium.find_element_by_name('inner4tabular_set-2-dummy').send_keys('222')
self.selenium.find_element_by_name('inner4tabular_set-3-dummy').send_keys('103')
self.selenium.find_element_by_name('inner4tabular_set-4-dummy').send_keys('222')
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(rows_length(), 5, msg="sanity check")
# Non-field errorlist is in its own <tr> just before
# tr#inner4tabular_set-3:
errorlist = self.selenium.find_element_by_css_selector(
'%s #inner4tabular_set-3 + .row-form-errors .errorlist li' % inline_id
)
self.assertEqual('Please correct the duplicate values below.', errorlist.text)
delete_link = self.selenium.find_element_by_css_selector('#inner4tabular_set-4 .inline-deletelink')
delete_link.click()
self.assertEqual(rows_length(), 4)
with self.disable_implicit_wait(), self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_css_selector('%s .dynamic-inner4tabular_set .errorlist li' % inline_id)
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database.
self.assertEqual(Inner4Tabular.objects.all().count(), 4)
def test_add_inlines(self):
"""
The "Add another XXX" link correctly adds items to the inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# There's only one inline to start with and it has the correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# The inline has been added, it has the right id, and it contains the
# correct fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
with self.wait_page_loaded():
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
# The objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_add_inline_link_absent_for_view_only_parent_model(self):
from selenium.common.exceptions import NoSuchElementException
user = User.objects.create_user('testing', password='password', is_staff=True)
user.user_permissions.add(
Permission.objects.get(codename='view_poll', content_type=ContentType.objects.get_for_model(Poll))
)
user.user_permissions.add(
*Permission.objects.filter(
codename__endswith="question", content_type=ContentType.objects.get_for_model(Question)
).values_list('pk', flat=True)
)
self.admin_login(username='testing', password='password')
poll = Poll.objects.create(name="Survey")
change_url = reverse('admin:admin_inlines_poll_change', args=(poll.id,))
self.selenium.get(self.live_server_url + change_url)
with self.disable_implicit_wait():
with self.assertRaises(NoSuchElementException):
self.selenium.find_element_by_link_text('Add another Question')
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# The rows are gone and the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_collapsed_inlines(self):
# Collapsed inlines have SHOW/HIDE links.
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_author_add'))
        # One field is in a stacked inline, the other in a tabular one.
test_fields = ['#id_nonautopkbook_set-0-title', '#id_nonautopkbook_set-2-0-title']
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 3)
for show_index, field_name in enumerate(test_fields, 0):
self.wait_until_invisible(field_name)
show_links[show_index].click()
self.wait_until_visible(field_name)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields, 0):
self.wait_until_visible(field_name)
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
def test_added_stacked_inline_with_collapsed_fields(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_teacher_add'))
self.selenium.find_element_by_link_text('Add another Child').click()
test_fields = ['#id_child_set-0-name', '#id_child_set-1-name']
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 2)
for show_index, field_name in enumerate(test_fields, 0):
self.wait_until_invisible(field_name)
show_links[show_index].click()
self.wait_until_visible(field_name)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields, 0):
self.wait_until_visible(field_name)
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
def test_inline_formset_error_input_border(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add'))
self.wait_until_visible('#id_dummy')
self.selenium.find_element_by_id('id_dummy').send_keys(1)
fields = ['id_inner5stacked_set-0-dummy', 'id_inner5tabular_set-0-dummy']
show_links = self.selenium.find_elements_by_link_text('SHOW')
for show_index, field_name in enumerate(fields):
show_links[show_index].click()
self.wait_until_visible('#' + field_name)
self.selenium.find_element_by_id(field_name).send_keys(1)
        # Before saving, all inputs have the default border.
for inline in ('stacked', 'tabular'):
for field_name in ('name', 'select', 'text'):
element_id = 'id_inner5%s_set-0-%s' % (inline, field_name)
self.assertEqual(
self.selenium.find_element_by_id(element_id).value_of_css_property('border'),
'1px solid rgb(204, 204, 204)', # 1px solid #cccccc
)
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
        # Test the red border around inputs via CSS selectors.
stacked_selectors = ['.errors input', '.errors select', '.errors textarea']
for selector in stacked_selectors:
self.assertEqual(
self.selenium.find_element_by_css_selector(selector).value_of_css_property('border'),
'1px solid rgb(186, 33, 33)', # 1px solid #ba2121
)
tabular_selectors = [
'td ul.errorlist + input', 'td ul.errorlist + select', 'td ul.errorlist + textarea'
]
for selector in tabular_selectors:
self.assertEqual(
self.selenium.find_element_by_css_selector(selector).value_of_css_property('border'),
'1px solid rgb(186, 33, 33)', # 1px solid #ba2121
)
def test_inline_formset_error(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder5_add'))
stacked_inline_formset_selector = 'div#inner5stacked_set-group fieldset.module.collapse'
tabular_inline_formset_selector = 'div#inner5tabular_set-group fieldset.module.collapse'
# Inlines without errors, both inlines collapsed
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector + '.collapsed')), 1
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector + '.collapsed')), 1
)
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 2)
# Inlines with errors, both inlines expanded
test_fields = ['#id_inner5stacked_set-0-dummy', '#id_inner5tabular_set-0-dummy']
for show_index, field_name in enumerate(test_fields):
show_links[show_index].click()
self.wait_until_visible(field_name)
self.selenium.find_element_by_id(field_name[1:]).send_keys(1)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields):
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector + '.collapsed')), 0
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector + '.collapsed')), 0
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(stacked_inline_formset_selector)), 1
)
self.assertEqual(
len(self.selenium.find_elements_by_css_selector(tabular_inline_formset_selector)), 1
)
|
f9718ed518a1d48cce6d3c65fd304a8312f8233e67199ae0fbb4d4c120d3ecc9 | import unittest
from django.core.management.color import no_style
from django.db import connection
from ..models import Person, Tag
@unittest.skipUnless(connection.vendor == 'oracle', 'Oracle tests')
class OperationsTests(unittest.TestCase):
def test_sequence_name_truncation(self):
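        # Oracle limits identifiers to 30 characters, so the sequence name for
        # a long table name is expected to be truncated and given a hash
        # suffix before '_SQ' is appended.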
seq_name = connection.ops._get_no_autofield_sequence_name('schema_authorwithevenlongee869')
self.assertEqual(seq_name, 'SCHEMA_AUTHORWITHEVENLOB0B8_SQ')
def test_bulk_batch_size(self):
# Oracle restricts the number of parameters in a query.
objects = range(2**16)
self.assertEqual(connection.ops.bulk_batch_size([], objects), len(objects))
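        # With no fields there are no bind parameters per object, so every
        # object fits in a single batch.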
# Each field is a parameter for each object.
self.assertEqual(
connection.ops.bulk_batch_size(['id'], objects),
connection.features.max_query_params,
)
self.assertEqual(
connection.ops.bulk_batch_size(['id', 'other'], objects),
connection.features.max_query_params // 2,
)
def test_sql_flush(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_TAG" DISABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:-1]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
],
)
self.assertEqual(
statements[-1],
'ALTER TABLE "BACKENDS_TAG" ENABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F";',
)
def test_sql_flush_allow_cascade(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
allow_cascade=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" DISABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:-1]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
'TRUNCATE TABLE "BACKENDS_VERYLONGMODELNAME540F";',
],
)
self.assertEqual(
statements[-1],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" ENABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F";',
)
def test_sql_flush_sequences(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_TAG" DISABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:3]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
],
)
self.assertEqual(
statements[3],
'ALTER TABLE "BACKENDS_TAG" ENABLE CONSTRAINT '
'"BACKENDS__CONTENT_T_FD9D7A85_F";',
)
# Sequences.
self.assertEqual(len(statements[4:]), 2)
self.assertIn('BACKENDS_PERSON_SQ', statements[4])
self.assertIn('BACKENDS_TAG_SQ', statements[5])
def test_sql_flush_sequences_allow_cascade(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
allow_cascade=True,
)
# The tables and constraints are processed in an unordered set.
self.assertEqual(
statements[0],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" DISABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F" KEEP INDEX;',
)
self.assertEqual(
sorted(statements[1:4]),
[
'TRUNCATE TABLE "BACKENDS_PERSON";',
'TRUNCATE TABLE "BACKENDS_TAG";',
'TRUNCATE TABLE "BACKENDS_VERYLONGMODELNAME540F";',
],
)
self.assertEqual(
statements[4],
'ALTER TABLE "BACKENDS_VERYLONGMODELNAME540F" ENABLE CONSTRAINT '
'"BACKENDS__PERSON_ID_1DD5E829_F";',
)
# Sequences.
self.assertEqual(len(statements[5:]), 3)
self.assertIn('BACKENDS_PERSON_SQ', statements[5])
self.assertIn('BACKENDS_VERYLONGMODELN7BE2_SQ', statements[6])
self.assertIn('BACKENDS_TAG_SQ', statements[7])
|
240a15a3cfab5fe65743e37e238bb557da9bb469b13d0358828a39b160afb4e4 | import unittest
from django.core.management.color import no_style
from django.db import connection
from django.test import TestCase
from ..models import Person, Tag
@unittest.skipUnless(connection.vendor == 'sqlite', 'SQLite tests.')
class SQLiteOperationsTests(TestCase):
def test_sql_flush(self):
self.assertEqual(
connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
),
[
'DELETE FROM "backends_person";',
'DELETE FROM "backends_tag";',
],
)
def test_sql_flush_allow_cascade(self):
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
allow_cascade=True,
)
self.assertEqual(
# The tables are processed in an unordered set.
sorted(statements),
[
'DELETE FROM "backends_person";',
'DELETE FROM "backends_tag";',
'DELETE FROM "backends_verylongmodelnamezzzzzzzzzzzzzzzzzzzzzz'
'zzzzzzzzzzzzzzzzzzzz_m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzz'
'zzzzzzzzzzzzzzzzzzzzzzz";',
],
)
def test_sql_flush_sequences(self):
        # reset_sequences doesn't change the statements on SQLite.
self.assertEqual(
connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
),
[
'DELETE FROM "backends_person";',
'DELETE FROM "backends_tag";',
],
)
def test_sql_flush_sequences_allow_cascade(self):
        # reset_sequences doesn't change the statements on SQLite.
statements = connection.ops.sql_flush(
no_style(),
[Person._meta.db_table, Tag._meta.db_table],
reset_sequences=True,
allow_cascade=True,
)
self.assertEqual(
# The tables are processed in an unordered set.
sorted(statements),
[
'DELETE FROM "backends_person";',
'DELETE FROM "backends_tag";',
'DELETE FROM "backends_verylongmodelnamezzzzzzzzzzzzzzzzzzzzzz'
'zzzzzzzzzzzzzzzzzzzz_m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzz'
'zzzzzzzzzzzzzzzzzzzzzzz";',
],
)
|
a15d9880839055893ea15d271b36456d4d5c456d332e1b9497e1cdcf197c56c9 | import decimal
from django.core.management.color import no_style
from django.db import NotSupportedError, connection, transaction
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models import DurationField
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, override_settings,
skipIfDBFeature,
)
from django.utils import timezone
from ..models import Author, Book
class SimpleDatabaseOperationTests(SimpleTestCase):
may_require_msg = 'subclasses of BaseDatabaseOperations may require a %s() method'
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
def test_deferrable_sql(self):
self.assertEqual(self.ops.deferrable_sql(), '')
def test_end_transaction_rollback(self):
self.assertEqual(self.ops.end_transaction_sql(success=False), 'ROLLBACK;')
def test_no_limit_value(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'no_limit_value'):
self.ops.no_limit_value()
def test_quote_name(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'quote_name'):
self.ops.quote_name('a')
def test_regex_lookup(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'regex_lookup'):
self.ops.regex_lookup(lookup_type='regex')
def test_set_time_zone_sql(self):
self.assertEqual(self.ops.set_time_zone_sql(), '')
def test_sql_flush(self):
msg = 'subclasses of BaseDatabaseOperations must provide a sql_flush() method'
with self.assertRaisesMessage(NotImplementedError, msg):
self.ops.sql_flush(None, None)
def test_pk_default_value(self):
self.assertEqual(self.ops.pk_default_value(), 'DEFAULT')
def test_tablespace_sql(self):
self.assertEqual(self.ops.tablespace_sql(None), '')
def test_sequence_reset_by_name_sql(self):
self.assertEqual(self.ops.sequence_reset_by_name_sql(None, []), [])
def test_adapt_unknown_value_decimal(self):
value = decimal.Decimal('3.14')
self.assertEqual(
self.ops.adapt_unknown_value(value),
self.ops.adapt_decimalfield_value(value)
)
def test_adapt_unknown_value_date(self):
value = timezone.now().date()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_datefield_value(value))
def test_adapt_unknown_value_time(self):
value = timezone.now().time()
self.assertEqual(self.ops.adapt_unknown_value(value), self.ops.adapt_timefield_value(value))
def test_adapt_timefield_value_none(self):
self.assertIsNone(self.ops.adapt_timefield_value(None))
def test_adapt_datetimefield_value(self):
self.assertIsNone(self.ops.adapt_datetimefield_value(None))
def test_adapt_timefield_value(self):
msg = 'Django does not support timezone-aware times.'
with self.assertRaisesMessage(ValueError, msg):
self.ops.adapt_timefield_value(timezone.make_aware(timezone.now()))
@override_settings(USE_TZ=False)
def test_adapt_timefield_value_unaware(self):
now = timezone.now()
self.assertEqual(self.ops.adapt_timefield_value(now), str(now))
def test_date_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.date_extract_sql(None, None)
def test_time_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_extract_sql'):
self.ops.time_extract_sql(None, None)
def test_date_interval_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_interval_sql'):
self.ops.date_interval_sql(None)
def test_date_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'date_trunc_sql'):
self.ops.date_trunc_sql(None, None)
def test_time_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'time_trunc_sql'):
self.ops.time_trunc_sql(None, None)
def test_datetime_trunc_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_trunc_sql'):
self.ops.datetime_trunc_sql(None, None, None)
def test_datetime_cast_date_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_date_sql'):
self.ops.datetime_cast_date_sql(None, None)
def test_datetime_cast_time_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_cast_time_sql'):
self.ops.datetime_cast_time_sql(None, None)
def test_datetime_extract_sql(self):
with self.assertRaisesMessage(NotImplementedError, self.may_require_msg % 'datetime_extract_sql'):
self.ops.datetime_extract_sql(None, None, None)
class DatabaseOperationTests(TestCase):
def setUp(self):
self.ops = BaseDatabaseOperations(connection=connection)
@skipIfDBFeature('supports_over_clause')
def test_window_frame_raise_not_supported_error(self):
msg = 'This backend does not support window expressions.'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.window_frame_rows_start_end()
@skipIfDBFeature('can_distinct_on_fields')
def test_distinct_on_fields(self):
msg = 'DISTINCT ON fields is not supported by this database backend'
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.distinct_sql(['a', 'b'], None)
@skipIfDBFeature('supports_temporal_subtraction')
def test_subtract_temporals(self):
duration_field = DurationField()
duration_field_internal_type = duration_field.get_internal_type()
msg = (
'This backend does not support %s subtraction.' %
duration_field_internal_type
)
with self.assertRaisesMessage(NotSupportedError, msg):
self.ops.subtract_temporals(duration_field_internal_type, None, None)
class SqlFlushTests(TransactionTestCase):
available_apps = ['backends']
def test_sql_flush_no_tables(self):
self.assertEqual(connection.ops.sql_flush(no_style(), []), [])
def test_execute_sql_flush_statements(self):
with transaction.atomic():
author = Author.objects.create(name='George Orwell')
Book.objects.create(author=author)
author = Author.objects.create(name='Harper Lee')
Book.objects.create(author=author)
Book.objects.create(author=author)
self.assertIs(Author.objects.exists(), True)
self.assertIs(Book.objects.exists(), True)
sql_list = connection.ops.sql_flush(
no_style(),
[Author._meta.db_table, Book._meta.db_table],
reset_sequences=True,
allow_cascade=True,
)
connection.ops.execute_sql_flush(connection.alias, sql_list)
with transaction.atomic():
self.assertIs(Author.objects.exists(), False)
self.assertIs(Book.objects.exists(), False)
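            # With reset_sequences=True, the flush statements also restart the
            # sequences, so new rows start again at pk=1 on backends that
            # support sequence reset.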
if connection.features.supports_sequence_reset:
author = Author.objects.create(name='F. Scott Fitzgerald')
self.assertEqual(author.pk, 1)
book = Book.objects.create(author=author)
self.assertEqual(book.pk, 1)
|
624dc27e68862086fa6f985dc8c999f0e34bea9dc0f1b5040064c9a388a327be | from django.utils.version import get_version
VERSION = (3, 2, 0, 'alpha', 0)
__version__ = get_version(VERSION)
def setup(set_prefix=True):
"""
Configure the settings (this happens as a side effect of accessing the
first setting), configure logging and populate the app registry.
Set the thread-local urlresolvers script prefix if `set_prefix` is True.
"""
from django.apps import apps
from django.conf import settings
from django.urls import set_script_prefix
from django.utils.log import configure_logging
configure_logging(settings.LOGGING_CONFIG, settings.LOGGING)
if set_prefix:
set_script_prefix(
'/' if settings.FORCE_SCRIPT_NAME is None else settings.FORCE_SCRIPT_NAME
)
apps.populate(settings.INSTALLED_APPS)
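# A minimal standalone usage sketch (module names below are hypothetical;
# DJANGO_SETTINGS_MODULE must point at a real settings module):
#
#     import os
#     os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')
#     import django
#     django.setup()
#     from myproject.polls.models import Question  # models can be imported now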
|
2d24f459a92536df3edbaa2df90b5c7e910a86ec2aba6944f11c07c66e6683e3 | #!/usr/bin/env python
import argparse
import atexit
import copy
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import warnings
try:
import django
except ImportError as e:
raise RuntimeError(
'Django module not found, reference tests/README.rst for instructions.'
) from e
else:
from django.apps import apps
from django.conf import settings
from django.db import connection, connections
from django.test import TestCase, TransactionTestCase
from django.test.runner import default_test_processes
from django.test.selenium import SeleniumTestCaseBase
from django.test.utils import get_runner
from django.utils.deprecation import (
RemovedInDjango40Warning, RemovedInDjango41Warning,
)
from django.utils.log import DEFAULT_LOGGING
from django.utils.version import PY37
try:
import MySQLdb
except ImportError:
pass
else:
# Ignore informational warnings from QuerySet.explain().
warnings.filterwarnings('ignore', r'\(1003, *', category=MySQLdb.Warning)
# Turn deprecation warnings into errors to ensure no usage of deprecated features.
warnings.simplefilter("error", RemovedInDjango40Warning)
warnings.simplefilter('error', RemovedInDjango41Warning)
# Turn resource and runtime warnings into errors to ensure no usage of
# error-prone patterns.
warnings.simplefilter("error", ResourceWarning)
warnings.simplefilter("error", RuntimeWarning)
# Ignore known warnings in test dependencies.
warnings.filterwarnings("ignore", "'U' mode is deprecated", DeprecationWarning, module='docutils.io')
RUNTESTS_DIR = os.path.abspath(os.path.dirname(__file__))
TEMPLATE_DIR = os.path.join(RUNTESTS_DIR, 'templates')
# Create a specific subdirectory for the duration of the test suite.
TMPDIR = tempfile.mkdtemp(prefix='django_')
# Set the TMPDIR environment variable in addition to tempfile.tempdir
# so that child processes inherit it.
tempfile.tempdir = os.environ['TMPDIR'] = TMPDIR
# Remove the temporary TMPDIR at interpreter exit.
atexit.register(shutil.rmtree, TMPDIR)
SUBDIRS_TO_SKIP = [
'data',
'import_error_package',
'test_runner_apps',
]
ALWAYS_INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.admin.apps.SimpleAdminConfig',
'django.contrib.staticfiles',
]
ALWAYS_MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
# Need to add the associated contrib app to INSTALLED_APPS in some cases to
# avoid "RuntimeError: Model class X doesn't declare an explicit app_label
# and isn't in an application in INSTALLED_APPS."
CONTRIB_TESTS_TO_APPS = {
'flatpages_tests': 'django.contrib.flatpages',
'redirects_tests': 'django.contrib.redirects',
}
def get_test_modules():
modules = []
discovery_paths = [(None, RUNTESTS_DIR)]
if connection.features.gis_enabled:
# GIS tests are in nested apps
discovery_paths.append(('gis_tests', os.path.join(RUNTESTS_DIR, 'gis_tests')))
else:
SUBDIRS_TO_SKIP.append('gis_tests')
for modpath, dirpath in discovery_paths:
for f in os.scandir(dirpath):
if ('.' not in f.name and
os.path.basename(f.name) not in SUBDIRS_TO_SKIP and
not f.is_file() and
os.path.exists(os.path.join(f.path, '__init__.py'))):
modules.append((modpath, f.name))
return modules
def get_installed():
return [app_config.name for app_config in apps.get_app_configs()]
def setup(verbosity, test_labels, parallel, start_at, start_after):
# Reduce the given test labels to just the app module path.
test_labels_set = set()
for label in test_labels:
bits = label.split('.')[:1]
test_labels_set.add('.'.join(bits))
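        # e.g. 'admin_inlines.tests.SomeTestCase' is reduced to 'admin_inlines'.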
if verbosity >= 1:
msg = "Testing against Django installed in '%s'" % os.path.dirname(django.__file__)
max_parallel = default_test_processes() if parallel == 0 else parallel
if max_parallel > 1:
msg += " with up to %d processes" % max_parallel
print(msg)
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception("Please define available_apps in TransactionTestCase "
"and its subclasses.")
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None
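    # (available_apps limits the apps whose database tables are flushed
    # between TransactionTestCase runs, which keeps the suite fast.)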
state = {
'INSTALLED_APPS': settings.INSTALLED_APPS,
'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
'TEMPLATES': settings.TEMPLATES,
'LANGUAGE_CODE': settings.LANGUAGE_CODE,
'STATIC_URL': settings.STATIC_URL,
'STATIC_ROOT': settings.STATIC_ROOT,
'MIDDLEWARE': settings.MIDDLEWARE,
}
# Redirect some settings for the duration of these tests.
settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
settings.ROOT_URLCONF = 'urls'
settings.STATIC_URL = '/static/'
settings.STATIC_ROOT = os.path.join(TMPDIR, 'static')
settings.TEMPLATES = [{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}]
settings.LANGUAGE_CODE = 'en'
settings.SITE_ID = 1
settings.MIDDLEWARE = ALWAYS_MIDDLEWARE
settings.MIGRATION_MODULES = {
# This lets us skip creating migrations for the test models as many of
# them depend on one of the following contrib applications.
'auth': None,
'contenttypes': None,
'sessions': None,
}
log_config = copy.deepcopy(DEFAULT_LOGGING)
# Filter out non-error logging so we don't have to capture it in lots of
# tests.
log_config['loggers']['django']['level'] = 'ERROR'
settings.LOGGING = log_config
settings.SILENCED_SYSTEM_CHECKS = [
'fields.W342', # ForeignKey(unique=True) -> OneToOneField
'fields.W903', # NullBooleanField deprecated.
]
# Load all the ALWAYS_INSTALLED_APPS.
django.setup()
# It would be nice to put this validation earlier but it must come after
# django.setup() so that connection.features.gis_enabled can be accessed
# without raising AppRegistryNotReady when running gis_tests in isolation
# on some backends (e.g. PostGIS).
if 'gis_tests' in test_labels_set and not connection.features.gis_enabled:
print('Aborting: A GIS database backend is required to run gis_tests.')
sys.exit(1)
def _module_match_label(module_label, label):
# Exact or ancestor match.
return module_label == label or module_label.startswith(label + '.')
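    # For example: _module_match_label('auth_tests.test_views', 'auth_tests')
    # is True (ancestor match), while _module_match_label('auth_tests2',
    # 'auth_tests') is False because the prefix must end at a '.' boundary.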
# Load all the test model apps.
test_modules = get_test_modules()
found_start = not (start_at or start_after)
installed_app_names = set(get_installed())
for modpath, module_name in test_modules:
if modpath:
module_label = modpath + '.' + module_name
else:
module_label = module_name
if not found_start:
if start_at and _module_match_label(module_label, start_at):
found_start = True
elif start_after and _module_match_label(module_label, start_after):
found_start = True
continue
else:
continue
        # If the module (or an ancestor) was named on the command line, or no
        # modules were named (i.e., run all), import this module and add it to
        # INSTALLED_APPS.
module_found_in_labels = not test_labels or any(
_module_match_label(module_label, label) for label in test_labels_set
)
if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])
if module_found_in_labels and module_label not in installed_app_names:
if verbosity >= 2:
print("Importing application %s" % module_name)
settings.INSTALLED_APPS.append(module_label)
# Add contrib.gis to INSTALLED_APPS if needed (rather than requiring
    # @override_settings(INSTALLED_APPS=...) on all test cases).
gis = 'django.contrib.gis'
if connection.features.gis_enabled and gis not in settings.INSTALLED_APPS:
if verbosity >= 2:
print("Importing application %s" % gis)
settings.INSTALLED_APPS.append(gis)
apps.set_installed_apps(settings.INSTALLED_APPS)
return state
def teardown(state):
# Restore the old settings.
for key, value in state.items():
setattr(settings, key, value)
# Discard the multiprocessing.util finalizer that tries to remove a
# temporary directory that's already removed by this script's
# atexit.register(shutil.rmtree, TMPDIR) handler. Prevents
# FileNotFoundError at the end of a test run (#27890).
from multiprocessing.util import _finalizer_registry
_finalizer_registry.pop((-100, 0), None)
def actual_test_processes(parallel):
if parallel == 0:
# This doesn't work before django.setup() on some databases.
if all(conn.features.can_clone_databases for conn in connections.all()):
return default_test_processes()
else:
return 1
else:
return parallel
class ActionSelenium(argparse.Action):
"""
Validate the comma-separated list of requested browsers.
"""
def __call__(self, parser, namespace, values, option_string=None):
browsers = values.split(',')
for browser in browsers:
try:
SeleniumTestCaseBase.import_webdriver(browser)
except ImportError:
raise argparse.ArgumentError(self, "Selenium browser specification '%s' is not valid." % browser)
setattr(namespace, self.dest, browsers)
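# Sketch of the action above (illustrative): passing --selenium=firefox,chrome
# validates that each webdriver can be imported and stores
# options.selenium == ['firefox', 'chrome'].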
def django_tests(verbosity, interactive, failfast, keepdb, reverse,
test_labels, debug_sql, parallel, tags, exclude_tags,
test_name_patterns, start_at, start_after, pdb, buffer):
state = setup(verbosity, test_labels, parallel, start_at, start_after)
extra_tests = []
# Run the test suite, including the extra validation tests.
if not hasattr(settings, 'TEST_RUNNER'):
settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
TestRunner = get_runner(settings)
test_runner = TestRunner(
verbosity=verbosity,
interactive=interactive,
failfast=failfast,
keepdb=keepdb,
reverse=reverse,
debug_sql=debug_sql,
parallel=actual_test_processes(parallel),
tags=tags,
exclude_tags=exclude_tags,
test_name_patterns=test_name_patterns,
pdb=pdb,
buffer=buffer,
)
failures = test_runner.run_tests(
test_labels or get_installed(),
extra_tests=extra_tests,
)
teardown(state)
return failures
def get_subprocess_args(options):
subprocess_args = [
sys.executable, __file__, '--settings=%s' % options.settings
]
if options.failfast:
subprocess_args.append('--failfast')
if options.verbosity:
subprocess_args.append('--verbosity=%s' % options.verbosity)
if not options.interactive:
subprocess_args.append('--noinput')
if options.tags:
subprocess_args.append('--tag=%s' % options.tags)
if options.exclude_tags:
subprocess_args.append('--exclude_tag=%s' % options.exclude_tags)
return subprocess_args
def bisect_tests(bisection_label, options, test_labels, parallel, start_at, start_after):
state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
test_labels = test_labels or get_installed()
print('***** Bisecting test suite: %s' % ' '.join(test_labels))
# Make sure the bisection point isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [bisection_label, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
iteration = 1
while len(test_labels) > 1:
midpoint = len(test_labels) // 2
test_labels_a = test_labels[:midpoint] + [bisection_label]
test_labels_b = test_labels[midpoint:] + [bisection_label]
print('***** Pass %da: Running the first half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_a))
failures_a = subprocess.run(subprocess_args + test_labels_a)
print('***** Pass %db: Running the second half of the test suite' % iteration)
print('***** Test labels: %s' % ' '.join(test_labels_b))
print('')
failures_b = subprocess.run(subprocess_args + test_labels_b)
if failures_a.returncode and not failures_b.returncode:
print("***** Problem found in first half. Bisecting again...")
iteration += 1
test_labels = test_labels_a[:-1]
elif failures_b.returncode and not failures_a.returncode:
print("***** Problem found in second half. Bisecting again...")
iteration += 1
test_labels = test_labels_b[:-1]
elif failures_a.returncode and failures_b.returncode:
print("***** Multiple sources of failure found")
break
else:
print("***** No source of failure found... try pair execution (--pair)")
break
if len(test_labels) == 1:
print("***** Source of error: %s" % test_labels[0])
teardown(state)
def paired_tests(paired_test, options, test_labels, parallel, start_at, start_after):
state = setup(options.verbosity, test_labels, parallel, start_at, start_after)
test_labels = test_labels or get_installed()
print('***** Trying paired execution')
# Make sure the constant member of the pair isn't in the test list
# Also remove tests that need to be run in specific combinations
for label in [paired_test, 'model_inheritance_same_model_name']:
try:
test_labels.remove(label)
except ValueError:
pass
subprocess_args = get_subprocess_args(options)
for i, label in enumerate(test_labels):
print('***** %d of %d: Check test pairing with %s' % (
i + 1, len(test_labels), label))
failures = subprocess.call(subprocess_args + [label, paired_test])
if failures:
print('***** Found problem pair with %s' % label)
return
print('***** No problem pair found')
teardown(state)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run the Django test suite.")
parser.add_argument(
'modules', nargs='*', metavar='module',
help='Optional path(s) to test modules; e.g. "i18n" or '
'"i18n.tests.TranslationTests.test_lazy_objects".',
)
parser.add_argument(
'-v', '--verbosity', default=1, type=int, choices=[0, 1, 2, 3],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output',
)
parser.add_argument(
'--noinput', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--failfast', action='store_true',
help='Tells Django to stop running the test suite after first failed test.',
)
parser.add_argument(
'--keepdb', action='store_true',
help='Tells Django to preserve the test database between runs.',
)
parser.add_argument(
'--settings',
help='Python path to settings module, e.g. "myproject.settings". If '
'this isn\'t provided, either the DJANGO_SETTINGS_MODULE '
'environment variable or "test_sqlite" will be used.',
)
parser.add_argument(
'--bisect',
help='Bisect the test suite to discover a test that causes a test '
'failure when combined with the named test.',
)
parser.add_argument(
'--pair',
help='Run the test suite in pairs with the named test to find problem pairs.',
)
parser.add_argument(
'--reverse', action='store_true',
help='Sort test suites and test cases in opposite order to debug '
'test side effects not apparent with normal execution lineup.',
)
parser.add_argument(
'--selenium', action=ActionSelenium, metavar='BROWSERS',
help='A comma-separated list of browsers to run the Selenium tests against.',
)
parser.add_argument(
'--headless', action='store_true',
help='Run selenium tests in headless mode, if the browser supports the option.',
)
parser.add_argument(
'--selenium-hub',
help='A URL for a selenium hub instance to use in combination with --selenium.',
)
parser.add_argument(
'--external-host', default=socket.gethostname(),
help='The external host that can be reached by the selenium hub instance when running Selenium '
'tests via Selenium Hub.',
)
parser.add_argument(
'--debug-sql', action='store_true',
help='Turn on the SQL query logger within tests.',
)
parser.add_argument(
'--parallel', nargs='?', default=0, type=int,
const=default_test_processes(), metavar='N',
help='Run tests using up to N parallel processes.',
)
parser.add_argument(
'--tag', dest='tags', action='append',
help='Run only tests with the specified tags. Can be used multiple times.',
)
parser.add_argument(
'--exclude-tag', dest='exclude_tags', action='append',
help='Do not run tests with the specified tag. Can be used multiple times.',
)
parser.add_argument(
'--start-after', dest='start_after',
help='Run tests starting after the specified top-level module.',
)
parser.add_argument(
'--start-at', dest='start_at',
help='Run tests starting at the specified top-level module.',
)
parser.add_argument(
'--pdb', action='store_true',
help='Runs the PDB debugger on error or failure.'
)
parser.add_argument(
'-b', '--buffer', action='store_true',
help='Discard output of passing tests.',
)
if PY37:
parser.add_argument(
'-k', dest='test_name_patterns', action='append',
help=(
'Only run test methods and classes matching test name pattern. '
'Same as unittest -k option. Can be used multiple times.'
),
)
options = parser.parse_args()
using_selenium_hub = options.selenium and options.selenium_hub
if options.selenium_hub and not options.selenium:
parser.error('--selenium-hub and --external-host require --selenium to be used.')
if using_selenium_hub and not options.external_host:
parser.error('--selenium-hub and --external-host must be used together.')
# Allow including a trailing slash on app_labels for tab completion convenience
options.modules = [os.path.normpath(labels) for labels in options.modules]
mutually_exclusive_options = [options.start_at, options.start_after, options.modules]
enabled_module_options = [bool(option) for option in mutually_exclusive_options].count(True)
if enabled_module_options > 1:
print('Aborting: --start-at, --start-after, and test labels are mutually exclusive.')
sys.exit(1)
for opt_name in ['start_at', 'start_after']:
opt_val = getattr(options, opt_name)
if opt_val:
if '.' in opt_val:
print('Aborting: --%s must be a top-level module.' % opt_name.replace('_', '-'))
sys.exit(1)
setattr(options, opt_name, os.path.normpath(opt_val))
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
else:
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_sqlite')
options.settings = os.environ['DJANGO_SETTINGS_MODULE']
if options.selenium:
if not options.tags:
options.tags = ['selenium']
elif 'selenium' not in options.tags:
options.tags.append('selenium')
if options.selenium_hub:
SeleniumTestCaseBase.selenium_hub = options.selenium_hub
SeleniumTestCaseBase.external_host = options.external_host
SeleniumTestCaseBase.headless = options.headless
SeleniumTestCaseBase.browsers = options.selenium
if options.bisect:
bisect_tests(
options.bisect, options, options.modules, options.parallel,
options.start_at, options.start_after,
)
elif options.pair:
paired_tests(
options.pair, options, options.modules, options.parallel,
options.start_at, options.start_after,
)
else:
failures = django_tests(
options.verbosity, options.interactive, options.failfast,
options.keepdb, options.reverse, options.modules,
options.debug_sql, options.parallel, options.tags,
options.exclude_tags,
getattr(options, 'test_name_patterns', None),
options.start_at, options.start_after, options.pdb, options.buffer,
)
if failures:
sys.exit(1)
|
8016cecb6700ac2889fa491b690d7e78988ad541dc7f06a683bd7fb3ebd04a3f | # Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't picklable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
from os.path import abspath, dirname, join
# Workaround for sphinx-build recursion limit overflow:
# pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
# RuntimeError: maximum recursion depth exceeded while pickling an object
#
# Python's default allowed recursion depth is 1000 but this isn't enough for
# building docs/ref/settings.txt sometimes.
# https://groups.google.com/d/topic/sphinx-dev/MtRf64eGtv4/discussion
sys.setrecursionlimit(2000)
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.6.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"djangodocs",
'sphinx.ext.extlinks',
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
]
# AutosectionLabel settings.
# Uses a <page>:<label> schema which doesn't work for duplicate sub-section
# labels, so set max depth.
autosectionlabel_prefix_document = True
autosectionlabel_maxdepth = 2
# Spelling check needs an additional module that is not installed by default.
# Add it only if spelling check is requested so docs can be generated without it.
if 'spelling' in sys.argv:
extensions.append("sphinxcontrib.spelling")
# Spelling language.
spelling_lang = 'en_US'
# Location of word list.
spelling_word_list_filename = 'spelling_wordlist'
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Django'
copyright = 'Django Software Foundation and contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.2'
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep440ver = get_version()
if VERSION[3:5] == ('alpha', 0) and 'dev' not in pep440ver:
return pep440ver + '.dev'
return pep440ver
release = django_release()
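# Worked example of the logic above (a sketch): if VERSION is
# (3, 2, 0, 'alpha', 0) and get_version() returns a plain '3.2' (no git
# changeset available), release becomes '3.2.dev', keeping pre-release builds
# clearly labelled.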
# The "development version" of Django
django_next_version = '3.2'
extlinks = {
'commit': ('https://github.com/django/django/commit/%s', ''),
'cve': ('https://nvd.nist.gov/view/vuln/detail?vulnId=%s', 'CVE-'),
# A file or directory. GitHub redirects from blob to tree if needed.
'source': ('https://github.com/django/django/blob/master/%s', ''),
'ticket': ('https://code.djangoproject.com/ticket/%s', '#'),
}
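# With sphinx.ext.extlinks configured as above, the docs can use roles such as
# :ticket:`12345`, which renders as a link captioned "#12345" pointing at
# https://code.djangoproject.com/ticket/12345 (the second tuple item is the
# caption prefix).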
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ['locale/']
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '_theme']
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "default-role-error"
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Links to Python's docs should reference the most recent version of the 3.x
# branch, which is located at this URL.
intersphinx_mapping = {
'python': ('https://docs.python.org/3/', None),
'sphinx': ('https://www.sphinx-doc.org/en/master/', None),
'psycopg2': ('https://www.psycopg.org/docs/', None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# The 'versionadded' and 'versionchanged' directives are overridden.
suppress_warnings = ['app.add_directive']
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# Content template for the index page.
# html_index = ''
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Djangodoc'
modindex_common_prefix = ["django."]
# Appended to every page
rst_epilog = """
.. |django-users| replace:: :ref:`django-users <django-users-mailing-list>`
.. |django-core-mentorship| replace:: :ref:`django-core-mentorship <django-core-mentorship-mailing-list>`
.. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>`
.. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>`
.. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>`
"""
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
'preamble': (
'\\DeclareUnicodeCharacter{2264}{\\ensuremath{\\le}}'
'\\DeclareUnicodeCharacter{2265}{\\ensuremath{\\ge}}'
'\\DeclareUnicodeCharacter{2665}{[unicode-heart]}'
'\\DeclareUnicodeCharacter{2713}{[unicode-checkmark]}'
),
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# latex_documents = []
latex_documents = [
('contents', 'django.tex', 'Django Documentation',
'Django Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(
'ref/django-admin',
'django-admin',
'Utility script for the Django Web framework',
['Django Software Foundation'],
1
)]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [(
master_doc, "django", "", "", "Django",
"Documentation of the Django framework", "Web development", False
)]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = 'Django Software Foundation'
epub_publisher = 'Django Software Foundation'
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = 'djangodocs-epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
epub_cover = ('', 'epub-cover.html')
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
# epub_fix_images = False
# Scale large images.
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'
# If false, no index is generated.
# epub_use_index = True
|
e1c2a9ea8a3162febff72da272993c3f6c0f9913440a67ea6793f0003077e8f9 | import json
import mimetypes
import os
import sys
from copy import copy
from functools import partial
from http import HTTPStatus
from importlib import import_module
from io import BytesIO
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
from asgiref.sync import sync_to_async
from django.conf import settings
from django.core.handlers.asgi import ASGIRequest
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.regex_helper import _lazy_re_compile
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = _lazy_re_compile(r'.*; charset=([\w\d-]+);?')
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
JSON_CONTENT_TYPE_RE = _lazy_re_compile(r'^application\/(.+\+)?json')
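# For illustration (not part of the original module), the pattern accepts both
# plain and structured-suffix JSON media types:
#   JSON_CONTENT_TYPE_RE.match('application/json')          # match
#   JSON_CONTENT_TYPE_RE.match('application/vnd.api+json')  # match
#   JSON_CONTENT_TYPE_RE.match('text/html')                 # None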
class RedirectCycleError(Exception):
"""The test client has been asked to follow a redirect loop."""
def __init__(self, message, last_response):
super().__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload:
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be sought and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in real life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after it's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
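# A minimal sketch of FakePayload's contract (illustrative, not executed here):
#   payload = FakePayload(b'abcde')
#   payload.read(3)      # b'abc'
#   payload.read(3)      # AssertionError: only 2 bytes remain
#   payload.write(b'x')  # ValueError: can't write after reading has started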
def closing_iterator_wrapper(iterable, close):
try:
yield from iterable
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
compliance with RFC 7230, section 3.3.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
    An HTTP Handler that can be used for testing purposes. Use the WSGI
interface to compose requests, but return the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
        # Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
        # Attach the originating request to the response so that it can be
        # retrieved later.
response.wsgi_request = request
# Emulate a WSGI server by calling the close method on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
class AsyncClientHandler(BaseHandler):
"""An async version of ClientHandler."""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
async def __call__(self, scope):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware(is_async=True)
# Extract body file from the scope, if provided.
if '_body_file' in scope:
body_file = scope.pop('_body_file')
else:
body_file = FakePayload('')
request_started.disconnect(close_old_connections)
await sync_to_async(request_started.send)(sender=self.__class__, scope=scope)
request_started.connect(close_old_connections)
request = ASGIRequest(scope, body_file)
# Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably required
# for backwards compatibility with external tests against admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = await self.get_response_async(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
        # Attach the originating ASGI request to the response so that it can
        # be retrieved later.
response.asgi_request = request
# Emulate a server by calling the close method on completion.
if response.streaming:
response.streaming_content = await sync_to_async(closing_iterator_wrapper)(
response.streaming_content,
response.close,
)
else:
request_finished.disconnect(close_old_connections)
# Will fire request_finished.
await sync_to_async(response.close)()
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Store templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encode multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if value is None:
raise TypeError(
"Cannot encode None for key '%s' as POST data. Did you mean "
"to pass an empty string or omit the value?" % key
)
elif is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, str) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
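# Sketch of the wire format produced above (illustrative):
# encode_multipart('BoUnDaRyStRiNg', {'name': 'fred'}) yields, joined by CRLF:
#   --BoUnDaRyStRiNg
#   Content-Disposition: form-data; name="name"
#   (blank line)
#   fred
#   --BoUnDaRyStRiNg--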
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, 'name') and isinstance(file.name, str)
filename = os.path.basename(file.name) if file_has_string_name else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
filename = filename or key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
class RequestFactory:
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults):
self.json_encoder = json_encoder
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See https://www.python.org/dev/peps/pep-3333/#environ-variables
return {
'HTTP_COOKIE': '; '.join(sorted(
'%s=%s' % (morsel.key, morsel.coded_value)
for morsel in self.cookies.values()
)),
'PATH_INFO': '/',
'REMOTE_ADDR': '127.0.0.1',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
**self.defaults,
**request,
}
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match[1]
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _encode_json(self, data, content_type):
"""
Return encoded JSON if data is a dict, list, or tuple and content_type
is application/json.
"""
should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple))
return json.dumps(data, cls=self.json_encoder) if should_encode else data
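    # For example (a hedged sketch): rf.post('/items/', {'a': [1, 2]},
    # content_type='application/json') routes the dict through _encode_json(),
    # sending the body '{"a": [1, 2]}' rather than multipart form data.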
def _get_path(self, parsed):
path = parsed.path
# If there are parameters, add them
if parsed.params:
path += ";" + parsed.params
path = unquote_to_bytes(path)
# Replace the behavior where non-ASCII values in the WSGI environ are
# arbitrarily decoded with ISO-8859-1.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode('iso-8859-1')
def get(self, path, data=None, secure=False, **extra):
"""Construct a GET request."""
data = {} if data is None else data
return self.generic('GET', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"""Construct a POST request."""
data = self._encode_json({} if data is None else data, content_type)
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"""Construct a HEAD request."""
data = {} if data is None else data
return self.generic('HEAD', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def trace(self, path, secure=False, **extra):
"""Construct a TRACE request."""
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PUT request."""
data = self._encode_json(data, content_type)
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PATCH request."""
data = self._encode_json(data, content_type)
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a DELETE request."""
data = self._encode_json(data, content_type)
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': method,
'SERVER_PORT': '443' if secure else '80',
'wsgi.url_scheme': 'https' if secure else 'http',
}
if data:
r.update({
'CONTENT_LENGTH': str(len(data)),
'CONTENT_TYPE': content_type,
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
# WSGI requires latin-1 encoded strings. See get_path_info().
query_string = parsed[4].encode().decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
class AsyncRequestFactory(RequestFactory):
"""
Class that lets you create mock ASGI-like Request objects for use in
testing. Usage:
rf = AsyncRequestFactory()
get_request = await rf.get('/hello/')
post_request = await rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
including synchronous ones. The reason we have a separate class here is:
    a) it creates ASGIRequests rather than WSGIRequests, and
    b) AsyncClient can subclass it.
"""
def _base_scope(self, **request):
"""The base scope for a request."""
# This is a minimal valid ASGI scope, plus:
# - headers['cookie'] for cookie support,
# - 'client' often useful, see #8551.
scope = {
'asgi': {'version': '3.0'},
'type': 'http',
'http_version': '1.1',
'client': ['127.0.0.1', 0],
'server': ('testserver', '80'),
'scheme': 'http',
'method': 'GET',
'headers': [],
**self.defaults,
**request,
}
scope['headers'].append((
b'cookie',
b'; '.join(sorted(
('%s=%s' % (morsel.key, morsel.coded_value)).encode('ascii')
for morsel in self.cookies.values()
)),
))
return scope
def request(self, **request):
"""Construct a generic request object."""
# This is synchronous, which means all methods on this class are.
# AsyncClient, however, has an async request function, which makes all
# its methods async.
if '_body_file' in request:
body_file = request.pop('_body_file')
else:
body_file = FakePayload('')
return ASGIRequest(self._base_scope(**request), body_file)
def generic(
self, method, path, data='', content_type='application/octet-stream',
secure=False, **extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy.
data = force_bytes(data, settings.DEFAULT_CHARSET)
s = {
'method': method,
'path': self._get_path(parsed),
'server': ('127.0.0.1', '443' if secure else '80'),
'scheme': 'https' if secure else 'http',
'headers': [(b'host', b'testserver')],
}
if data:
s['headers'].extend([
                # The length must be the ASCII digits of len(data);
                # bytes(len(data)) would produce len(data) NUL bytes.
                (b'content-length', str(len(data)).encode('ascii')),
(b'content-type', content_type.encode('ascii')),
])
s['_body_file'] = FakePayload(data)
s.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the
# URL.
if not s.get('query_string'):
s['query_string'] = parsed[4]
return self.request(**s)
class ClientMixin:
"""
Mixin with common methods between Client and AsyncClient.
"""
def store_exc_info(self, **kwargs):
"""Store exceptions when they are generated by a view."""
self.exc_info = sys.exc_info()
def check_exception(self, response):
"""
Look for a signaled exception, clear the current context exception
data, re-raise the signaled exception, and clear the signaled exception
from the local cache.
"""
response.exc_info = self.exc_info
if self.exc_info:
_, exc_value, _ = self.exc_info
self.exc_info = None
if self.raise_request_exception:
raise exc_value
@property
def session(self):
"""Return the current session variables."""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
def login(self, **credentials):
"""
Set the Factory to appear as if it has successfully logged into a site.
Return True if login is possible or False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
return False
def force_login(self, user, backend=None):
def get_backend():
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, 'get_user'):
return backend_path
if backend is None:
backend = get_backend()
user.backend = backend
self._login(user, backend)
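    # Typical test usage (a sketch; `user` is any existing auth user):
    #   client = Client()
    #   client.force_login(user)
    #   response = client.get('/protected/')  # carries the session cookie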
def _login(self, user, backend=None):
from django.contrib.auth import login
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""Log out the user by removing the cookies and session object."""
from django.contrib.auth import get_user, logout
request = HttpRequest()
if self.session:
request.session = self.session
request.user = get_user(request)
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, '_json'):
if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')):
raise ValueError(
'Content-Type header is "%s", not "application/json"'
% response.get('Content-Type')
)
response._json = json.loads(response.content.decode(response.charset), **extra)
return response._json
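# Illustrative use of the helper above (assumes a view returning JsonResponse):
#   response = client.get('/api/items/')
#   payload = response.json()  # parsed once, then cached on the response
# A non-JSON Content-Type raises ValueError instead of silently parsing.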
class Client(ClientMixin, RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
super().__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
def request(self, **request):
"""
The master request method. Compose the environment dictionary and pass
to the handler, return the result of the handler. Assume defaults for
the query environment, which can be overridden using the arguments to
the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = self.handler(environ)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get('templates', [])
response.context = data.get('context')
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
def get(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using GET."""
self.extra = extra
response = super().get(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""Request a response from the server using POST."""
self.extra = extra
response = super().post(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using HEAD."""
self.extra = extra
response = super().head(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Request a response from the server using OPTIONS."""
self.extra = extra
response = super().options(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PUT."""
self.extra = extra
response = super().put(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PATCH."""
self.extra = extra
response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a DELETE request to the server."""
self.extra = extra
response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""Send a TRACE request to the server."""
self.extra = extra
response = super().trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def _handle_redirects(self, response, data='', content_type='', **extra):
"""
Follow any redirects by requesting responses from the server using GET.
"""
response.redirect_chain = []
redirect_status_codes = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
while response.status_code in redirect_status_codes:
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
# Prepend the request path to handle relative path redirects
path = url.path
if not path.startswith('/'):
path = urljoin(response.request['PATH_INFO'], path)
            if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT):
                # Preserve the request method and body post-redirect for
                # 307/308 responses.
                request_method = getattr(self, response.request['REQUEST_METHOD'].lower())
            else:
                # Other redirects are followed with a GET carrying the query
                # string of the redirect URL; these assignments must stay in
                # this branch so 307/308 keep the original data untouched.
                request_method = self.get
                data = QueryDict(url.query)
                content_type = None
response = request_method(path, data=data, content_type=content_type, follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
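# Redirect-following sketch (illustrative): if '/old/' answers 302 to '/new/',
#   response = client.get('/old/', follow=True)
#   response.redirect_chain == [('/new/', 302)]
# and response is the final page fetched from '/new/'.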
class AsyncClient(ClientMixin, AsyncRequestFactory):
"""
An async version of Client that creates ASGIRequests and calls through an
async request path.
Does not currently support "follow" on its methods.
"""
def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
super().__init__(**defaults)
self.handler = AsyncClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
async def request(self, **request):
"""
The master request method. Compose the scope dictionary and pass to the
handler, return the result of the handler. Assume defaults for the
query environment, which can be overridden using the arguments to the
request.
"""
if 'follow' in request:
raise NotImplementedError(
'AsyncClient request methods do not accept the follow '
'parameter.'
)
scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = 'template-render-%s' % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = 'request-exception-%s' % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = await self.handler(scope)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get('templates', [])
response.context = data.get('context')
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
response.resolver_match = SimpleLazyObject(lambda: resolve(request['path']))
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
|
e730da1525beca74e84e63120120758ec7c43b2b7d3ce1043ab9cf7a8c749d78 | import asyncio
import difflib
import json
import posixpath
import sys
import threading
import unittest
import warnings
from collections import Counter
from contextlib import contextmanager
from copy import copy, deepcopy
from difflib import get_close_matches
from functools import wraps
from unittest.suite import _DebugResult
from unittest.util import safe_repr
from urllib.parse import (
parse_qsl, unquote, urlencode, urljoin, urlparse, urlsplit, urlunparse,
)
from urllib.request import url2pathname
from asgiref.sync import async_to_sync
from django.apps import apps
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files import locks
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.core.management import call_command
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIRequestHandler
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.forms.fields import CharField
from django.http import QueryDict
from django.http.request import split_domain_port, validate_host
from django.test.client import AsyncClient, Client
from django.test.html import HTMLParseError, parse_html
from django.test.signals import setting_changed, template_rendered
from django.test.utils import (
CaptureQueriesContext, ContextList, compare_xml, modify_settings,
override_settings,
)
from django.utils.deprecation import RemovedInDjango41Warning
from django.utils.functional import classproperty
from django.views.static import serve
__all__ = ('TestCase', 'TransactionTestCase',
'SimpleTestCase', 'skipIfDBFeature', 'skipUnlessDBFeature')
def to_list(value):
"""
Put value into a list if it's not already one. Return an empty list if
value is None.
"""
if value is None:
value = []
elif not isinstance(value, list):
value = [value]
return value
def assert_and_parse_html(self, html, user_msg, msg):
try:
dom = parse_html(html)
except HTMLParseError as e:
standardMsg = '%s\n%s' % (msg, e)
self.fail(self._formatMessage(user_msg, standardMsg))
return dom
class _AssertNumQueriesContext(CaptureQueriesContext):
def __init__(self, test_case, num, connection):
self.test_case = test_case
self.num = num
super().__init__(connection)
def __exit__(self, exc_type, exc_value, traceback):
super().__exit__(exc_type, exc_value, traceback)
if exc_type is not None:
return
executed = len(self)
self.test_case.assertEqual(
executed, self.num,
"%d queries executed, %d expected\nCaptured queries were:\n%s" % (
executed, self.num,
'\n'.join(
'%d. %s' % (i, query['sql']) for i, query in enumerate(self.captured_queries, start=1)
)
)
)
class _AssertTemplateUsedContext:
def __init__(self, test_case, template_name):
self.test_case = test_case
self.template_name = template_name
self.rendered_templates = []
self.rendered_template_names = []
self.context = ContextList()
def on_template_render(self, sender, signal, template, context, **kwargs):
self.rendered_templates.append(template)
self.rendered_template_names.append(template.name)
self.context.append(copy(context))
def test(self):
return self.template_name in self.rendered_template_names
def message(self):
return '%s was not rendered.' % self.template_name
def __enter__(self):
template_rendered.connect(self.on_template_render)
return self
def __exit__(self, exc_type, exc_value, traceback):
template_rendered.disconnect(self.on_template_render)
if exc_type is not None:
return
if not self.test():
message = self.message()
if self.rendered_templates:
message += ' Following templates were rendered: %s' % (
', '.join(self.rendered_template_names)
)
else:
message += ' No template was rendered.'
self.test_case.fail(message)
class _AssertTemplateNotUsedContext(_AssertTemplateUsedContext):
def test(self):
return self.template_name not in self.rendered_template_names
def message(self):
return '%s was rendered.' % self.template_name
class _DatabaseFailure:
def __init__(self, wrapped, message):
self.wrapped = wrapped
self.message = message
def __call__(self):
raise AssertionError(self.message)
class SimpleTestCase(unittest.TestCase):
# The class we'll use for the test client self.client.
# Can be overridden in derived classes.
client_class = Client
async_client_class = AsyncClient
_overridden_settings = None
_modified_settings = None
databases = set()
_disallowed_database_msg = (
'Database %(operation)s to %(alias)r are not allowed in SimpleTestCase '
'subclasses. Either subclass TestCase or TransactionTestCase to ensure '
'proper test isolation or add %(alias)r to %(test)s.databases to silence '
'this failure.'
)
_disallowed_connection_methods = [
('connect', 'connections'),
('temporary_connection', 'connections'),
('cursor', 'queries'),
('chunked_cursor', 'queries'),
]
@classmethod
def setUpClass(cls):
super().setUpClass()
if cls._overridden_settings:
cls._cls_overridden_context = override_settings(**cls._overridden_settings)
cls._cls_overridden_context.enable()
if cls._modified_settings:
cls._cls_modified_context = modify_settings(cls._modified_settings)
cls._cls_modified_context.enable()
cls._add_databases_failures()
@classmethod
def _validate_databases(cls):
if cls.databases == '__all__':
return frozenset(connections)
for alias in cls.databases:
if alias not in connections:
message = '%s.%s.databases refers to %r which is not defined in settings.DATABASES.' % (
cls.__module__,
cls.__qualname__,
alias,
)
close_matches = get_close_matches(alias, list(connections))
if close_matches:
message += ' Did you mean %r?' % close_matches[0]
raise ImproperlyConfigured(message)
return frozenset(cls.databases)
@classmethod
def _add_databases_failures(cls):
cls.databases = cls._validate_databases()
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, operation in cls._disallowed_connection_methods:
message = cls._disallowed_database_msg % {
'test': '%s.%s' % (cls.__module__, cls.__qualname__),
'alias': alias,
'operation': operation,
}
method = getattr(connection, name)
setattr(connection, name, _DatabaseFailure(method, message))
@classmethod
def _remove_databases_failures(cls):
for alias in connections:
if alias in cls.databases:
continue
connection = connections[alias]
for name, _ in cls._disallowed_connection_methods:
method = getattr(connection, name)
setattr(connection, name, method.wrapped)
@classmethod
def tearDownClass(cls):
cls._remove_databases_failures()
if hasattr(cls, '_cls_modified_context'):
cls._cls_modified_context.disable()
delattr(cls, '_cls_modified_context')
if hasattr(cls, '_cls_overridden_context'):
cls._cls_overridden_context.disable()
delattr(cls, '_cls_overridden_context')
super().tearDownClass()
def __call__(self, result=None):
"""
Wrapper around default __call__ method to perform common Django test
set up. This means that user-defined Test Cases aren't required to
include a call to super().setUp().
"""
self._setup_and_call(result)
def debug(self):
"""Perform the same as __call__(), without catching the exception."""
debug_result = _DebugResult()
self._setup_and_call(debug_result, debug=True)
def _setup_and_call(self, result, debug=False):
"""
Perform the following in order: pre-setup, run test, post-teardown,
skipping pre/post hooks if test is set to be skipped.
If debug=True, reraise any errors in setup and use super().debug()
instead of __call__() to run the test.
"""
testMethod = getattr(self, self._testMethodName)
skipped = (
getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)
)
# Convert async test methods.
if asyncio.iscoroutinefunction(testMethod):
setattr(self, self._testMethodName, async_to_sync(testMethod))
if not skipped:
try:
self._pre_setup()
except Exception:
if debug:
raise
result.addError(self, sys.exc_info())
return
if debug:
super().debug()
else:
super().__call__(result)
if not skipped:
try:
self._post_teardown()
except Exception:
if debug:
raise
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
"""
Perform pre-test setup:
* Create a test client.
* Clear the mail test outbox.
"""
self.client = self.client_class()
self.async_client = self.async_client_class()
mail.outbox = []
def _post_teardown(self):
"""Perform post-test things."""
pass
def settings(self, **kwargs):
"""
A context manager that temporarily sets a setting and reverts to the
original value when exiting the context.
"""
return override_settings(**kwargs)
def modify_settings(self, **kwargs):
"""
        A context manager that temporarily applies changes to a list setting
        and reverts to the original value when exiting the context.
"""
return modify_settings(**kwargs)
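    # Editorial usage sketch (not part of Django): both helpers are intended
    # to be used as context managers inside a test method; the setting names
    # below are only examples.
    #
    #     def test_with_temporary_settings(self):
    #         with self.settings(USE_TZ=False):
    #             ...  # code under test sees USE_TZ=False
    #         with self.modify_settings(ALLOWED_HOSTS={'append': 'example.com'}):
    #             ...  # ALLOWED_HOSTS temporarily includes 'example.com'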
def assertRedirects(self, response, expected_url, status_code=302,
target_status_code=200, msg_prefix='',
fetch_redirect_response=True):
"""
Assert that a response redirected to a specific URL and that the
redirect URL can be loaded.
Won't work for external links since it uses the test client to do a
request (use fetch_redirect_response=False to check such links without
fetching them).
"""
if msg_prefix:
msg_prefix += ": "
if hasattr(response, 'redirect_chain'):
# The request was a followed redirect
self.assertTrue(
response.redirect_chain,
msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
% (response.status_code, status_code)
)
self.assertEqual(
response.redirect_chain[0][1], status_code,
msg_prefix + "Initial response didn't redirect as expected: Response code was %d (expected %d)"
% (response.redirect_chain[0][1], status_code)
)
url, status_code = response.redirect_chain[-1]
scheme, netloc, path, query, fragment = urlsplit(url)
self.assertEqual(
response.status_code, target_status_code,
msg_prefix + "Response didn't redirect as expected: Final Response code was %d (expected %d)"
% (response.status_code, target_status_code)
)
else:
# Not a followed redirect
self.assertEqual(
response.status_code, status_code,
msg_prefix + "Response didn't redirect as expected: Response code was %d (expected %d)"
% (response.status_code, status_code)
)
url = response.url
scheme, netloc, path, query, fragment = urlsplit(url)
# Prepend the request path to handle relative path redirects.
if not path.startswith('/'):
url = urljoin(response.request['PATH_INFO'], url)
path = urljoin(response.request['PATH_INFO'], path)
if fetch_redirect_response:
# netloc might be empty, or in cases where Django tests the
# HTTP scheme, the convention is for netloc to be 'testserver'.
# Trust both as "internal" URLs here.
domain, port = split_domain_port(netloc)
if domain and not validate_host(domain, settings.ALLOWED_HOSTS):
raise ValueError(
"The test client is unable to fetch remote URLs (got %s). "
"If the host is served by Django, add '%s' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
% (url, domain)
)
# Get the redirection page, using the same client that was used
# to obtain the original response.
extra = response.client.extra or {}
redirect_response = response.client.get(
path,
QueryDict(query),
secure=(scheme == 'https'),
**extra,
)
self.assertEqual(
redirect_response.status_code, target_status_code,
msg_prefix + "Couldn't retrieve redirection page '%s': response code was %d (expected %d)"
% (path, redirect_response.status_code, target_status_code)
)
self.assertURLEqual(
url, expected_url,
msg_prefix + "Response redirected to '%s', expected '%s'" % (url, expected_url)
)
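    # Editorial usage sketch (hypothetical URLs): a typical call checks both
    # the redirect status and that the target page loads.
    #
    #     response = self.client.post('/accounts/login/', {'username': 'u', 'password': 'p'})
    #     self.assertRedirects(response, '/accounts/profile/')
    #
    # For redirects to hosts outside ALLOWED_HOSTS, skip fetching the target:
    #
    #     self.assertRedirects(response, 'https://example.com/', fetch_redirect_response=False)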
def assertURLEqual(self, url1, url2, msg_prefix=''):
"""
Assert that two URLs are the same, ignoring the order of query string
parameters except for parameters with the same name.
For example, /path/?x=1&y=2 is equal to /path/?y=2&x=1, but
/path/?a=1&a=2 isn't equal to /path/?a=2&a=1.
"""
def normalize(url):
"""Sort the URL's query string parameters."""
url = str(url) # Coerce reverse_lazy() URLs.
scheme, netloc, path, params, query, fragment = urlparse(url)
query_parts = sorted(parse_qsl(query))
return urlunparse((scheme, netloc, path, params, urlencode(query_parts), fragment))
self.assertEqual(
normalize(url1), normalize(url2),
msg_prefix + "Expected '%s' to equal '%s'." % (url1, url2)
)
def _assert_contains(self, response, text, status_code, msg_prefix, html):
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
response.render()
if msg_prefix:
msg_prefix += ": "
self.assertEqual(
response.status_code, status_code,
msg_prefix + "Couldn't retrieve content: Response code was %d"
" (expected %d)" % (response.status_code, status_code)
)
if response.streaming:
content = b''.join(response.streaming_content)
else:
content = response.content
if not isinstance(text, bytes) or html:
text = str(text)
content = content.decode(response.charset)
text_repr = "'%s'" % text
else:
text_repr = repr(text)
if html:
content = assert_and_parse_html(self, content, None, "Response's content is not valid HTML:")
text = assert_and_parse_html(self, text, None, "Second argument is not valid HTML:")
real_count = content.count(text)
return (text_repr, real_count, msg_prefix)
def assertContains(self, response, text, count=None, status_code=200, msg_prefix='', html=False):
"""
        Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
``text`` occurs ``count`` times in the content of the response.
If ``count`` is None, the count doesn't matter - the assertion is true
if the text occurs at least once in the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of %s in response (expected %d)" % (real_count, text_repr, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find %s in response" % text_repr)
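    # Editorial usage sketch (hypothetical path and content): count and html
    # refine the comparison.
    #
    #     response = self.client.get('/articles/')
    #     self.assertContains(response, 'Latest articles', count=1)
    #     self.assertContains(response, '<h1>Articles</h1>', html=True)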
def assertNotContains(self, response, text, status_code=200, msg_prefix='', html=False):
"""
Assert that a response indicates that some content was retrieved
        successfully (i.e., the HTTP status code was as expected) and that
        ``text`` doesn't occur in the content of the response.
"""
text_repr, real_count, msg_prefix = self._assert_contains(
response, text, status_code, msg_prefix, html)
self.assertEqual(real_count, 0, msg_prefix + "Response should not contain %s" % text_repr)
def assertFormError(self, response, form, field, errors, msg_prefix=''):
"""
Assert that a form used to render the response has a specific field
error.
"""
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + "Response did not use any contexts to render the response")
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_form = False
for i, context in enumerate(contexts):
if form not in context:
continue
found_form = True
for err in errors:
if field:
if field in context[form].errors:
field_errors = context[form].errors[field]
self.assertTrue(
err in field_errors,
msg_prefix + "The field '%s' on form '%s' in"
" context %d does not contain the error '%s'"
" (actual errors: %s)" %
(field, form, i, err, repr(field_errors))
)
elif field in context[form].fields:
self.fail(
msg_prefix + "The field '%s' on form '%s' in context %d contains no errors" %
(field, form, i)
)
else:
self.fail(
msg_prefix + "The form '%s' in context %d does not contain the field '%s'" %
(form, i, field)
)
else:
non_field_errors = context[form].non_field_errors()
self.assertTrue(
err in non_field_errors,
msg_prefix + "The form '%s' in context %d does not"
" contain the non-field error '%s'"
" (actual errors: %s)" %
(form, i, err, non_field_errors or 'none')
)
if not found_form:
self.fail(msg_prefix + "The form '%s' was not used to render the response" % form)
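    # Editorial usage sketch: 'form' is the template context name of the
    # bound form; the path and field are hypothetical, and the message is
    # EmailField's default error.
    #
    #     response = self.client.post('/contact/', {'email': 'not-an-email'})
    #     self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')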
def assertFormsetError(self, response, formset, form_index, field, errors,
msg_prefix=''):
"""
Assert that a formset used to render the response has a specific error.
For field errors, specify the ``form_index`` and the ``field``.
For non-field errors, specify the ``form_index`` and the ``field`` as
None.
For non-form errors, specify ``form_index`` as None and the ``field``
as None.
"""
# Add punctuation to msg_prefix
if msg_prefix:
msg_prefix += ": "
# Put context(s) into a list to simplify processing.
contexts = to_list(response.context)
if not contexts:
self.fail(msg_prefix + 'Response did not use any contexts to '
'render the response')
# Put error(s) into a list to simplify processing.
errors = to_list(errors)
# Search all contexts for the error.
found_formset = False
for i, context in enumerate(contexts):
if formset not in context:
continue
found_formset = True
for err in errors:
if field is not None:
if field in context[formset].forms[form_index].errors:
field_errors = context[formset].forms[form_index].errors[field]
self.assertTrue(
err in field_errors,
msg_prefix + "The field '%s' on formset '%s', "
"form %d in context %d does not contain the "
"error '%s' (actual errors: %s)" %
(field, formset, form_index, i, err, repr(field_errors))
)
elif field in context[formset].forms[form_index].fields:
self.fail(
msg_prefix + "The field '%s' on formset '%s', form %d in context %d contains no errors"
% (field, formset, form_index, i)
)
else:
self.fail(
msg_prefix + "The formset '%s', form %d in context %d does not contain the field '%s'"
% (formset, form_index, i, field)
)
elif form_index is not None:
non_field_errors = context[formset].forms[form_index].non_field_errors()
self.assertFalse(
not non_field_errors,
msg_prefix + "The formset '%s', form %d in context %d "
"does not contain any non-field errors." % (formset, form_index, i)
)
self.assertTrue(
err in non_field_errors,
msg_prefix + "The formset '%s', form %d in context %d "
"does not contain the non-field error '%s' (actual errors: %s)"
% (formset, form_index, i, err, repr(non_field_errors))
)
else:
non_form_errors = context[formset].non_form_errors()
self.assertFalse(
not non_form_errors,
msg_prefix + "The formset '%s' in context %d does not "
"contain any non-form errors." % (formset, i)
)
self.assertTrue(
err in non_form_errors,
msg_prefix + "The formset '%s' in context %d does not "
"contain the non-form error '%s' (actual errors: %s)"
% (formset, i, err, repr(non_form_errors))
)
if not found_formset:
self.fail(msg_prefix + "The formset '%s' was not used to render the response" % formset)
def _assert_template_used(self, response, template_name, msg_prefix):
if response is None and template_name is None:
raise TypeError('response and/or template_name argument must be provided')
if msg_prefix:
msg_prefix += ": "
if template_name is not None and response is not None and not hasattr(response, 'templates'):
raise ValueError(
"assertTemplateUsed() and assertTemplateNotUsed() are only "
"usable on responses fetched using the Django test Client."
)
if not hasattr(response, 'templates') or (response is None and template_name):
if response:
template_name = response
response = None
# use this template with context manager
return template_name, None, msg_prefix
template_names = [t.name for t in response.templates if t.name is not None]
return None, template_names, msg_prefix
def assertTemplateUsed(self, response=None, template_name=None, msg_prefix='', count=None):
"""
Assert that the template with the provided name was used in rendering
the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix)
if context_mgr_template:
# Use assertTemplateUsed as context manager.
return _AssertTemplateUsedContext(self, context_mgr_template)
if not template_names:
self.fail(msg_prefix + "No templates used to render the response")
self.assertTrue(
template_name in template_names,
msg_prefix + "Template '%s' was not a template used to render"
" the response. Actual template(s) used: %s"
% (template_name, ', '.join(template_names))
)
if count is not None:
self.assertEqual(
template_names.count(template_name), count,
msg_prefix + "Template '%s' was expected to be rendered %d "
"time(s) but was actually rendered %d time(s)."
% (template_name, count, template_names.count(template_name))
)
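    # Editorial usage sketch (hypothetical template names): with a response
    # the assertion is immediate; with only a template name it acts as a
    # context manager around arbitrary rendering code.
    #
    #     response = self.client.get('/articles/')
    #     self.assertTemplateUsed(response, 'articles/index.html')
    #
    #     from django.template.loader import render_to_string
    #     with self.assertTemplateUsed('articles/index.html'):
    #         render_to_string('articles/index.html', {})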
def assertTemplateNotUsed(self, response=None, template_name=None, msg_prefix=''):
"""
Assert that the template with the provided name was NOT used in
rendering the response. Also usable as context manager.
"""
context_mgr_template, template_names, msg_prefix = self._assert_template_used(
response, template_name, msg_prefix
)
if context_mgr_template:
# Use assertTemplateNotUsed as context manager.
return _AssertTemplateNotUsedContext(self, context_mgr_template)
self.assertFalse(
template_name in template_names,
msg_prefix + "Template '%s' was used unexpectedly in rendering the response" % template_name
)
@contextmanager
def _assert_raises_or_warns_cm(self, func, cm_attr, expected_exception, expected_message):
with func(expected_exception) as cm:
yield cm
self.assertIn(expected_message, str(getattr(cm, cm_attr)))
def _assertFooMessage(self, func, cm_attr, expected_exception, expected_message, *args, **kwargs):
callable_obj = None
if args:
callable_obj, *args = args
cm = self._assert_raises_or_warns_cm(func, cm_attr, expected_exception, expected_message)
# Assertion used in context manager fashion.
if callable_obj is None:
return cm
# Assertion was passed a callable.
with cm:
callable_obj(*args, **kwargs)
def assertRaisesMessage(self, expected_exception, expected_message, *args, **kwargs):
"""
Assert that expected_message is found in the message of a raised
exception.
Args:
expected_exception: Exception class expected to be raised.
expected_message: expected error message string value.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
"""
return self._assertFooMessage(
self.assertRaises, 'exception', expected_exception, expected_message,
*args, **kwargs
)
def assertWarnsMessage(self, expected_warning, expected_message, *args, **kwargs):
"""
Same as assertRaisesMessage but for assertWarns() instead of
assertRaises().
"""
return self._assertFooMessage(
self.assertWarns, 'warning', expected_warning, expected_message,
*args, **kwargs
)
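    # Editorial usage sketch: both assertions accept either a context-manager
    # style call or a callable plus its arguments.
    #
    #     with self.assertRaisesMessage(ValueError, 'invalid literal'):
    #         int('not a number')
    #     self.assertRaisesMessage(ValueError, 'invalid literal', int, 'not a number')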
def assertFieldOutput(self, fieldclass, valid, invalid, field_args=None,
field_kwargs=None, empty_value=''):
"""
Assert that a form field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in empty_values
"""
if field_args is None:
field_args = []
if field_kwargs is None:
field_kwargs = {}
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args, **{**field_kwargs, 'required': False})
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
with self.assertRaises(ValidationError) as context_manager:
required.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
with self.assertRaises(ValidationError) as context_manager:
optional.clean(input)
self.assertEqual(context_manager.exception.messages, errors)
# test required inputs
error_required = [required.error_messages['required']]
for e in required.empty_values:
with self.assertRaises(ValidationError) as context_manager:
required.clean(e)
self.assertEqual(context_manager.exception.messages, error_required)
self.assertEqual(optional.clean(e), empty_value)
# test that max_length and min_length are always accepted
if issubclass(fieldclass, CharField):
field_kwargs.update({'min_length': 2, 'max_length': 20})
self.assertIsInstance(fieldclass(*field_args, **field_kwargs), fieldclass)
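    # Editorial usage sketch: exercising a form field against valid and
    # invalid inputs (EmailField is just an example; the message is its
    # default error).
    #
    #     from django.forms import EmailField
    #     self.assertFieldOutput(
    #         EmailField,
    #         valid={'a@example.com': 'a@example.com'},
    #         invalid={'aaa': ['Enter a valid email address.']},
    #     )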
def assertHTMLEqual(self, html1, html2, msg=None):
"""
Assert that two HTML snippets are semantically the same.
Whitespace in most cases is ignored, and attribute ordering is not
significant. The arguments must be valid HTML.
"""
dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
if dom1 != dom2:
standardMsg = '%s != %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
str(dom1).splitlines(), str(dom2).splitlines(),
)))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertHTMLNotEqual(self, html1, html2, msg=None):
"""Assert that two HTML snippets are not semantically equivalent."""
dom1 = assert_and_parse_html(self, html1, msg, 'First argument is not valid HTML:')
dom2 = assert_and_parse_html(self, html2, msg, 'Second argument is not valid HTML:')
if dom1 == dom2:
standardMsg = '%s == %s' % (
safe_repr(dom1, True), safe_repr(dom2, True))
self.fail(self._formatMessage(msg, standardMsg))
def assertInHTML(self, needle, haystack, count=None, msg_prefix=''):
needle = assert_and_parse_html(self, needle, None, 'First argument is not valid HTML:')
haystack = assert_and_parse_html(self, haystack, None, 'Second argument is not valid HTML:')
real_count = haystack.count(needle)
if count is not None:
self.assertEqual(
real_count, count,
msg_prefix + "Found %d instances of '%s' in response (expected %d)" % (real_count, needle, count)
)
else:
self.assertTrue(real_count != 0, msg_prefix + "Couldn't find '%s' in response" % needle)
def assertJSONEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
            except json.JSONDecodeError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertEqual(data, expected_data, msg=msg)
def assertJSONNotEqual(self, raw, expected_data, msg=None):
"""
Assert that the JSON fragments raw and expected_data are not equal.
        Usual JSON non-significant whitespace rules apply as the heavy
        lifting is delegated to the json library.
"""
try:
data = json.loads(raw)
except json.JSONDecodeError:
self.fail("First argument is not valid JSON: %r" % raw)
if isinstance(expected_data, str):
try:
expected_data = json.loads(expected_data)
except json.JSONDecodeError:
self.fail("Second argument is not valid JSON: %r" % expected_data)
self.assertNotEqual(data, expected_data, msg=msg)
def assertXMLEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are semantically the same.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if not result:
standardMsg = '%s != %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
diff = ('\n' + '\n'.join(
difflib.ndiff(xml1.splitlines(), xml2.splitlines())
))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertXMLNotEqual(self, xml1, xml2, msg=None):
"""
Assert that two XML snippets are not semantically equivalent.
Whitespace in most cases is ignored and attribute ordering is not
significant. The arguments must be valid XML.
"""
try:
result = compare_xml(xml1, xml2)
except Exception as e:
standardMsg = 'First or second argument is not valid XML\n%s' % e
self.fail(self._formatMessage(msg, standardMsg))
else:
if result:
standardMsg = '%s == %s' % (safe_repr(xml1, True), safe_repr(xml2, True))
self.fail(self._formatMessage(msg, standardMsg))
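# Editorial usage sketch (not part of Django): the markup-comparison helpers
# above in a hypothetical SimpleTestCase subclass.
#
#     class MarkupAssertionTests(SimpleTestCase):
#         def test_html_and_json_helpers(self):
#             self.assertHTMLEqual('<p class="a" id="b">x</p>', '<p id="b" class="a">x</p>')
#             self.assertInHTML('<li>two</li>', '<ul><li>one</li><li>two</li></ul>')
#             self.assertJSONEqual('{"a": 1, "b": 2}', {'b': 2, 'a': 1})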
class TransactionTestCase(SimpleTestCase):
# Subclasses can ask for resetting of auto increment sequence before each
# test case
reset_sequences = False
# Subclasses can enable only a subset of apps for faster tests
available_apps = None
# Subclasses can define fixtures which will be automatically installed.
fixtures = None
databases = {DEFAULT_DB_ALIAS}
_disallowed_database_msg = (
'Database %(operation)s to %(alias)r are not allowed in this test. '
'Add %(alias)r to %(test)s.databases to ensure proper test isolation '
'and silence this failure.'
)
# If transactions aren't available, Django will serialize the database
# contents into a fixture during setup and flush and reload them
# during teardown (as flush does not restore data from migrations).
# This can be slow; this flag allows enabling on a per-case basis.
serialized_rollback = False
def _pre_setup(self):
"""
Perform pre-test setup:
* If the class has an 'available_apps' attribute, restrict the app
registry to these applications, then fire the post_migrate signal --
it must run with the correct set of applications for the test case.
* If the class has a 'fixtures' attribute, install those fixtures.
"""
super()._pre_setup()
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
setting_changed.send(
sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=self.available_apps,
enter=True,
)
for db_name in self._databases_names(include_mirrors=False):
emit_post_migrate_signal(verbosity=0, interactive=False, db=db_name)
try:
self._fixture_setup()
except Exception:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(
sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False,
)
raise
# Clear the queries_log so that it's less likely to overflow (a single
# test probably won't execute 9K queries). If queries_log overflows,
# then assertNumQueries() doesn't work.
for db_name in self._databases_names(include_mirrors=False):
connections[db_name].queries_log.clear()
@classmethod
def _databases_names(cls, include_mirrors=True):
# Only consider allowed database aliases, including mirrors or not.
return [
alias for alias in connections
if alias in cls.databases and (
include_mirrors or not connections[alias].settings_dict['TEST']['MIRROR']
)
]
def _reset_sequences(self, db_name):
conn = connections[db_name]
if conn.features.supports_sequence_reset:
sql_list = conn.ops.sequence_reset_by_name_sql(
no_style(), conn.introspection.sequence_list())
if sql_list:
with transaction.atomic(using=db_name):
with conn.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def _fixture_setup(self):
for db_name in self._databases_names(include_mirrors=False):
# Reset sequences
if self.reset_sequences:
self._reset_sequences(db_name)
# Provide replica initial data from migrated apps, if needed.
if self.serialized_rollback and hasattr(connections[db_name], "_test_serialized_contents"):
if self.available_apps is not None:
apps.unset_available_apps()
connections[db_name].creation.deserialize_db_from_string(
connections[db_name]._test_serialized_contents
)
if self.available_apps is not None:
apps.set_available_apps(self.available_apps)
if self.fixtures:
# We have to use this slightly awkward syntax due to the fact
# that we're using *args and **kwargs together.
call_command('loaddata', *self.fixtures,
**{'verbosity': 0, 'database': db_name})
def _should_reload_connections(self):
return True
def _post_teardown(self):
"""
Perform post-test things:
* Flush the contents of the database to leave a clean slate. If the
class has an 'available_apps' attribute, don't fire post_migrate.
* Force-close the connection so the next test gets a clean cursor.
"""
try:
self._fixture_teardown()
super()._post_teardown()
if self._should_reload_connections():
# Some DB cursors include SQL statements as part of cursor
# creation. If you have a test that does a rollback, the effect
# of these statements is lost, which can affect the operation of
# tests (e.g., losing a timezone setting causing objects to be
# created with the wrong time). To make sure this doesn't
# happen, get a clean connection at the start of every test.
for conn in connections.all():
conn.close()
finally:
if self.available_apps is not None:
apps.unset_available_apps()
setting_changed.send(sender=settings._wrapped.__class__,
setting='INSTALLED_APPS',
value=settings.INSTALLED_APPS,
enter=False)
def _fixture_teardown(self):
# Allow TRUNCATE ... CASCADE and don't emit the post_migrate signal
# when flushing only a subset of the apps
for db_name in self._databases_names(include_mirrors=False):
# Flush the database
inhibit_post_migrate = (
self.available_apps is not None or
( # Inhibit the post_migrate signal when using serialized
# rollback to avoid trying to recreate the serialized data.
self.serialized_rollback and
hasattr(connections[db_name], '_test_serialized_contents')
)
)
call_command('flush', verbosity=0, interactive=False,
database=db_name, reset_sequences=False,
allow_cascade=self.available_apps is not None,
inhibit_post_migrate=inhibit_post_migrate)
def assertQuerysetEqual(self, qs, values, transform=repr, ordered=True, msg=None):
items = map(transform, qs)
if not ordered:
return self.assertEqual(Counter(items), Counter(values), msg=msg)
values = list(values)
        # For example, qs.iterator() could be passed as qs, but it does not
        # have an 'ordered' attribute.
        if len(values) > 1 and hasattr(qs, 'ordered') and not qs.ordered:
            raise ValueError("Trying to compare a non-ordered queryset "
                             "against more than one ordered value")
return self.assertEqual(list(items), values, msg=msg)
def assertNumQueries(self, num, func=None, *args, using=DEFAULT_DB_ALIAS, **kwargs):
conn = connections[using]
context = _AssertNumQueriesContext(self, num, conn)
if func is None:
return context
with context:
func(*args, **kwargs)
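# Editorial usage sketch (hypothetical Author model): assertNumQueries and
# assertQuerysetEqual inside a test method.
#
#     def test_author_listing(self):
#         with self.assertNumQueries(1):
#             list(Author.objects.all())
#         self.assertQuerysetEqual(
#             Author.objects.order_by('name'),
#             ['Alice', 'Bob'],
#             transform=lambda a: a.name,
#         )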
def connections_support_transactions(aliases=None):
"""
Return whether or not all (or specified) connections support
transactions.
"""
conns = connections.all() if aliases is None else (connections[alias] for alias in aliases)
return all(conn.features.supports_transactions for conn in conns)
class TestData:
"""
Descriptor to provide TestCase instance isolation for attributes assigned
during the setUpTestData() phase.
Allow safe alteration of objects assigned in setUpTestData() by test
methods by exposing deep copies instead of the original objects.
Objects are deep copied using a memo kept on the test case instance in
order to maintain their original relationships.
"""
memo_attr = '_testdata_memo'
def __init__(self, name, data):
self.name = name
self.data = data
def get_memo(self, testcase):
try:
memo = getattr(testcase, self.memo_attr)
except AttributeError:
memo = {}
setattr(testcase, self.memo_attr, memo)
return memo
def __get__(self, instance, owner):
if instance is None:
return self.data
memo = self.get_memo(instance)
try:
data = deepcopy(self.data, memo)
except TypeError:
# RemovedInDjango41Warning.
msg = (
"Assigning objects which don't support copy.deepcopy() during "
"setUpTestData() is deprecated. Either assign the %s "
"attribute during setUpClass() or setUp(), or add support for "
"deepcopy() to %s.%s.%s."
) % (
self.name,
owner.__module__,
owner.__qualname__,
self.name,
)
warnings.warn(msg, category=RemovedInDjango41Warning, stacklevel=2)
data = self.data
setattr(instance, self.name, data)
return data
def __repr__(self):
return '<TestData: name=%r, data=%r>' % (self.name, self.data)
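# Editorial usage sketch (hypothetical Author model): objects created in
# setUpTestData() are wrapped in the TestData descriptor above, so each test
# method works on an isolated deep copy.
#
#     class AuthorTests(TestCase):
#         @classmethod
#         def setUpTestData(cls):
#             cls.author = Author.objects.create(name='Alice')
#
#         def test_rename_does_not_leak_into_other_tests(self):
#             self.author.name = 'Bob'  # mutates this test's copy only
#             self.author.save()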
class TestCase(TransactionTestCase):
"""
Similar to TransactionTestCase, but use `transaction.atomic()` to achieve
test isolation.
In most situations, TestCase should be preferred to TransactionTestCase as
it allows faster execution. However, there are some situations where using
TransactionTestCase might be necessary (e.g. testing some transactional
behavior).
On database backends with no transaction support, TestCase behaves as
TransactionTestCase.
"""
@classmethod
def _enter_atomics(cls):
"""Open atomic blocks for multiple databases."""
atomics = {}
for db_name in cls._databases_names():
atomics[db_name] = transaction.atomic(using=db_name)
atomics[db_name].__enter__()
return atomics
@classmethod
def _rollback_atomics(cls, atomics):
"""Rollback atomic blocks opened by the previous method."""
for db_name in reversed(cls._databases_names()):
transaction.set_rollback(True, using=db_name)
atomics[db_name].__exit__(None, None, None)
@classmethod
def _databases_support_transactions(cls):
return connections_support_transactions(cls.databases)
@classmethod
def setUpClass(cls):
super().setUpClass()
if not cls._databases_support_transactions():
return
cls.cls_atomics = cls._enter_atomics()
if cls.fixtures:
for db_name in cls._databases_names(include_mirrors=False):
try:
call_command('loaddata', *cls.fixtures, **{'verbosity': 0, 'database': db_name})
except Exception:
cls._rollback_atomics(cls.cls_atomics)
cls._remove_databases_failures()
raise
pre_attrs = cls.__dict__.copy()
try:
cls.setUpTestData()
except Exception:
cls._rollback_atomics(cls.cls_atomics)
cls._remove_databases_failures()
raise
for name, value in cls.__dict__.items():
if value is not pre_attrs.get(name):
setattr(cls, name, TestData(name, value))
@classmethod
def tearDownClass(cls):
if cls._databases_support_transactions():
cls._rollback_atomics(cls.cls_atomics)
for conn in connections.all():
conn.close()
super().tearDownClass()
@classmethod
def setUpTestData(cls):
"""Load initial data for the TestCase."""
pass
def _should_reload_connections(self):
if self._databases_support_transactions():
return False
return super()._should_reload_connections()
def _fixture_setup(self):
if not self._databases_support_transactions():
# If the backend does not support transactions, we should reload
# class data before each test
self.setUpTestData()
return super()._fixture_setup()
assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'
self.atomics = self._enter_atomics()
def _fixture_teardown(self):
if not self._databases_support_transactions():
return super()._fixture_teardown()
try:
for db_name in reversed(self._databases_names()):
if self._should_check_constraints(connections[db_name]):
connections[db_name].check_constraints()
finally:
self._rollback_atomics(self.atomics)
def _should_check_constraints(self, connection):
return (
connection.features.can_defer_constraint_checks and
not connection.needs_rollback and connection.is_usable()
)
class CheckCondition:
"""Descriptor class for deferred condition checking."""
def __init__(self, *conditions):
self.conditions = conditions
def add_condition(self, condition, reason):
return self.__class__(*self.conditions, (condition, reason))
def __get__(self, instance, cls=None):
# Trigger access for all bases.
if any(getattr(base, '__unittest_skip__', False) for base in cls.__bases__):
return True
for condition, reason in self.conditions:
if condition():
# Override this descriptor's value and set the skip reason.
cls.__unittest_skip__ = True
cls.__unittest_skip_why__ = reason
return True
return False
def _deferredSkip(condition, reason, name):
def decorator(test_func):
nonlocal condition
if not (isinstance(test_func, type) and
issubclass(test_func, unittest.TestCase)):
@wraps(test_func)
def skip_wrapper(*args, **kwargs):
if (args and isinstance(args[0], unittest.TestCase) and
connection.alias not in getattr(args[0], 'databases', {})):
raise ValueError(
"%s cannot be used on %s as %s doesn't allow queries "
"against the %r database." % (
name,
args[0],
args[0].__class__.__qualname__,
connection.alias,
)
)
if condition():
raise unittest.SkipTest(reason)
return test_func(*args, **kwargs)
test_item = skip_wrapper
else:
# Assume a class is decorated
test_item = test_func
databases = getattr(test_item, 'databases', None)
if not databases or connection.alias not in databases:
# Defer raising to allow importing test class's module.
def condition():
raise ValueError(
"%s cannot be used on %s as it doesn't allow queries "
"against the '%s' database." % (
name, test_item, connection.alias,
)
)
# Retrieve the possibly existing value from the class's dict to
# avoid triggering the descriptor.
skip = test_func.__dict__.get('__unittest_skip__')
if isinstance(skip, CheckCondition):
test_item.__unittest_skip__ = skip.add_condition(condition, reason)
elif skip is not True:
test_item.__unittest_skip__ = CheckCondition((condition, reason))
return test_item
return decorator
def skipIfDBFeature(*features):
"""Skip a test if a database has at least one of the named features."""
return _deferredSkip(
lambda: any(getattr(connection.features, feature, False) for feature in features),
"Database has feature(s) %s" % ", ".join(features),
'skipIfDBFeature',
)
def skipUnlessDBFeature(*features):
"""Skip a test unless a database has all the named features."""
return _deferredSkip(
lambda: not all(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support feature(s): %s" % ", ".join(features),
'skipUnlessDBFeature',
)
def skipUnlessAnyDBFeature(*features):
"""Skip a test unless a database has any of the named features."""
return _deferredSkip(
lambda: not any(getattr(connection.features, feature, False) for feature in features),
"Database doesn't support any of the feature(s): %s" % ", ".join(features),
'skipUnlessAnyDBFeature',
)
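# Editorial usage sketch: the decorators apply to individual test methods or
# to whole classes. The feature names are real DatabaseFeatures flags; the
# test bodies are hypothetical.
#
#     @skipUnlessDBFeature('supports_transactions')
#     class TransactionBehaviourTests(TestCase):
#         def test_rollback(self):
#             ...
#
#     class MixedTests(TestCase):
#         @skipIfDBFeature('interprets_empty_strings_as_nulls')
#         def test_empty_string_round_trips(self):
#             ...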
class QuietWSGIRequestHandler(WSGIRequestHandler):
"""
A WSGIRequestHandler that doesn't log to standard output any of the
requests received, so as to not clutter the test result output.
"""
def log_message(*args):
pass
class FSFilesHandler(WSGIHandler):
"""
WSGI middleware that intercepts calls to a directory, as defined by one of
the *_ROOT settings, and serves those files, publishing them under *_URL.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super().__init__()
def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1]
def file_path(self, url):
"""Return the relative path to the file on disk for the given URL."""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url)
def get_response(self, request):
from django.http import Http404
if self._should_handle(request.path):
try:
return self.serve(request)
except Http404:
pass
return super().get_response(request)
def serve(self, request):
os_rel_path = self.file_path(request.path)
os_rel_path = posixpath.normpath(unquote(os_rel_path))
# Emulate behavior of django.contrib.staticfiles.views.serve() when it
# invokes staticfiles' finders functionality.
# TODO: Modify if/when that internal API is refactored
final_rel_path = os_rel_path.replace('\\', '/').lstrip('/')
return serve(request, final_rel_path, document_root=self.get_base_dir())
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super().__call__(environ, start_response)
class _StaticFilesHandler(FSFilesHandler):
"""
Handler for serving static files. A private class that is meant to be used
solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.STATIC_ROOT
def get_base_url(self):
return settings.STATIC_URL
class _MediaFilesHandler(FSFilesHandler):
"""
Handler for serving the media files. A private class that is meant to be
used solely as a convenience by LiveServerThread.
"""
def get_base_dir(self):
return settings.MEDIA_ROOT
def get_base_url(self):
return settings.MEDIA_URL
class LiveServerThread(threading.Thread):
"""Thread for running a live http server while the tests are running."""
def __init__(self, host, static_handler, connections_override=None, port=0):
self.host = host
self.port = port
self.is_ready = threading.Event()
self.error = None
self.static_handler = static_handler
self.connections_override = connections_override
super().__init__()
def run(self):
"""
Set up the live server and databases, and then loop over handling
HTTP requests.
"""
if self.connections_override:
# Override this thread's database connections with the ones
# provided by the main thread.
for alias, conn in self.connections_override.items():
connections[alias] = conn
try:
# Create the handler for serving static and media files
handler = self.static_handler(_MediaFilesHandler(WSGIHandler()))
self.httpd = self._create_server()
# If binding to port zero, assign the port allocated by the OS.
if self.port == 0:
self.port = self.httpd.server_address[1]
self.httpd.set_app(handler)
self.is_ready.set()
self.httpd.serve_forever()
except Exception as e:
self.error = e
self.is_ready.set()
finally:
connections.close_all()
def _create_server(self):
return ThreadedWSGIServer((self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False)
def terminate(self):
if hasattr(self, 'httpd'):
# Stop the WSGI server
self.httpd.shutdown()
self.httpd.server_close()
self.join()
class LiveServerTestCase(TransactionTestCase):
"""
Do basically the same as TransactionTestCase but also launch a live HTTP
server in a separate thread so that the tests may use another testing
    framework, such as Selenium, instead of the built-in dummy client.
    It inherits from TransactionTestCase instead of TestCase because the
    threads don't share the same transactions (unless using in-memory sqlite)
    and each thread needs to commit all its transactions so that the other
    thread can see the changes.
"""
host = 'localhost'
port = 0
server_thread_class = LiveServerThread
static_handler = _StaticFilesHandler
@classproperty
def live_server_url(cls):
return 'http://%s:%s' % (cls.host, cls.server_thread.port)
@classproperty
def allowed_host(cls):
return cls.host
@classmethod
def setUpClass(cls):
super().setUpClass()
connections_override = {}
for conn in connections.all():
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if conn.vendor == 'sqlite' and conn.is_in_memory_db():
# Explicitly enable thread-shareability for this connection
conn.inc_thread_sharing()
connections_override[conn.alias] = conn
cls._live_server_modified_settings = modify_settings(
ALLOWED_HOSTS={'append': cls.allowed_host},
)
cls._live_server_modified_settings.enable()
cls.server_thread = cls._create_server_thread(connections_override)
cls.server_thread.daemon = True
cls.server_thread.start()
# Wait for the live server to be ready
cls.server_thread.is_ready.wait()
if cls.server_thread.error:
# Clean up behind ourselves, since tearDownClass won't get called in
# case of errors.
cls._tearDownClassInternal()
raise cls.server_thread.error
@classmethod
def _create_server_thread(cls, connections_override):
return cls.server_thread_class(
cls.host,
cls.static_handler,
connections_override=connections_override,
port=cls.port,
)
@classmethod
def _tearDownClassInternal(cls):
# There may not be a 'server_thread' attribute if setUpClass() for some
# reasons has raised an exception.
if hasattr(cls, 'server_thread'):
# Terminate the live server's thread
cls.server_thread.terminate()
# Restore sqlite in-memory database connections' non-shareability.
for conn in cls.server_thread.connections_override.values():
conn.dec_thread_sharing()
@classmethod
def tearDownClass(cls):
cls._tearDownClassInternal()
cls._live_server_modified_settings.disable()
super().tearDownClass()
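# Editorial usage sketch: a LiveServerTestCase subclass can drive the running
# server with any HTTP client; urllib keeps the example dependency-free and
# the path is hypothetical.
#
#     from urllib.request import urlopen
#
#     class HomepageLiveTests(LiveServerTestCase):
#         def test_homepage_is_served(self):
#             with urlopen(self.live_server_url + '/') as response:
#                 self.assertEqual(response.status, 200)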
class SerializeMixin:
"""
Enforce serialization of TestCases that share a common resource.
Define a common 'lockfile' for each set of TestCases to serialize. This
file must exist on the filesystem.
Place it early in the MRO in order to isolate setUpClass()/tearDownClass().
"""
lockfile = None
@classmethod
def setUpClass(cls):
if cls.lockfile is None:
raise ValueError(
"{}.lockfile isn't set. Set it to a unique value "
"in the base class.".format(cls.__name__))
cls._lockfile = open(cls.lockfile)
locks.lock(cls._lockfile, locks.LOCK_EX)
super().setUpClass()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
cls._lockfile.close()
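# Editorial usage sketch: test cases that share an on-disk resource can be
# serialized by mixing SerializeMixin in first and pointing lockfile at any
# existing file; the module's own __file__ is a common choice.
#
#     class BaseFilesystemTests(SerializeMixin, TestCase):
#         lockfile = __file__
#
#     class FirstFilesystemTests(BaseFilesystemTests):
#         ...
#
#     class SecondFilesystemTests(BaseFilesystemTests):
#         ...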
|
744a95716a8d4381e5596dc41acd5c8d0e8865de4e5cc1cbbc109ef5b832d267 | """
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import posixpath
import re
from pathlib import Path
from django.http import (
FileResponse, Http404, HttpResponse, HttpResponseNotModified,
)
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.utils._os import safe_join
from django.utils.http import http_date, parse_http_date
from django.utils.translation import gettext as _, gettext_lazy
def serve(request, path, document_root=None, show_indexes=False):
"""
Serve static files below a given point in the directory structure.
To use, put a URL pattern such as::
from django.views.static import serve
path('<path:path>', serve, {'document_root': '/path/to/my/files/'})
in your URLconf. You must provide the ``document_root`` param. You may
also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
of the directory. This index view will use the template hardcoded below,
but if you'd like to override it, you can create a template called
``static/directory_index.html``.
"""
path = posixpath.normpath(path).lstrip('/')
fullpath = Path(safe_join(document_root, path))
if fullpath.is_dir():
if show_indexes:
return directory_index(path, fullpath)
raise Http404(_("Directory indexes are not allowed here."))
if not fullpath.exists():
raise Http404(_('“%(path)s” does not exist') % {'path': fullpath})
# Respect the If-Modified-Since header.
statobj = fullpath.stat()
if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
statobj.st_mtime, statobj.st_size):
return HttpResponseNotModified()
content_type, encoding = mimetypes.guess_type(str(fullpath))
content_type = content_type or 'application/octet-stream'
response = FileResponse(fullpath.open('rb'), content_type=content_type)
response["Last-Modified"] = http_date(statobj.st_mtime)
if encoding:
response["Content-Encoding"] = encoding
return response
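# Editorial usage sketch: a development-only URLconf entry serving MEDIA_ROOT
# under /media/, along the lines of the docstring above (names hypothetical).
#
#     from django.conf import settings
#     from django.urls import re_path
#     from django.views.static import serve
#
#     if settings.DEBUG:
#         urlpatterns += [
#             re_path(r'^media/(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT}),
#         ]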
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
{% load i18n %}
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<meta http-equiv="Content-Language" content="en-us">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% blocktranslate %}Index of {{ directory }}{% endblocktranslate %}</title>
</head>
<body>
<h1>{% blocktranslate %}Index of {{ directory }}{% endblocktranslate %}</h1>
<ul>
{% if directory != "/" %}
<li><a href="../">../</a></li>
{% endif %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
template_translatable = gettext_lazy("Index of %(directory)s")
def directory_index(path, fullpath):
try:
t = loader.select_template([
'static/directory_index.html',
'static/directory_index',
])
except TemplateDoesNotExist:
t = Engine(libraries={'i18n': 'django.templatetags.i18n'}).from_string(DEFAULT_DIRECTORY_INDEX_TEMPLATE)
c = Context()
else:
c = {}
files = []
for f in fullpath.iterdir():
if not f.name.startswith('.'):
url = str(f.relative_to(fullpath))
if f.is_dir():
url += '/'
files.append(url)
c.update({
'directory': path + '/',
'file_list': files,
})
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches[1])
header_len = matches[3]
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
|
f82c2bd3a1b20245481d28f76a13f1df68a6e7e5648782e03ed1b692b20a92ca | import itertools
import json
import os
import re
from urllib.parse import unquote
from django.apps import apps
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.template import Context, Engine
from django.urls import translate_url
from django.utils.formats import get_format
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.translation import (
LANGUAGE_SESSION_KEY, check_for_language, get_language,
)
from django.utils.translation.trans_real import DjangoTranslation
from django.views.generic import View
LANGUAGE_QUERY_PARAMETER = 'language'
def set_language(request):
"""
Redirect to a given URL while setting the chosen language in the session
(if enabled) and in a cookie. The URL and the language code need to be
specified in the request parameters.
Since this view changes how the user will see the rest of the site, it must
only be accessed as a POST request. If called as a GET request, it will
redirect to the page in the request (the 'next' parameter) without changing
any state.
"""
next_url = request.POST.get('next', request.GET.get('next'))
if (
(next_url or request.accepts('text/html')) and
not url_has_allowed_host_and_scheme(
url=next_url,
allowed_hosts={request.get_host()},
require_https=request.is_secure(),
)
):
next_url = request.META.get('HTTP_REFERER')
# HTTP_REFERER may be encoded.
next_url = next_url and unquote(next_url)
if not url_has_allowed_host_and_scheme(
url=next_url,
allowed_hosts={request.get_host()},
require_https=request.is_secure(),
):
next_url = '/'
response = HttpResponseRedirect(next_url) if next_url else HttpResponse(status=204)
if request.method == 'POST':
lang_code = request.POST.get(LANGUAGE_QUERY_PARAMETER)
if lang_code and check_for_language(lang_code):
if next_url:
next_trans = translate_url(next_url, lang_code)
if next_trans != next_url:
response = HttpResponseRedirect(next_trans)
if hasattr(request, 'session'):
# Storing the language in the session is deprecated.
# (RemovedInDjango40Warning)
request.session[LANGUAGE_SESSION_KEY] = lang_code
response.set_cookie(
settings.LANGUAGE_COOKIE_NAME, lang_code,
max_age=settings.LANGUAGE_COOKIE_AGE,
path=settings.LANGUAGE_COOKIE_PATH,
domain=settings.LANGUAGE_COOKIE_DOMAIN,
secure=settings.LANGUAGE_COOKIE_SECURE,
httponly=settings.LANGUAGE_COOKIE_HTTPONLY,
samesite=settings.LANGUAGE_COOKIE_SAMESITE,
)
return response
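# Editorial usage sketch: set_language is normally exposed through the
# bundled i18n URLconf and targeted by a POST form carrying the 'language'
# and optional 'next' parameters.
#
#     # urls.py
#     from django.urls import include, path
#
#     urlpatterns = [
#         path('i18n/', include('django.conf.urls.i18n')),
#     ]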
def get_formats():
"""Return all formats strings required for i18n to work."""
FORMAT_SETTINGS = (
'DATE_FORMAT', 'DATETIME_FORMAT', 'TIME_FORMAT',
'YEAR_MONTH_FORMAT', 'MONTH_DAY_FORMAT', 'SHORT_DATE_FORMAT',
'SHORT_DATETIME_FORMAT', 'FIRST_DAY_OF_WEEK', 'DECIMAL_SEPARATOR',
'THOUSAND_SEPARATOR', 'NUMBER_GROUPING',
'DATE_INPUT_FORMATS', 'TIME_INPUT_FORMATS', 'DATETIME_INPUT_FORMATS'
)
return {attr: get_format(attr) for attr in FORMAT_SETTINGS}
js_catalog_template = r"""
{% autoescape off %}
(function(globals) {
var django = globals.django || (globals.django = {});
{% if plural %}
django.pluralidx = function(n) {
var v={{ plural }};
if (typeof(v) == 'boolean') {
return v ? 1 : 0;
} else {
return v;
}
};
{% else %}
django.pluralidx = function(count) { return (count == 1) ? 0 : 1; };
{% endif %}
/* gettext library */
django.catalog = django.catalog || {};
{% if catalog_str %}
var newcatalog = {{ catalog_str }};
for (var key in newcatalog) {
django.catalog[key] = newcatalog[key];
}
{% endif %}
if (!django.jsi18n_initialized) {
django.gettext = function(msgid) {
var value = django.catalog[msgid];
if (typeof(value) == 'undefined') {
return msgid;
} else {
return (typeof(value) == 'string') ? value : value[0];
}
};
django.ngettext = function(singular, plural, count) {
var value = django.catalog[singular];
if (typeof(value) == 'undefined') {
return (count == 1) ? singular : plural;
} else {
return value.constructor === Array ? value[django.pluralidx(count)] : value;
}
};
django.gettext_noop = function(msgid) { return msgid; };
django.pgettext = function(context, msgid) {
var value = django.gettext(context + '\x04' + msgid);
if (value.indexOf('\x04') != -1) {
value = msgid;
}
return value;
};
django.npgettext = function(context, singular, plural, count) {
var value = django.ngettext(context + '\x04' + singular, context + '\x04' + plural, count);
if (value.indexOf('\x04') != -1) {
value = django.ngettext(singular, plural, count);
}
return value;
};
django.interpolate = function(fmt, obj, named) {
if (named) {
return fmt.replace(/%\(\w+\)s/g, function(match){return String(obj[match.slice(2,-2)])});
} else {
return fmt.replace(/%s/g, function(match){return String(obj.shift())});
}
};
/* formatting library */
django.formats = {{ formats_str }};
django.get_format = function(format_type) {
var value = django.formats[format_type];
if (typeof(value) == 'undefined') {
return format_type;
} else {
return value;
}
};
/* add to global namespace */
globals.pluralidx = django.pluralidx;
globals.gettext = django.gettext;
globals.ngettext = django.ngettext;
globals.gettext_noop = django.gettext_noop;
globals.pgettext = django.pgettext;
globals.npgettext = django.npgettext;
globals.interpolate = django.interpolate;
globals.get_format = django.get_format;
django.jsi18n_initialized = true;
}
}(this));
{% endautoescape %}
"""
class JavaScriptCatalog(View):
"""
Return the selected language catalog as a JavaScript library.
Receive the list of packages to check for translations in the `packages`
kwarg either from the extra dictionary passed to the path() function or as
a plus-sign delimited string from the request. Default is 'django.conf'.
You can override the gettext domain for this view, but usually you don't
want to do that as JavaScript messages go to the djangojs domain. This
might be needed if you deliver your JavaScript source from Django templates.
"""
domain = 'djangojs'
packages = None
def get(self, request, *args, **kwargs):
locale = get_language()
domain = kwargs.get('domain', self.domain)
# If packages are not provided, default to all installed packages, as
# DjangoTranslation without localedirs harvests them all.
packages = kwargs.get('packages', '')
packages = packages.split('+') if packages else self.packages
paths = self.get_paths(packages) if packages else None
self.translation = DjangoTranslation(locale, domain=domain, localedirs=paths)
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
def get_paths(self, packages):
allowable_packages = {app_config.name: app_config for app_config in apps.get_app_configs()}
app_configs = [allowable_packages[p] for p in packages if p in allowable_packages]
if len(app_configs) < len(packages):
excluded = [p for p in packages if p not in allowable_packages]
raise ValueError(
'Invalid package(s) provided to JavaScriptCatalog: %s' % ','.join(excluded)
)
# paths of requested packages
return [os.path.join(app.path, 'locale') for app in app_configs]
@property
def _num_plurals(self):
"""
Return the number of plurals for this catalog language, or 2 if no
plural string is available.
"""
match = re.search(r'nplurals=\s*(\d+)', self._plural_string or '')
if match:
return int(match[1])
return 2
@property
def _plural_string(self):
"""
Return the plural string (including nplurals) for this catalog language,
or None if no plural string is available.
"""
if '' in self.translation._catalog:
for line in self.translation._catalog[''].split('\n'):
if line.startswith('Plural-Forms:'):
return line.split(':', 1)[1].strip()
return None
def get_plural(self):
plural = self._plural_string
if plural is not None:
# This should be a compiled function of a typical plural-form:
# Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 :
# n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2;
plural = [el.strip() for el in plural.split(';') if el.strip().startswith('plural=')][0].split('=', 1)[1]
return plural
def get_catalog(self):
pdict = {}
num_plurals = self._num_plurals
catalog = {}
trans_cat = self.translation._catalog
trans_fallback_cat = self.translation._fallback._catalog if self.translation._fallback else {}
seen_keys = set()
for key, value in itertools.chain(trans_cat.items(), trans_fallback_cat.items()):
if key == '' or key in seen_keys:
continue
if isinstance(key, str):
catalog[key] = value
elif isinstance(key, tuple):
msgid, cnt = key
pdict.setdefault(msgid, {})[cnt] = value
else:
raise TypeError(key)
seen_keys.add(key)
for k, v in pdict.items():
catalog[k] = [v.get(i, '') for i in range(num_plurals)]
return catalog
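    # Illustrative shape of the returned catalog (hypothetical entries): plain
    # msgids map to a single string, while pluralizable msgids map to a list
    # indexed by pluralidx, e.g. {'Welcome': 'Bienvenue', 'item': ['objet', 'objets']}.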
def get_context_data(self, **kwargs):
return {
'catalog': self.get_catalog(),
'formats': get_formats(),
'plural': self.get_plural(),
}
def render_to_response(self, context, **response_kwargs):
def indent(s):
return s.replace('\n', '\n ')
template = Engine().from_string(js_catalog_template)
context['catalog_str'] = indent(
json.dumps(context['catalog'], sort_keys=True, indent=2)
) if context['catalog'] else None
context['formats_str'] = indent(json.dumps(context['formats'], sort_keys=True, indent=2))
return HttpResponse(template.render(Context(context)), 'text/javascript; charset="utf-8"')
class JSONCatalog(JavaScriptCatalog):
"""
Return the selected language catalog as a JSON object.
Receive the same parameters as JavaScriptCatalog and return a response
with a JSON object of the following format:
{
"catalog": {
# Translations catalog
},
"formats": {
# Language formats for date, time, etc.
},
"plural": '...' # Expression for plural forms, or null.
}
"""
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context)
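# Example URLconf wiring for these views (illustrative; the package and route
# names below are assumptions, not part of this module):
#
#     from django.urls import path
#     from django.views.i18n import JavaScriptCatalog, JSONCatalog
#
#     urlpatterns = [
#         path('jsi18n/', JavaScriptCatalog.as_view(packages=['your_app']),
#              name='javascript-catalog'),
#         path('jsoni18n/', JSONCatalog.as_view(), name='json-catalog'),
#     ]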
|
7eecf87e01f3f35e894505239f06e2a765a07c3b9f5fbd09e9e3e69fbdd4c814 | import functools
import re
import sys
import types
from pathlib import Path
from django.conf import settings
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import pprint
from django.urls import resolve
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
from django.utils.version import get_docs_version
# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting. Templates are
# read directly from the filesystem so that the error handler
# works even if the template loader is broken.
DEBUG_ENGINE = Engine(
debug=True,
libraries={'i18n': 'django.templatetags.i18n'},
)
CURRENT_DIR = Path(__file__).parent
class CallableSettingWrapper:
"""
Object to wrap callables appearing in settings.
* Not called in the debug page (#21345).
* Doesn't break the debug page if the callable forbids setting attributes
(#23070).
"""
def __init__(self, callable_setting):
self._wrapped = callable_setting
def __repr__(self):
return repr(self._wrapped)
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Create a technical server error response. The last three arguments are
the values returned from sys.exc_info() and friends.
"""
reporter = get_exception_reporter_class(request)(request, exc_type, exc_value, tb)
if request.accepts('text/html'):
html = reporter.get_traceback_html()
return HttpResponse(html, status=status_code, content_type='text/html')
else:
text = reporter.get_traceback_text()
return HttpResponse(text, status=status_code, content_type='text/plain; charset=utf-8')
@functools.lru_cache()
def get_default_exception_reporter_filter():
# Instantiate the default filter for the first time and cache it.
return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()
def get_exception_reporter_filter(request):
default_filter = get_default_exception_reporter_filter()
return getattr(request, 'exception_reporter_filter', default_filter)
def get_exception_reporter_class(request):
default_exception_reporter_class = import_string(settings.DEFAULT_EXCEPTION_REPORTER)
return getattr(request, 'exception_reporter_class', default_exception_reporter_class)
class SafeExceptionReporterFilter:
"""
Use annotations made by the sensitive_post_parameters and
sensitive_variables decorators to filter out sensitive information.
"""
cleansed_substitute = '********************'
hidden_settings = _lazy_re_compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE', flags=re.I)
def cleanse_setting(self, key, value):
"""
Cleanse an individual setting key/value of sensitive content. If the
value is a dictionary, recursively cleanse the keys in that dictionary.
"""
try:
if self.hidden_settings.search(key):
cleansed = self.cleansed_substitute
elif isinstance(value, dict):
cleansed = {k: self.cleanse_setting(k, v) for k, v in value.items()}
elif isinstance(value, list):
cleansed = [self.cleanse_setting('', v) for v in value]
elif isinstance(value, tuple):
cleansed = tuple([self.cleanse_setting('', v) for v in value])
else:
cleansed = value
except TypeError:
# If the key isn't regex-able, just return as-is.
cleansed = value
if callable(cleansed):
cleansed = CallableSettingWrapper(cleansed)
return cleansed
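    # Illustrative behaviour (hypothetical values): keys matching the
    # hidden_settings pattern are replaced with the cleansed substitute,
    # containers are cleansed recursively, everything else passes through.
    #
    #     >>> f = SafeExceptionReporterFilter()
    #     >>> f.cleanse_setting('SOME_API_KEY', 'abc123') == f.cleansed_substitute
    #     True
    #     >>> f.cleanse_setting('DEBUG', True)
    #     True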
def get_safe_settings(self):
"""
Return a dictionary of the settings module with values of sensitive
settings replaced with stars (*********).
"""
settings_dict = {}
for k in dir(settings):
if k.isupper():
settings_dict[k] = self.cleanse_setting(k, getattr(settings, k))
return settings_dict
def get_safe_request_meta(self, request):
"""
Return a dictionary of request.META with sensitive values redacted.
"""
if not hasattr(request, 'META'):
return {}
return {k: self.cleanse_setting(k, v) for k, v in request.META.items()}
def is_active(self, request):
"""
This filter adds safety in production environments (i.e. when DEBUG
is False). If DEBUG is True, your site isn't safe anyway.
This hook is provided as a convenience to easily activate or
deactivate the filter on a per request basis.
"""
return settings.DEBUG is False
def get_cleansed_multivaluedict(self, request, multivaluedict):
"""
Replace the keys in a MultiValueDict marked as sensitive with stars.
This mitigates leaking sensitive POST parameters if something like
request.POST['nonexistent_key'] throws an exception (#21098).
"""
sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
if self.is_active(request) and sensitive_post_parameters:
multivaluedict = multivaluedict.copy()
for param in sensitive_post_parameters:
if param in multivaluedict:
multivaluedict[param] = self.cleansed_substitute
return multivaluedict
def get_post_parameters(self, request):
"""
Replace the values of POST parameters marked as sensitive with
stars (*********).
"""
if request is None:
return {}
else:
sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', [])
if self.is_active(request) and sensitive_post_parameters:
cleansed = request.POST.copy()
if sensitive_post_parameters == '__ALL__':
# Cleanse all parameters.
for k in cleansed:
cleansed[k] = self.cleansed_substitute
return cleansed
else:
# Cleanse only the specified parameters.
for param in sensitive_post_parameters:
if param in cleansed:
cleansed[param] = self.cleansed_substitute
return cleansed
else:
return request.POST
def cleanse_special_types(self, request, value):
try:
# If value is lazy or a complex object of another kind, this check
# might raise an exception. The isinstance check ensures that lazy
# MultiValueDicts are evaluated and detected like regular ones.
is_multivalue_dict = isinstance(value, MultiValueDict)
except Exception as e:
return '{!r} while evaluating {!r}'.format(e, value)
if is_multivalue_dict:
# Cleanse MultiValueDicts (request.POST is the one we usually care about)
value = self.get_cleansed_multivaluedict(request, value)
return value
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replace the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and
'sensitive_variables_wrapper' in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper, 'sensitive_variables', None)
break
current_frame = current_frame.f_back
cleansed = {}
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name in tb_frame.f_locals:
cleansed[name] = self.cleansed_substitute
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = self.cleansed_substitute
else:
value = self.cleanse_special_types(request, value)
cleansed[name] = value
else:
# Potentially cleanse the request and any MultiValueDicts if they
# are one of the frame variables.
for name, value in tb_frame.f_locals.items():
cleansed[name] = self.cleanse_special_types(request, value)
if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and
'sensitive_variables_wrapper' in tb_frame.f_locals):
# For good measure, obfuscate the decorated function's arguments in
# the sensitive_variables decorator's frame, in case the variables
# associated with those arguments were meant to be obfuscated from
# the decorated function's frame.
cleansed['func_args'] = self.cleansed_substitute
cleansed['func_kwargs'] = self.cleansed_substitute
return cleansed.items()
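# Illustrative view using the decorators this filter understands (the view and
# parameter names are hypothetical; the decorators are real Django APIs):
#
#     from django.views.decorators.debug import (
#         sensitive_post_parameters, sensitive_variables,
#     )
#
#     @sensitive_variables('password', 'card_number')
#     @sensitive_post_parameters('password')
#     def pay(request):
#         password = request.POST['password']
#         card_number = request.POST['card_number']
#         ...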
class ExceptionReporter:
"""Organize and coordinate reporting on exceptions."""
def __init__(self, request, exc_type, exc_value, tb, is_email=False):
self.request = request
self.filter = get_exception_reporter_filter(self.request)
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
self.is_email = is_email
self.template_info = getattr(self.exc_value, 'template_debug', None)
self.template_does_not_exist = False
self.postmortem = None
def get_traceback_data(self):
"""Return a dictionary containing traceback information."""
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
self.template_does_not_exist = True
self.postmortem = self.exc_value.chain or [self.exc_value]
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if 'vars' in frame:
frame_vars = []
for k, v in frame['vars']:
v = pprint(v)
# Trim large blobs of data
if len(v) > 4096:
v = '%s… <trimmed %d bytes string>' % (v[0:4096], len(v))
frame_vars.append((k, v))
frame['vars'] = frame_vars
frames[i] = frame
unicode_hint = ''
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, 'start', None)
end = getattr(self.exc_value, 'end', None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = force_str(
unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))],
'ascii', errors='replace'
)
from django import get_version
if self.request is None:
user_str = None
else:
try:
user_str = str(self.request.user)
except Exception:
# request.user may raise OperationalError if the database is
# unavailable, for example.
user_str = '[unable to retrieve the current user]'
c = {
'is_email': self.is_email,
'unicode_hint': unicode_hint,
'frames': frames,
'request': self.request,
'request_meta': self.filter.get_safe_request_meta(self.request),
'user_str': user_str,
'filtered_POST_items': list(self.filter.get_post_parameters(self.request).items()),
'settings': self.filter.get_safe_settings(),
'sys_executable': sys.executable,
'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
'server_time': timezone.now(),
'django_version_info': get_version(),
'sys_path': sys.path,
'template_info': self.template_info,
'template_does_not_exist': self.template_does_not_exist,
'postmortem': self.postmortem,
}
if self.request is not None:
c['request_GET_items'] = self.request.GET.items()
c['request_FILES_items'] = self.request.FILES.items()
c['request_COOKIES_items'] = self.request.COOKIES.items()
# Check whether exception info is available
if self.exc_type:
c['exception_type'] = self.exc_type.__name__
if self.exc_value:
c['exception_value'] = str(self.exc_value)
if frames:
c['lastframe'] = frames[-1]
return c
def get_traceback_html(self):
"""Return HTML version of debug 500 HTTP error page."""
with Path(CURRENT_DIR, 'templates', 'technical_500.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), use_l10n=False)
return t.render(c)
def get_traceback_text(self):
"""Return plain text version of debug 500 HTTP error page."""
with Path(CURRENT_DIR, 'templates', 'technical_500.txt').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
return t.render(c)
def _get_source(self, filename, loader, module_name):
source = None
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(module_name)
except ImportError:
pass
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, 'rb') as fp:
source = fp.read().splitlines()
except OSError:
pass
return source
def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
"""
Return context_lines before and after lineno from file.
Return (pre_context_lineno, pre_context, context_line, post_context).
"""
source = self._get_source(filename, loader, module_name)
if source is None:
return None, [], None, []
# If we just read the source from a file, or if the loader did not
# apply tokenize.detect_encoding to decode the source into a
# string, then we should do that ourselves.
if isinstance(source[0], bytes):
encoding = 'ascii'
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (https://www.python.org/dev/peps/pep-0263/)
match = re.search(br'coding[:=]\s*([-\w.]+)', line)
if match:
encoding = match[1].decode('ascii')
break
source = [str(sline, encoding, 'replace') for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
try:
pre_context = source[lower_bound:lineno]
context_line = source[lineno]
post_context = source[lineno + 1:upper_bound]
except IndexError:
return None, [], None, []
return lower_bound, pre_context, context_line, post_context
def get_traceback_frames(self):
def explicit_or_implicit_cause(exc_value):
explicit = getattr(exc_value, '__cause__', None)
implicit = getattr(exc_value, '__context__', None)
return explicit or implicit
# Get the exception and all its causes
exceptions = []
exc_value = self.exc_value
while exc_value:
exceptions.append(exc_value)
exc_value = explicit_or_implicit_cause(exc_value)
if exc_value in exceptions:
# Avoid infinite loop if there's a cyclic reference (#29393).
break
frames = []
# No exceptions were supplied to ExceptionReporter
if not exceptions:
return frames
# In case there's just one exception, take the traceback from self.tb
exc_value = exceptions.pop()
tb = self.tb if not exceptions else exc_value.__traceback__
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get('__traceback_hide__'):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get('__loader__')
module_name = tb.tb_frame.f_globals.get('__name__') or ''
pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(
filename, lineno, 7, loader, module_name,
)
if pre_context_lineno is None:
pre_context_lineno = lineno
pre_context = []
context_line = '<source code not available>'
post_context = []
frames.append({
'exc_cause': explicit_or_implicit_cause(exc_value),
'exc_cause_explicit': getattr(exc_value, '__cause__', True),
'tb': tb,
'type': 'django' if module_name.startswith('django.') else 'user',
'filename': filename,
'function': function,
'lineno': lineno + 1,
'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame),
'id': id(tb),
'pre_context': pre_context,
'context_line': context_line,
'post_context': post_context,
'pre_context_lineno': pre_context_lineno + 1,
})
# If the traceback for current exception is consumed, try the
# other exception.
if not tb.tb_next and exceptions:
exc_value = exceptions.pop()
tb = exc_value.__traceback__
else:
tb = tb.tb_next
return frames
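# Minimal sketch of using the reporter directly, e.g. to render the debug page
# for an exception caught elsewhere (assumes an exception is currently being
# handled; request may be None):
#
#     import sys
#     reporter = ExceptionReporter(None, *sys.exc_info())
#     html = reporter.get_traceback_html()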
def technical_404_response(request, exception):
"""Create a technical 404 error response. `exception` is the Http404."""
try:
error_url = exception.args[0]['path']
except (IndexError, TypeError, KeyError):
error_url = request.path_info[1:] # Trim leading slash
try:
tried = exception.args[0]['tried']
except (IndexError, TypeError, KeyError):
tried = []
else:
if (not tried or ( # empty URLconf
request.path == '/' and
len(tried) == 1 and # default URLconf
len(tried[0]) == 1 and
getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
)):
return default_urlconf(request)
urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
if isinstance(urlconf, types.ModuleType):
urlconf = urlconf.__name__
caller = ''
try:
resolver_match = resolve(request.path)
except Http404:
pass
else:
obj = resolver_match.func
if hasattr(obj, '__name__'):
caller = obj.__name__
elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'):
caller = obj.__class__.__name__
if hasattr(obj, '__module__'):
module = obj.__module__
caller = '%s.%s' % (module, caller)
with Path(CURRENT_DIR, 'templates', 'technical_404.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
reporter_filter = get_default_exception_reporter_filter()
c = Context({
'urlconf': urlconf,
'root_urlconf': settings.ROOT_URLCONF,
'request_path': error_url,
'urlpatterns': tried,
'reason': str(exception),
'request': request,
'settings': reporter_filter.get_safe_settings(),
'raising_view_name': caller,
})
return HttpResponseNotFound(t.render(c), content_type='text/html')
def default_urlconf(request):
"""Create an empty URLconf 404 error response."""
with Path(CURRENT_DIR, 'templates', 'default_urlconf.html').open(encoding='utf-8') as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context({
'version': get_docs_version(),
})
return HttpResponse(t.render(c), content_type='text/html')
|
f2e5ad93ee2fe435d8d4c479854f001701f5a46fa3fcefb5ca1f6b1e122c4af8 | """
Default Django settings. Override these with settings in the module pointed to
by the DJANGO_SETTINGS_MODULE environment variable.
"""
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
return s
####################
# CORE #
####################
DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# People who get code error notifications.
# In the format [('Full Name', '[email protected]'), ('Full Name', '[email protected]')]
ADMINS = []
# List of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = []
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = 'America/Chicago'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box.
LANGUAGES = [
('af', gettext_noop('Afrikaans')),
('ar', gettext_noop('Arabic')),
('ar-dz', gettext_noop('Algerian Arabic')),
('ast', gettext_noop('Asturian')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('be', gettext_noop('Belarusian')),
('bn', gettext_noop('Bengali')),
('br', gettext_noop('Breton')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('dsb', gettext_noop('Lower Sorbian')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-au', gettext_noop('Australian English')),
('en-gb', gettext_noop('British English')),
('eo', gettext_noop('Esperanto')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-co', gettext_noop('Colombian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('es-ve', gettext_noop('Venezuelan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gd', gettext_noop('Scottish Gaelic')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hsb', gettext_noop('Upper Sorbian')),
('hu', gettext_noop('Hungarian')),
('hy', gettext_noop('Armenian')),
('ia', gettext_noop('Interlingua')),
('id', gettext_noop('Indonesian')),
('io', gettext_noop('Ido')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('kab', gettext_noop('Kabyle')),
('kk', gettext_noop('Kazakh')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('ky', gettext_noop('Kyrgyz')),
('lb', gettext_noop('Luxembourgish')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('mr', gettext_noop('Marathi')),
('my', gettext_noop('Burmese')),
('nb', gettext_noop('Norwegian Bokmål')),
('ne', gettext_noop('Nepali')),
('nl', gettext_noop('Dutch')),
('nn', gettext_noop('Norwegian Nynorsk')),
('os', gettext_noop('Ossetic')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('sw', gettext_noop('Swahili')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('tt', gettext_noop('Tatar')),
('udm', gettext_noop('Udmurt')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('uz', gettext_noop('Uzbek')),
('vi', gettext_noop('Vietnamese')),
('zh-hans', gettext_noop('Simplified Chinese')),
('zh-hant', gettext_noop('Traditional Chinese')),
]
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ["he", "ar", "ar-dz", "fa", "ur"]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = []
# Settings for language cookie
LANGUAGE_COOKIE_NAME = 'django_language'
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = '/'
LANGUAGE_COOKIE_SECURE = False
LANGUAGE_COOKIE_HTTPONLY = False
LANGUAGE_COOKIE_SAMESITE = None
# If you set this to True, Django will format dates, numbers and calendars
# according to the user's current locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default charset to use for all HttpResponse objects, if a MIME type isn't
# manually specified. It's used to construct the Content-Type header.
DEFAULT_CHARSET = 'utf-8'
# Email address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
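# Example override in a project's settings module (illustrative values):
#
#     DATABASES = {
#         'default': {
#             'ENGINE': 'django.db.backends.postgresql',
#             'NAME': 'mydatabase',
#             'USER': 'myuser',
#             'PASSWORD': 'mypassword',
#             'HOST': 'localhost',
#             'PORT': '5432',
#         }
#     }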
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending email.
EMAIL_HOST = 'localhost'
# Port for sending email.
EMAIL_PORT = 25
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
EMAIL_USE_LOCALTIME = False
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
EMAIL_SSL_CERTFILE = None
EMAIL_SSL_KEYFILE = None
EMAIL_TIMEOUT = None
# List of strings representing installed apps.
INSTALLED_APPS = []
TEMPLATES = []
# Default form rendering class.
FORM_RENDERER = 'django.forms.renderers.DjangoTemplates'
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages sent with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = [
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search'),
# ]
DISALLOWED_USER_AGENTS = []
ABSOLUTE_URL_OVERRIDES = {}
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = [
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$'),
# re.compile(r'^/robots.txt$'),
# re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# ]
IGNORABLE_404_URLS = []
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = [
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum size in bytes of request data (excluding file uploads) that will be
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum number of GET/POST parameters that will be read before a
# SuspiciousOperation (TooManyFieldsSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# Directory in which uploaded files streamed to disk will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = 0o644
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where users place custom format definitions.
# The package this setting points to should contain subdirectories named
# after the locales, each containing a formats.py file
# (e.g. "myproject.locale" for myproject/locale/en/formats.py).
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
]
# Default formats to be used when parsing times from input boxes, in order
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
]
# Default formats to be used when parsing dates and times from input boxes,
# in order
# See all available format strings here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
]
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
# Default X-Frame-Options header value
X_FRAME_OPTIONS = 'DENY'
USE_X_FORWARDED_HOST = False
USE_X_FORWARDED_PORT = False
# The Python dotted path to the WSGI application that Django's internal server
# (runserver) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
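# Typical value when behind a trusted proxy that sets X-Forwarded-Proto
# (illustrative; only safe if the proxy strips the client-supplied header):
#
#     SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')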
##############
# MIDDLEWARE #
##############
# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE = []
############
# SESSIONS #
############
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = 'default'
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = 'sessionid'
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like "example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = '/'
# Whether to use the HttpOnly flag.
SESSION_COOKIE_HTTPONLY = True
# Whether to set the flag restricting cookie leaks on cross-site requests.
# This can be 'Lax', 'Strict', or None to disable the flag.
SESSION_COOKIE_SAMESITE = 'Lax'
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = 'auth.User'
AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend']
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/accounts/profile/'
LOGOUT_REDIRECT_URL = None
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
# The number of seconds a password reset link is valid for (default: 3 days).
PASSWORD_RESET_TIMEOUT = 60 * 60 * 24 * 3
# The first hasher in this list is the preferred algorithm. Any
# password using a different algorithm will be converted automatically
# upon login.
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = []
###########
# SIGNING #
###########
SIGNING_BACKEND = 'django.core.signing.TimestampSigner'
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = 'csrftoken'
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = '/'
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SAMESITE = 'Lax'
CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN'
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = 'logging.config.dictConfig'
# Custom logging configuration.
LOGGING = {}
# Default exception reporter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER = 'django.views.debug.ExceptionReporter'
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter'
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are to start with)
TEST_NON_SERIALIZED_APPS = []
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = []
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = []
# The default file storage backend used during the build process
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
##############
# MIGRATIONS #
##############
# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}
#################
# SYSTEM CHECKS #
#################
# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not result in hiding the
# message, but Django will not stop you from e.g. running the server.
SILENCED_SYSTEM_CHECKS = []
#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_BROWSER_XSS_FILTER = False
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_REFERRER_POLICY = 'same-origin'
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
|
26ca2c6185dc44daa71d2082d956a3716e6d0b82a793151be603989835114646 | # These classes override date and datetime to ensure that strftime('%Y')
# returns four digits (with leading zeros) on years < 1000.
# https://bugs.python.org/issue13305
#
# Based on code submitted to comp.lang.python by Andrew Dalke
#
# >>> datetime_safe.date(10, 8, 2).strftime("%Y/%m/%d was a %A")
# '0010/08/02 was a Monday'
import time as ttime
from datetime import (
date as real_date, datetime as real_datetime, time as real_time,
)
from django.utils.regex_helper import _lazy_re_compile
class date(real_date):
def strftime(self, fmt):
return strftime(self, fmt)
class datetime(real_datetime):
def strftime(self, fmt):
return strftime(self, fmt)
@classmethod
def combine(cls, date, time):
return cls(date.year, date.month, date.day,
time.hour, time.minute, time.second,
time.microsecond, time.tzinfo)
def date(self):
return date(self.year, self.month, self.day)
class time(real_time):
pass
def new_date(d):
"Generate a safe date from a datetime.date object."
return date(d.year, d.month, d.day)
def new_datetime(d):
"""
Generate a safe datetime from a datetime.date or datetime.datetime object.
"""
kw = [d.year, d.month, d.day]
if isinstance(d, real_datetime):
kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
return datetime(*kw)
# This library does not support strftime's "%s" or "%y" format strings.
# Allowed if there's an even number of "%"s because they are escaped.
_illegal_formatting = _lazy_re_compile(r"((^|[^%])(%%)*%[sy])")
def _findall(text, substr):
# Also finds overlaps
sites = []
i = 0
while True:
i = text.find(substr, i)
if i == -1:
break
sites.append(i)
i += 1
return sites
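# Overlapping matches are included, e.g. (hypothetical call):
#
#     >>> _findall('aaaa', 'aa')
#     [0, 1, 2]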
def strftime(dt, fmt):
if dt.year >= 1000:
return super(type(dt), dt).strftime(fmt)
illegal_formatting = _illegal_formatting.search(fmt)
if illegal_formatting:
raise TypeError('strftime of dates before 1000 does not handle ' + illegal_formatting[0])
year = dt.year
# For every non-leap year century, advance by
# 6 years to get into the 28-year repeat cycle
delta = 2000 - year
off = 6 * (delta // 100 + delta // 400)
year = year + off
# Move to around the year 2000
year = year + ((2000 - year) // 28) * 28
timetuple = dt.timetuple()
s1 = ttime.strftime(fmt, (year,) + timetuple[1:])
sites1 = _findall(s1, str(year))
s2 = ttime.strftime(fmt, (year + 28,) + timetuple[1:])
sites2 = _findall(s2, str(year + 28))
sites = []
for site in sites1:
if site in sites2:
sites.append(site)
s = s1
syear = "%04d" % dt.year
for site in sites:
s = s[:site] + syear + s[site + 4:]
return s
|
b093ffba4204a5e370b3926fbca4f0827506a4e5500b9ddcfa29699a9eba9dcd | import functools
import itertools
import logging
import os
import signal
import subprocess
import sys
import threading
import time
import traceback
import weakref
from collections import defaultdict
from pathlib import Path
from types import ModuleType
from zipimport import zipimporter
from django.apps import apps
from django.core.signals import request_finished
from django.dispatch import Signal
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
autoreload_started = Signal()
file_changed = Signal()
DJANGO_AUTORELOAD_ENV = 'RUN_MAIN'
logger = logging.getLogger('django.utils.autoreload')
# If an error is raised while importing a file, it's not placed in sys.modules.
# This means that any future modifications aren't caught. Keep a list of these
# file paths to allow watching them in the future.
_error_files = []
_exception = None
try:
import termios
except ImportError:
termios = None
try:
import pywatchman
except ImportError:
pywatchman = None
def check_errors(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
global _exception
try:
fn(*args, **kwargs)
except Exception:
_exception = sys.exc_info()
et, ev, tb = _exception
if getattr(ev, 'filename', None) is None:
# get the filename from the last item in the stack
filename = traceback.extract_tb(tb)[-1][0]
else:
filename = ev.filename
if filename not in _error_files:
_error_files.append(filename)
raise
return wrapper
def raise_last_exception():
global _exception
if _exception is not None:
raise _exception[1]
def ensure_echo_on():
"""
Ensure that echo mode is enabled. Some tools such as PDB disable
it, which causes usability issues after a reload.
"""
if not termios or not sys.stdin.isatty():
return
attr_list = termios.tcgetattr(sys.stdin)
if not attr_list[3] & termios.ECHO:
attr_list[3] |= termios.ECHO
if hasattr(signal, 'SIGTTOU'):
old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
else:
old_handler = None
termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list)
if old_handler is not None:
signal.signal(signal.SIGTTOU, old_handler)
def iter_all_python_module_files():
# This is a hot path during reloading. Create a stable sorted list of
# modules based on the module name and pass it to iter_modules_and_files().
# This ensures cached results are returned in the usual case that modules
# aren't loaded on the fly.
keys = sorted(sys.modules)
modules = tuple(m for m in map(sys.modules.__getitem__, keys) if not isinstance(m, weakref.ProxyTypes))
return iter_modules_and_files(modules, frozenset(_error_files))
@functools.lru_cache(maxsize=1)
def iter_modules_and_files(modules, extra_files):
"""Iterate through all modules needed to be watched."""
sys_file_paths = []
for module in modules:
# During debugging (with PyDev) the 'typing.io' and 'typing.re' objects
# are added to sys.modules, however they are types not modules and so
# cause issues here.
if not isinstance(module, ModuleType):
continue
if module.__name__ == '__main__':
# __main__ (usually manage.py) doesn't always have a __spec__ set.
# Handle this by falling back to using __file__, resolved below.
# See https://docs.python.org/reference/import.html#main-spec
# __file__ may not exist, e.g. when running the ipdb debugger.
if hasattr(module, '__file__'):
sys_file_paths.append(module.__file__)
continue
if getattr(module, '__spec__', None) is None:
continue
spec = module.__spec__
# Modules could be loaded from places without a concrete location. If
# this is the case, skip them.
if spec.has_location:
origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin
sys_file_paths.append(origin)
results = set()
for filename in itertools.chain(sys_file_paths, extra_files):
if not filename:
continue
path = Path(filename)
try:
resolved_path = path.resolve(strict=True).absolute()
except FileNotFoundError:
# The module could have been removed, don't fail loudly if this
# is the case.
continue
except ValueError as e:
# Network filesystems may return null bytes in file paths.
logger.debug('"%s" raised when resolving path: "%s"', e, path)
continue
results.add(resolved_path)
return frozenset(results)
@functools.lru_cache(maxsize=1)
def common_roots(paths):
"""
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
"""
# Inspired from Werkzeug:
# https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py
# Create a sorted list of the path components, longest first.
path_parts = sorted([x.parts for x in paths], key=len, reverse=True)
tree = {}
for chunks in path_parts:
node = tree
# Add each part of the path to the tree.
for chunk in chunks:
node = node.setdefault(chunk, {})
# Clear the last leaf in the tree.
node.clear()
# Turn the tree into a list of Path instances.
def _walk(node, path):
for prefix, child in node.items():
yield from _walk(child, path + (prefix,))
if not node:
yield Path(*path)
return tuple(_walk(tree, ()))
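# Illustrative behaviour (hypothetical paths; output shown for POSIX): paths
# that have a watched ancestor collapse into that ancestor, so only the
# shallowest roots need filesystem watches.
#
#     >>> common_roots((Path('/p'), Path('/p/app'), Path('/p/app/migrations')))
#     (PosixPath('/p'),)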
def sys_path_directories():
"""
Yield absolute directories from sys.path, ignoring entries that don't
exist.
"""
for path in sys.path:
path = Path(path)
try:
resolved_path = path.resolve(strict=True).absolute()
except FileNotFoundError:
continue
# If the path is a file (like a zip file), watch the parent directory.
if resolved_path.is_file():
yield resolved_path.parent
else:
yield resolved_path
def get_child_arguments():
"""
Return the arguments for re-executing the current process. This contains a
workaround for Windows, where the executable may be reported without the
.exe extension, which can cause bugs on reloading.
"""
import django.__main__
args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions]
if sys.argv[0] == django.__main__.__file__:
# The server was started with `python -m django runserver`.
args += ['-m', 'django']
args += sys.argv[1:]
else:
args += sys.argv
return args
def trigger_reload(filename):
logger.info('%s changed, reloading.', filename)
sys.exit(3)
def restart_with_reloader():
new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: 'true'}
args = get_child_arguments()
while True:
p = subprocess.run(args, env=new_environ, close_fds=False)
if p.returncode != 3:
return p.returncode
class BaseReloader:
def __init__(self):
self.extra_files = set()
self.directory_globs = defaultdict(set)
self._stop_condition = threading.Event()
def watch_dir(self, path, glob):
path = Path(path)
try:
path = path.absolute()
except FileNotFoundError:
logger.debug(
'Unable to watch directory %s as it cannot be resolved.',
path,
exc_info=True,
)
return
logger.debug('Watching dir %s with glob %s.', path, glob)
self.directory_globs[path].add(glob)
def watched_files(self, include_globs=True):
"""
Yield all files that need to be watched, including module files and
files within globs.
"""
yield from iter_all_python_module_files()
yield from self.extra_files
if include_globs:
for directory, patterns in self.directory_globs.items():
for pattern in patterns:
yield from directory.glob(pattern)
def wait_for_apps_ready(self, app_reg, django_main_thread):
"""
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing.
Return True if the thread is alive and the ready event has been
triggered, or False if the thread is terminated while waiting for the
event.
"""
while django_main_thread.is_alive():
if app_reg.ready_event.wait(timeout=0.1):
return True
else:
logger.debug('Main Django thread has terminated before apps are ready.')
return False
def run(self, django_main_thread):
logger.debug('Waiting for apps ready_event.')
self.wait_for_apps_ready(apps, django_main_thread)
from django.urls import get_resolver
# Prevent a race condition where URL modules aren't loaded when the
# reloader starts by accessing the urlconf_module property.
try:
get_resolver().urlconf_module
except Exception:
# Loading the urlconf can result in errors during development.
# If this occurs then swallow the error and continue.
pass
logger.debug('Apps ready_event triggered. Sending autoreload_started signal.')
autoreload_started.send(sender=self)
self.run_loop()
def run_loop(self):
ticker = self.tick()
while not self.should_stop:
try:
next(ticker)
except StopIteration:
break
self.stop()
def tick(self):
"""
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
"""
raise NotImplementedError('subclasses must implement tick().')
@classmethod
def check_availability(cls):
raise NotImplementedError('subclasses must implement check_availability().')
def notify_file_changed(self, path):
results = file_changed.send(sender=self, file_path=path)
logger.debug('%s notified as changed. Signal results: %s.', path, results)
if not any(res[1] for res in results):
trigger_reload(path)
# These are primarily used for testing.
@property
def should_stop(self):
return self._stop_condition.is_set()
def stop(self):
self._stop_condition.set()
class StatReloader(BaseReloader):
SLEEP_TIME = 1 # Check for changes once per second.
def tick(self):
mtimes = {}
while True:
for filepath, mtime in self.snapshot_files():
old_time = mtimes.get(filepath)
mtimes[filepath] = mtime
if old_time is None:
logger.debug('File %s first seen with mtime %s', filepath, mtime)
continue
elif mtime > old_time:
logger.debug('File %s previous mtime: %s, current mtime: %s', filepath, old_time, mtime)
self.notify_file_changed(filepath)
time.sleep(self.SLEEP_TIME)
yield
def snapshot_files(self):
# watched_files may produce duplicate paths if globs overlap.
seen_files = set()
for file in self.watched_files():
if file in seen_files:
continue
try:
mtime = file.stat().st_mtime
except OSError:
# This is thrown when the file does not exist.
continue
seen_files.add(file)
yield file, mtime
@classmethod
def check_availability(cls):
return True
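# Minimal sketch of driving a reloader manually (runserver normally does this
# via the run_with_reloader() helper defined later in this module; `main` is a
# hypothetical callable running the actual server):
#
#     reloader = StatReloader()
#     django_main_thread = threading.Thread(target=main, daemon=True)
#     django_main_thread.start()
#     reloader.run(django_main_thread)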
class WatchmanUnavailable(RuntimeError):
pass
class WatchmanReloader(BaseReloader):
def __init__(self):
self.roots = defaultdict(set)
self.processed_request = threading.Event()
self.client_timeout = int(os.environ.get('DJANGO_WATCHMAN_TIMEOUT', 5))
super().__init__()
@cached_property
def client(self):
return pywatchman.client(timeout=self.client_timeout)
def _watch_root(self, root):
# In practice this shouldn't occur, however, it's possible that a
# directory that doesn't exist yet is being watched. If it's outside of
# sys.path then this will end up as a new root. How to handle this isn't
# clear: not adding the root will likely break when subscribing to the
# changes, however, as this is currently an internal API, no files
# will be watched outside of sys.path. Fixing this by checking
# inside watch_glob() and watch_dir() is expensive; instead this could
# fall back to the StatReloader if this case is detected. For
# now, watching its parent, if possible, is sufficient.
if not root.exists():
if not root.parent.exists():
logger.warning('Unable to watch root dir %s as neither it nor its parent exist.', root)
return
root = root.parent
result = self.client.query('watch-project', str(root.absolute()))
if 'warning' in result:
logger.warning('Watchman warning: %s', result['warning'])
logger.debug('Watchman watch-project result: %s', result)
return result['watch'], result.get('relative_path')
@functools.lru_cache()
def _get_clock(self, root):
return self.client.query('clock', root)['clock']
def _subscribe(self, directory, name, expression):
root, rel_path = self._watch_root(directory)
query = {
'expression': expression,
'fields': ['name'],
'since': self._get_clock(root),
'dedup_results': True,
}
if rel_path:
query['relative_root'] = rel_path
logger.debug('Issuing watchman subscription %s, for root %s. Query: %s', name, root, query)
self.client.query('subscribe', root, name, query)
def _subscribe_dir(self, directory, filenames):
if not directory.exists():
if not directory.parent.exists():
logger.warning('Unable to watch directory %s as neither it nor its parent exist.', directory)
return
prefix = 'files-parent-%s' % directory.name
filenames = ['%s/%s' % (directory.name, filename) for filename in filenames]
directory = directory.parent
expression = ['name', filenames, 'wholename']
else:
prefix = 'files'
expression = ['name', filenames]
self._subscribe(directory, '%s:%s' % (prefix, directory), expression)
def _watch_glob(self, directory, patterns):
"""
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
        include this. It's important this method isn't called more than once per
directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
"""
prefix = 'glob'
if not directory.exists():
if not directory.parent.exists():
                logger.warning('Unable to watch directory %s as neither it nor its parent exists.', directory)
return
prefix = 'glob-parent-%s' % directory.name
patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns]
directory = directory.parent
expression = ['anyof']
for pattern in patterns:
expression.append(['match', pattern, 'wholename'])
self._subscribe(directory, '%s:%s' % (prefix, directory), expression)
def watched_roots(self, watched_files):
extra_directories = self.directory_globs.keys()
watched_file_dirs = [f.parent for f in watched_files]
sys_paths = list(sys_path_directories())
return frozenset((*extra_directories, *watched_file_dirs, *sys_paths))
def _update_watches(self):
watched_files = list(self.watched_files(include_globs=False))
found_roots = common_roots(self.watched_roots(watched_files))
logger.debug('Watching %s files', len(watched_files))
logger.debug('Found common roots: %s', found_roots)
# Setup initial roots for performance, shortest roots first.
for root in sorted(found_roots):
self._watch_root(root)
for directory, patterns in self.directory_globs.items():
self._watch_glob(directory, patterns)
# Group sorted watched_files by their parent directory.
sorted_files = sorted(watched_files, key=lambda p: p.parent)
for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent):
# These paths need to be relative to the parent directory.
self._subscribe_dir(directory, [str(p.relative_to(directory)) for p in group])
def update_watches(self):
try:
self._update_watches()
except Exception as ex:
# If the service is still available, raise the original exception.
if self.check_server_status(ex):
raise
def _check_subscription(self, sub):
subscription = self.client.getSubscription(sub)
if not subscription:
return
logger.debug('Watchman subscription %s has results.', sub)
for result in subscription:
# When using watch-project, it's not simple to get the relative
# directory without storing some specific state. Store the full
# path to the directory in the subscription name, prefixed by its
# type (glob, files).
root_directory = Path(result['subscription'].split(':', 1)[1])
logger.debug('Found root directory %s', root_directory)
for file in result.get('files', []):
self.notify_file_changed(root_directory / file)
def request_processed(self, **kwargs):
logger.debug('Request processed. Setting update_watches event.')
self.processed_request.set()
def tick(self):
request_finished.connect(self.request_processed)
self.update_watches()
while True:
if self.processed_request.is_set():
self.update_watches()
self.processed_request.clear()
try:
self.client.receive()
except pywatchman.SocketTimeout:
pass
except pywatchman.WatchmanError as ex:
logger.debug('Watchman error: %s, checking server status.', ex)
self.check_server_status(ex)
else:
for sub in list(self.client.subs.keys()):
self._check_subscription(sub)
yield
def stop(self):
self.client.close()
super().stop()
def check_server_status(self, inner_ex=None):
"""Return True if the server is available."""
try:
self.client.query('version')
except Exception:
raise WatchmanUnavailable(str(inner_ex)) from inner_ex
return True
@classmethod
def check_availability(cls):
if not pywatchman:
raise WatchmanUnavailable('pywatchman not installed.')
client = pywatchman.client(timeout=0.1)
try:
result = client.capabilityCheck()
except Exception:
# The service is down?
raise WatchmanUnavailable('Cannot connect to the watchman service.')
version = get_version_tuple(result['version'])
# Watchman 4.9 includes multiple improvements to watching project
# directories as well as case insensitive filesystems.
logger.debug('Watchman version %s', version)
if version < (4, 9):
raise WatchmanUnavailable('Watchman 4.9 or later is required.')
def get_reloader():
"""Return the most suitable reloader for this environment."""
try:
WatchmanReloader.check_availability()
except WatchmanUnavailable:
return StatReloader()
return WatchmanReloader()
def start_django(reloader, main_func, *args, **kwargs):
ensure_echo_on()
main_func = check_errors(main_func)
django_main_thread = threading.Thread(target=main_func, args=args, kwargs=kwargs, name='django-main-thread')
    django_main_thread.daemon = True
django_main_thread.start()
while not reloader.should_stop:
try:
reloader.run(django_main_thread)
except WatchmanUnavailable as ex:
# It's possible that the watchman service shuts down or otherwise
# becomes unavailable. In that case, use the StatReloader.
reloader = StatReloader()
logger.error('Error connecting to Watchman: %s', ex)
logger.info('Watching for file changes with %s', reloader.__class__.__name__)
def run_with_reloader(main_func, *args, **kwargs):
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
try:
if os.environ.get(DJANGO_AUTORELOAD_ENV) == 'true':
reloader = get_reloader()
logger.info('Watching for file changes with %s', reloader.__class__.__name__)
start_django(reloader, main_func, *args, **kwargs)
else:
exit_code = restart_with_reloader()
sys.exit(exit_code)
except KeyboardInterrupt:
pass
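# Illustrative usage sketch (not part of Django): how a management command
# such as runserver typically hooks into the autoreloader. inner_run() and
# its arguments are hypothetical names used only for this example. In the
# parent process run_with_reloader() re-executes the script with
# DJANGO_AUTORELOAD_ENV set; in the child it runs the wrapped function in the
# 'django-main-thread' thread while the reloader watches for file changes.
def _demo_autoreload_entry_point():
    def inner_run(addr, port):
        print('Serving on %s:%s ...' % (addr, port))
    run_with_reloader(inner_run, '127.0.0.1', 8000)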
|
d12459a6f55313d2898dfa03933f7ea3debabda6501421dab376696b59406ac5 | import html.entities
import re
import unicodedata
import warnings
from gzip import GzipFile
from io import BytesIO
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import SimpleLazyObject, keep_lazy_text, lazy
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext as _, gettext_lazy, pgettext
@keep_lazy_text
def capfirst(x):
"""Capitalize the first letter of a string."""
return x and str(x)[0].upper() + str(x)[1:]
# Set up regular expressions
re_words = _lazy_re_compile(r'<[^>]+?>|([^<>\s]+)', re.S)
re_chars = _lazy_re_compile(r'<[^>]+?>|(.)', re.S)
re_tag = _lazy_re_compile(r'<(/)?(\S+?)(?:(\s*/)|\s.*?)?>', re.S)
re_newlines = _lazy_re_compile(r'\r\n|\r') # Used in normalize_newlines
re_camel_case = _lazy_re_compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))')
@keep_lazy_text
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks. Expects that
existing line breaks are posix newlines.
    All white space is preserved, except that added line breaks consume the
    space on which they break the line.
Don't wrap long words, thus the output text may have lines longer than
``width``.
"""
def _generator():
for line in text.splitlines(True): # True keeps trailing linebreaks
max_width = min((line.endswith('\n') and width + 1 or width), width)
while len(line) > max_width:
space = line[:max_width + 1].rfind(' ') + 1
if space == 0:
space = line.find(' ') + 1
if space == 0:
yield line
line = ''
break
yield '%s\n' % line[:space - 1]
line = line[space:]
max_width = min((line.endswith('\n') and width + 1 or width), width)
if line:
yield line
return ''.join(_generator())
class Truncator(SimpleLazyObject):
"""
An object used to truncate text, either by characters or words.
"""
def __init__(self, text):
super().__init__(lambda: str(text))
def add_truncation_text(self, text, truncate=None):
if truncate is None:
truncate = pgettext(
'String to return when truncating text',
'%(truncated_text)s…')
if '%(truncated_text)s' in truncate:
return truncate % {'truncated_text': text}
# The truncation text didn't contain the %(truncated_text)s string
# replacement argument so just append it to the text.
if text.endswith(truncate):
# But don't append the truncation text if the current text already
# ends in this.
return text
return '%s%s' % (text, truncate)
def chars(self, num, truncate=None, html=False):
"""
Return the text truncated to be no longer than the specified number
of characters.
`truncate` specifies what should be used to notify that the string has
been truncated, defaulting to a translatable string of an ellipsis.
"""
self._setup()
length = int(num)
text = unicodedata.normalize('NFC', self._wrapped)
# Calculate the length to truncate to (max length - end_text length)
truncate_len = length
for char in self.add_truncation_text('', truncate):
if not unicodedata.combining(char):
truncate_len -= 1
if truncate_len == 0:
break
if html:
return self._truncate_html(length, truncate, text, truncate_len, False)
return self._text_chars(length, truncate, text, truncate_len)
def _text_chars(self, length, truncate, text, truncate_len):
"""Truncate a string after a certain number of chars."""
s_len = 0
end_index = None
for i, char in enumerate(text):
if unicodedata.combining(char):
# Don't consider combining characters
# as adding to the string length
continue
s_len += 1
if end_index is None and s_len > truncate_len:
end_index = i
if s_len > length:
# Return the truncated string
return self.add_truncation_text(text[:end_index or 0],
truncate)
# Return the original string since no truncation was necessary
return text
def words(self, num, truncate=None, html=False):
"""
Truncate a string after a certain number of words. `truncate` specifies
what should be used to notify that the string has been truncated,
defaulting to ellipsis.
"""
self._setup()
length = int(num)
if html:
return self._truncate_html(length, truncate, self._wrapped, length, True)
return self._text_words(length, truncate)
def _text_words(self, length, truncate):
"""
Truncate a string after a certain number of words.
Strip newlines in the string.
"""
words = self._wrapped.split()
if len(words) > length:
words = words[:length]
return self.add_truncation_text(' '.join(words), truncate)
return ' '.join(words)
def _truncate_html(self, length, truncate, text, truncate_len, words):
"""
Truncate HTML to a certain number of chars (not counting tags and
comments), or, if words is True, then to a certain number of words.
Close opened tags if they were correctly closed in the given HTML.
Preserve newlines in the HTML.
"""
if words and length <= 0:
return ''
html4_singlets = (
'br', 'col', 'link', 'base', 'img',
'param', 'area', 'hr', 'input'
)
# Count non-HTML chars/words and keep note of open tags
pos = 0
end_text_pos = 0
current_len = 0
open_tags = []
regex = re_words if words else re_chars
while current_len <= length:
m = regex.search(text, pos)
if not m:
# Checked through whole string
break
pos = m.end(0)
if m[1]:
# It's an actual non-HTML word or char
current_len += 1
if current_len == truncate_len:
end_text_pos = pos
continue
# Check for tag
tag = re_tag.match(m[0])
if not tag or current_len >= truncate_len:
# Don't worry about non tags or tags after our truncate point
continue
closing_tag, tagname, self_closing = tag.groups()
# Element names are always case-insensitive
tagname = tagname.lower()
if self_closing or tagname in html4_singlets:
pass
elif closing_tag:
# Check for match in open tags list
try:
i = open_tags.index(tagname)
except ValueError:
pass
else:
# SGML: An end tag closes, back to the matching start tag,
# all unclosed intervening start tags with omitted end tags
open_tags = open_tags[i + 1:]
else:
# Add it to the start of the open tags list
open_tags.insert(0, tagname)
if current_len <= length:
return text
out = text[:end_text_pos]
truncate_text = self.add_truncation_text('', truncate)
if truncate_text:
out += truncate_text
# Close any tags still open
for tag in open_tags:
out += '</%s>' % tag
# Return string
return out
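# Illustrative usage sketch (not part of Django): truncating plain text and
# HTML with Truncator. An explicit ``truncate`` string is passed so the
# example does not depend on configured translations; expected output is
# shown in the comments.
def _demo_truncator():
    t = Truncator('The quick brown fox jumped over the lazy dog.')
    print(t.words(3, truncate=' ...'))   # 'The quick brown ...'
    print(t.chars(12, truncate='...'))   # 'The quick...'
    h = Truncator('<p>The quick <em>brown fox</em> jumped.</p>')
    print(h.words(3, truncate='...', html=True))  # '<p>The quick <em>brown...</em></p>'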
@keep_lazy_text
def get_valid_filename(s):
"""
Return the given string converted to a string that can be used for a clean
filename. Remove leading and trailing spaces; convert other spaces to
underscores; and remove anything that is not an alphanumeric, dash,
underscore, or dot.
>>> get_valid_filename("john's portrait in 2004.jpg")
'johns_portrait_in_2004.jpg'
"""
s = str(s).strip().replace(' ', '_')
return re.sub(r'(?u)[^-\w.]', '', s)
@keep_lazy_text
def get_text_list(list_, last_word=gettext_lazy('or')):
"""
>>> get_text_list(['a', 'b', 'c', 'd'])
'a, b, c or d'
>>> get_text_list(['a', 'b', 'c'], 'and')
'a, b and c'
>>> get_text_list(['a', 'b'], 'and')
'a and b'
>>> get_text_list(['a'])
'a'
>>> get_text_list([])
''
"""
if not list_:
return ''
if len(list_) == 1:
return str(list_[0])
return '%s %s %s' % (
# Translators: This string is used as a separator between list elements
_(', ').join(str(i) for i in list_[:-1]), str(last_word), str(list_[-1])
)
@keep_lazy_text
def normalize_newlines(text):
"""Normalize CRLF and CR newlines to just LF."""
return re_newlines.sub('\n', str(text))
@keep_lazy_text
def phone2numeric(phone):
"""Convert a phone number with letters into its numeric equivalent."""
char2number = {
'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3', 'g': '4',
'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6', 'n': '6',
'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8', 'u': '8',
'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9',
}
return ''.join(char2number.get(c, c) for c in phone.lower())
# From http://www.xhaus.com/alan/python/httpcomp.html#gzip
# Used with permission.
def compress_string(s):
zbuf = BytesIO()
with GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0) as zfile:
zfile.write(s)
return zbuf.getvalue()
class StreamingBuffer(BytesIO):
def read(self):
ret = self.getvalue()
self.seek(0)
self.truncate()
return ret
# Like compress_string, but for iterators of strings.
def compress_sequence(sequence):
buf = StreamingBuffer()
with GzipFile(mode='wb', compresslevel=6, fileobj=buf, mtime=0) as zfile:
# Output headers...
yield buf.read()
for item in sequence:
zfile.write(item)
data = buf.read()
if data:
yield data
yield buf.read()
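# Illustrative usage sketch (not part of Django): compress_sequence() yields
# gzip output incrementally; joining the chunks gives a complete gzip stream
# that round-trips through gzip.decompress().
def _demo_compress_sequence():
    import gzip
    chunks = [b'hello ', b'streaming ', b'world']
    compressed = b''.join(compress_sequence(chunks))
    assert gzip.decompress(compressed) == b'hello streaming world'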
# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings).
smart_split_re = _lazy_re_compile(r"""
((?:
[^\s'"]*
(?:
(?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
[^\s'"]*
)+
) | \S+)
""", re.VERBOSE)
def smart_split(text):
r"""
Generator that splits a string by spaces, leaving quoted phrases together.
Supports both single and double quotes, and supports escaping quotes with
backslashes. In the output, strings will keep their initial and trailing
quote marks and escaped quotes will remain escaped (the results can then
be further processed with unescape_string_literal()).
>>> list(smart_split(r'This is "a person\'s" test.'))
['This', 'is', '"a person\\\'s"', 'test.']
>>> list(smart_split(r"Another 'person\'s' test."))
['Another', "'person\\'s'", 'test.']
>>> list(smart_split(r'A "\"funky\" style" test.'))
['A', '"\\"funky\\" style"', 'test.']
"""
for bit in smart_split_re.finditer(str(text)):
yield bit[0]
def _replace_entity(match):
text = match[1]
if text[0] == '#':
text = text[1:]
try:
if text[0] in 'xX':
c = int(text[1:], 16)
else:
c = int(text)
return chr(c)
except ValueError:
return match[0]
else:
try:
return chr(html.entities.name2codepoint[text])
except KeyError:
return match[0]
_entity_re = _lazy_re_compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")
@keep_lazy_text
def unescape_entities(text):
warnings.warn(
'django.utils.text.unescape_entities() is deprecated in favor of '
'html.unescape().',
RemovedInDjango40Warning, stacklevel=2,
)
return _entity_re.sub(_replace_entity, str(text))
@keep_lazy_text
def unescape_string_literal(s):
r"""
Convert quoted string literals to unquoted strings with escaped quotes and
backslashes unquoted::
>>> unescape_string_literal('"abc"')
'abc'
>>> unescape_string_literal("'abc'")
'abc'
>>> unescape_string_literal('"a \"bc\""')
'a "bc"'
>>> unescape_string_literal("'\'ab\' c'")
"'ab' c"
"""
if s[0] not in "\"'" or s[-1] != s[0]:
raise ValueError("Not a string literal: %r" % s)
quote = s[0]
return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\')
@keep_lazy_text
def slugify(value, allow_unicode=False):
"""
Convert to ASCII if 'allow_unicode' is False. Convert spaces to hyphens.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
"""
value = str(value)
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
else:
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
value = re.sub(r'[^\w\s-]', '', value.lower()).strip()
return re.sub(r'[-\s]+', '-', value)
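# Illustrative usage sketch (not part of Django): slugify() with and without
# allow_unicode; expected output is shown in the comments.
def _demo_slugify():
    print(slugify(' Déjà Vu! '))                      # 'deja-vu'
    print(slugify('你好, World', allow_unicode=True))  # '你好-world'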
def camel_case_to_spaces(value):
"""
Split CamelCase and convert to lowercase. Strip surrounding whitespace.
"""
return re_camel_case.sub(r' \1', value).strip().lower()
def _format_lazy(format_string, *args, **kwargs):
"""
Apply str.format() on 'format_string' where format_string, args,
and/or kwargs might be lazy.
"""
return format_string.format(*args, **kwargs)
format_lazy = lazy(_format_lazy, str)
|
2403d4288b1eb7c7ee170413d85623c9347cce3256bf742995a04dd270157614 | class CyclicDependencyError(ValueError):
pass
def topological_sort_as_sets(dependency_graph):
"""
Variation of Kahn's algorithm (1962) that returns sets.
Take a dependency graph as a dictionary of node => dependencies.
Yield sets of items in topological order, where the first set contains
all nodes without dependencies, and each following set contains all
nodes that may depend on the nodes only in the previously yielded sets.
"""
todo = dependency_graph.copy()
while todo:
current = {node for node, deps in todo.items() if not deps}
if not current:
raise CyclicDependencyError('Cyclic dependency in graph: {}'.format(
', '.join(repr(x) for x in todo.items())))
yield current
# remove current from todo's nodes & dependencies
todo = {node: (dependencies - current) for node, dependencies in
todo.items() if node not in current}
def stable_topological_sort(nodes, dependency_graph):
result = []
for layer in topological_sort_as_sets(dependency_graph):
for node in nodes:
if node in layer:
result.append(node)
return result
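# Illustrative usage sketch (not part of Django): sorting a small dependency
# graph. Each node maps to the set of nodes it depends on; set ordering in
# the printed output may vary.
def _demo_topological_sort():
    graph = {'a': set(), 'b': {'a'}, 'c': {'a'}, 'd': {'b', 'c'}}
    print(list(topological_sort_as_sets(graph)))    # [{'a'}, {'b', 'c'}, {'d'}]
    print(stable_topological_sort(['d', 'c', 'b', 'a'], graph))  # ['a', 'c', 'b', 'd']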
|
b4db356825740a5247272d9062eaab4a857656f33103abd08883e42852cb3d2c | import asyncio
import inspect
import warnings
from asgiref.sync import sync_to_async
class RemovedInDjango40Warning(DeprecationWarning):
pass
class RemovedInDjango41Warning(PendingDeprecationWarning):
pass
RemovedInNextVersionWarning = RemovedInDjango40Warning
class warn_about_renamed_method:
def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapped(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead." %
(self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning, 2)
return f(*args, **kwargs)
return wrapped
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super().__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`." %
(class_name, old_method_name, new_method_name),
deprecation_warning, 2)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
class DeprecationInstanceCheck(type):
def __instancecheck__(self, instance):
warnings.warn(
"`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
self.deprecation_warning, 2
)
return super().__instancecheck__(instance)
class MiddlewareMixin:
sync_capable = True
async_capable = True
# RemovedInDjango40Warning: when the deprecation ends, replace with:
# def __init__(self, get_response):
def __init__(self, get_response=None):
self._get_response_none_deprecation(get_response)
self.get_response = get_response
self._async_check()
super().__init__()
def _async_check(self):
"""
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
"""
if asyncio.iscoroutinefunction(self.get_response):
# Mark the class as async-capable, but do the actual switch
# inside __call__ to avoid swapping out dunder methods
self._is_coroutine = asyncio.coroutines._is_coroutine
def __call__(self, request):
# Exit out to async mode, if needed
if asyncio.iscoroutinefunction(self.get_response):
return self.__acall__(request)
response = None
if hasattr(self, 'process_request'):
response = self.process_request(request)
response = response or self.get_response(request)
if hasattr(self, 'process_response'):
response = self.process_response(request, response)
return response
async def __acall__(self, request):
"""
Async version of __call__ that is swapped in when an async request
is running.
"""
response = None
if hasattr(self, 'process_request'):
response = await sync_to_async(self.process_request)(request)
response = response or await self.get_response(request)
if hasattr(self, 'process_response'):
response = await sync_to_async(self.process_response)(request, response)
return response
def _get_response_none_deprecation(self, get_response):
if get_response is None:
warnings.warn(
'Passing None for the middleware get_response argument is '
'deprecated.',
RemovedInDjango40Warning, stacklevel=3,
)
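# Illustrative sketch (not part of Django): a middleware written against the
# old process_request/process_response hooks. MiddlewareMixin makes it work
# with both sync and async request handling; the response header name is
# arbitrary and used only for this example.
class _DemoHeaderMiddleware(MiddlewareMixin):
    def process_request(self, request):
        # Returning None lets the request continue down the middleware chain.
        return None

    def process_response(self, request, response):
        response['X-Demo'] = 'yes'
        return response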
|
b2e2ad6396a3cf09a8d25363f7b043e379be8cbebd82b54f670e709d71cadd4e | """
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
import calendar
import datetime
import time
from email.utils import format_datetime as format_datetime_rfc5322
from django.utils.dates import (
MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR,
)
from django.utils.regex_helper import _lazy_re_compile
from django.utils.timezone import (
get_default_timezone, is_aware, is_naive, make_aware,
)
from django.utils.translation import gettext as _
re_formatchars = _lazy_re_compile(r'(?<!\\)([aAbcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])')
re_escaped = _lazy_re_compile(r'\\(.)')
class Formatter:
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(str(formatstr))):
if i % 2:
if type(self.data) is datetime.date and hasattr(TimeFormat, piece):
raise TypeError(
"The format for date objects may not contain "
"time-related format specifiers (found '%s')." % piece
)
pieces.append(str(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return ''.join(pieces)
class TimeFormat(Formatter):
def __init__(self, obj):
self.data = obj
self.timezone = None
# We only support timezone when formatting datetime objects,
# not date objects (timezone information not appropriate),
# or time objects (against established django policy).
if isinstance(obj, datetime.datetime):
if is_naive(obj):
self.timezone = get_default_timezone()
else:
self.timezone = obj.tzinfo
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def e(self):
"""
Timezone name.
If timezone information is not available, return an empty string.
"""
if not self.timezone:
return ""
try:
if hasattr(self.data, 'tzinfo') and self.data.tzinfo:
return self.data.tzname() or ''
except NotImplementedError:
pass
return ""
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return '%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return '%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return '%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return '%02d' % self.data.minute
def O(self): # NOQA: E743, E741
"""
Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
If timezone information is not available, return an empty string.
"""
if not self.timezone:
return ""
seconds = self.Z()
if seconds == "":
return ""
sign = '-' if seconds < 0 else '+'
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return '%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return '%02d' % self.data.second
def T(self):
"""
Time zone of this machine; e.g. 'EST' or 'MDT'.
If timezone information is not available, return an empty string.
"""
if not self.timezone:
return ""
name = None
try:
name = self.timezone.tzname(self.data)
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
pass
if name is None:
name = self.format('O')
return str(name)
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return '%06d' % self.data.microsecond
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
If timezone information is not available, return an empty string.
"""
if not self.timezone:
return ""
try:
offset = self.timezone.utcoffset(self.data)
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
return ""
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
class DateFormat(TimeFormat):
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return '%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self): # NOQA: E743, E741
"'1' if Daylight Savings Time, '0' otherwise."
try:
if self.timezone and self.timezone.dst(self.data):
return '1'
else:
return '0'
except Exception:
# pytz raises AmbiguousTimeError during the autumn DST change.
# This happens mainly when __init__ receives a naive datetime
# and sets self.timezone = get_default_timezone().
return ''
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self): # NOQA: E743, E741
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return '%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar()[0]
def r(self):
"RFC 5322 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
if type(self.data) is datetime.date:
raise TypeError(
"The format for date objects may not contain time-related "
"format specifiers (found 'r')."
)
if is_naive(self.data):
dt = make_aware(self.data, timezone=self.timezone)
else:
dt = self.data
return format_datetime_rfc5322(dt)
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return 'th'
last = self.data.day % 10
if last == 1:
return 'st'
if last == 2:
return 'nd'
if last == 3:
return 'rd'
return 'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if isinstance(self.data, datetime.datetime) and is_aware(self.data):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
return self.data.isocalendar()[1]
def y(self):
"Year, 2 digits; e.g. '99'"
return str(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"""Day of the year, i.e. 1 to 366."""
return self.data.timetuple().tm_yday
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
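# Illustrative usage sketch (not part of Django): formatting an aware
# datetime with PHP-style specifiers. An aware datetime is used so the
# example does not depend on settings.TIME_ZONE, and only untranslated
# specifiers are shown; expected output is in the comments.
def _demo_dateformat():
    d = datetime.datetime(2003, 10, 7, 11, 39, tzinfo=datetime.timezone.utc)
    print(format(d, 'jS'))            # '7th'
    print(format(d, 'Y-m-d H:i'))     # '2003-10-07 11:39'
    print(time_format(d, 'H:i:s O'))  # '11:39:00 +0000'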
|
2876e582f970c5b51c8a0366bdde580d7a1ad679d407f590ade57a476aae2a5a | import base64
import calendar
import datetime
import re
import unicodedata
import warnings
from binascii import Error as BinasciiError
from email.utils import formatdate
from urllib.parse import (
ParseResult, SplitResult, _coerce_args, _splitnetloc, _splitparams, quote,
quote_plus, scheme_chars, unquote, unquote_plus,
urlencode as original_urlencode, uses_params,
)
from django.core.exceptions import TooManyFieldsSent
from django.utils.datastructures import MultiValueDict
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import keep_lazy_text
from django.utils.regex_helper import _lazy_re_compile
# based on RFC 7232, Appendix C
ETAG_MATCH = _lazy_re_compile(r'''
\A( # start of string and capture group
(?:W/)? # optional weak indicator
" # opening quote
[^"]* # any sequence of non-quote characters
" # end quote
)\Z # end of string and capture group
''', re.X)
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = _lazy_re_compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = _lazy_re_compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = _lazy_re_compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
RFC3986_GENDELIMS = ":/?#[]@"
RFC3986_SUBDELIMS = "!$&'()*+,;="
FIELDS_MATCH = _lazy_re_compile('[&;]')
@keep_lazy_text
def urlquote(url, safe='/'):
"""
A legacy compatibility wrapper to Python's urllib.parse.quote() function.
(was used for unicode handling on Python 2)
"""
warnings.warn(
'django.utils.http.urlquote() is deprecated in favor of '
'urllib.parse.quote().',
RemovedInDjango40Warning, stacklevel=2,
)
return quote(url, safe)
@keep_lazy_text
def urlquote_plus(url, safe=''):
"""
A legacy compatibility wrapper to Python's urllib.parse.quote_plus()
function. (was used for unicode handling on Python 2)
"""
warnings.warn(
'django.utils.http.urlquote_plus() is deprecated in favor of '
        'urllib.parse.quote_plus().',
RemovedInDjango40Warning, stacklevel=2,
)
return quote_plus(url, safe)
@keep_lazy_text
def urlunquote(quoted_url):
"""
A legacy compatibility wrapper to Python's urllib.parse.unquote() function.
(was used for unicode handling on Python 2)
"""
warnings.warn(
'django.utils.http.urlunquote() is deprecated in favor of '
'urllib.parse.unquote().',
RemovedInDjango40Warning, stacklevel=2,
)
return unquote(quoted_url)
@keep_lazy_text
def urlunquote_plus(quoted_url):
"""
A legacy compatibility wrapper to Python's urllib.parse.unquote_plus()
function. (was used for unicode handling on Python 2)
"""
warnings.warn(
'django.utils.http.urlunquote_plus() is deprecated in favor of '
'urllib.parse.unquote_plus().',
RemovedInDjango40Warning, stacklevel=2,
)
return unquote_plus(quoted_url)
def urlencode(query, doseq=False):
"""
A version of Python's urllib.parse.urlencode() function that can operate on
MultiValueDict and non-string values.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
query_params = []
for key, value in query:
if value is None:
raise TypeError(
"Cannot encode None for key '%s' in a query string. Did you "
"mean to pass an empty string or omit the value?" % key
)
elif not doseq or isinstance(value, (str, bytes)):
query_val = value
else:
try:
itr = iter(value)
except TypeError:
query_val = value
else:
# Consume generators and iterators, when doseq=True, to
# work around https://bugs.python.org/issue31706.
query_val = []
for item in itr:
if item is None:
raise TypeError(
"Cannot encode None for key '%s' in a query "
"string. Did you mean to pass an empty string or "
"omit the value?" % key
)
elif not isinstance(item, bytes):
item = str(item)
query_val.append(item)
query_params.append((key, query_val))
return original_urlencode(query_params, doseq)
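# Illustrative usage sketch (not part of Django): with doseq=True each
# element of an iterable value becomes its own key/value pair.
def _demo_urlencode():
    print(urlencode({'q': 'django'}))                  # 'q=django'
    print(urlencode({'tag': ['a', 'b']}, doseq=True))  # 'tag=a&tag=b'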
def http_date(epoch_seconds=None):
"""
Format the time to match the RFC1123 date format as specified by HTTP
RFC7231 section 7.1.1.1.
`epoch_seconds` is a floating point number expressed in seconds since the
epoch, in UTC - such as that outputted by time.time(). If set to None, it
defaults to the current time.
Output a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
return formatdate(epoch_seconds, usegmt=True)
def parse_http_date(date):
"""
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Return an integer expressed in seconds since the epoch, in UTC.
"""
# email.utils.parsedate() does the job for RFC1123 dates; unfortunately
# RFC7231 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m['year'])
if year < 100:
current_year = datetime.datetime.utcnow().year
current_century = current_year - (current_year % 100)
if year - (current_year % 100) > 50:
                # A year that appears to be more than 50 years in the future
                # is interpreted as representing the past.
year += current_century - 100
else:
year += current_century
month = MONTHS.index(m['mon'].lower()) + 1
day = int(m['day'])
hour = int(m['hour'])
min = int(m['min'])
sec = int(m['sec'])
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception as exc:
raise ValueError("%r is not a valid date" % date) from exc
def parse_http_date_safe(date):
"""
Same as parse_http_date, but return None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
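# Illustrative usage sketch (not part of Django): round-tripping an HTTP date
# through http_date() and parse_http_date(); the timestamp is the classic
# RFC example 'Sun, 06 Nov 1994 08:49:37 GMT'.
def _demo_http_date():
    stamp = 784111777  # seconds since the epoch, UTC
    print(http_date(stamp))  # 'Sun, 06 Nov 1994 08:49:37 GMT'
    assert parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT') == stamp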
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
Convert a base 36 string to an int. Raise ValueError if the input won't fit
into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is longer than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
return int(s, 36)
def int_to_base36(i):
"""Convert an integer to a base36 string."""
char_set = '0123456789abcdefghijklmnopqrstuvwxyz'
if i < 0:
raise ValueError("Negative base36 conversion input.")
if i < 36:
return char_set[i]
b36 = ''
while i != 0:
i, n = divmod(i, 36)
b36 = char_set[n] + b36
return b36
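# Illustrative usage sketch (not part of Django): base36 values round-trip
# through int_to_base36() and base36_to_int().
def _demo_base36():
    assert int_to_base36(1234567890) == 'kf12oi'
    assert base36_to_int('kf12oi') == 1234567890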
def urlsafe_base64_encode(s):
"""
Encode a bytestring to a base64 string for use in URLs. Strip any trailing
equal signs.
"""
return base64.urlsafe_b64encode(s).rstrip(b'\n=').decode('ascii')
def urlsafe_base64_decode(s):
"""
Decode a base64 encoded string. Add back any trailing equal signs that
might have been stripped.
"""
s = s.encode()
try:
return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'='))
except (LookupError, BinasciiError) as e:
raise ValueError(e)
def parse_etags(etag_str):
"""
Parse a string of ETags given in an If-None-Match or If-Match header as
defined by RFC 7232. Return a list of quoted ETags, or ['*'] if all ETags
should be matched.
"""
if etag_str.strip() == '*':
return ['*']
else:
# Parse each ETag individually, and return any that are valid.
etag_matches = (ETAG_MATCH.match(etag.strip()) for etag in etag_str.split(','))
return [match[1] for match in etag_matches if match]
def quote_etag(etag_str):
"""
If the provided string is already a quoted ETag, return it. Otherwise, wrap
the string in quotes, making it a strong ETag.
"""
if ETAG_MATCH.match(etag_str):
return etag_str
else:
return '"%s"' % etag_str
def is_same_domain(host, pattern):
"""
Return ``True`` if the host is either an exact match or a match
to the wildcard pattern.
Any pattern beginning with a period matches a domain and all of its
subdomains. (e.g. ``.example.com`` matches ``example.com`` and
``foo.example.com``). Anything else is an exact string match.
"""
if not pattern:
return False
pattern = pattern.lower()
return (
pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or
pattern == host
)
def url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False):
"""
Return ``True`` if the url uses an allowed host and a safe scheme.
Always return ``False`` on an empty url.
If ``require_https`` is ``True``, only 'https' will be considered a valid
scheme, as opposed to 'http' and 'https' with the default, ``False``.
Note: "True" doesn't entail that a URL is "safe". It may still be e.g.
    quoted incorrectly. Be sure to also use django.utils.encoding.iri_to_uri()
on the path component of untrusted URLs.
"""
if url is not None:
url = url.strip()
if not url:
return False
if allowed_hosts is None:
allowed_hosts = set()
elif isinstance(allowed_hosts, str):
allowed_hosts = {allowed_hosts}
# Chrome treats \ completely as / in paths but it could be part of some
# basic auth credentials so we need to check both URLs.
return (
_url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=require_https) and
_url_has_allowed_host_and_scheme(url.replace('\\', '/'), allowed_hosts, require_https=require_https)
)
def is_safe_url(url, allowed_hosts, require_https=False):
warnings.warn(
'django.utils.http.is_safe_url() is deprecated in favor of '
'url_has_allowed_host_and_scheme().',
RemovedInDjango40Warning, stacklevel=2,
)
return url_has_allowed_host_and_scheme(url, allowed_hosts, require_https)
# Copied from urllib.parse.urlparse() but uses fixed urlsplit() function.
def _urlparse(url, scheme='', allow_fragments=True):
"""Parse a URL into 6 components:
<scheme>://<netloc>/<path>;<params>?<query>#<fragment>
Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
url, scheme, _coerce_result = _coerce_args(url, scheme)
splitresult = _urlsplit(url, scheme, allow_fragments)
scheme, netloc, url, query, fragment = splitresult
if scheme in uses_params and ';' in url:
url, params = _splitparams(url)
else:
params = ''
result = ParseResult(scheme, netloc, url, params, query, fragment)
return _coerce_result(result)
# Copied from urllib.parse.urlsplit() with
# https://github.com/python/cpython/pull/661 applied.
def _urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
Return a 5-tuple: (scheme, netloc, path, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
url, scheme, _coerce_result = _coerce_args(url, scheme)
netloc = query = fragment = ''
i = url.find(':')
if i > 0:
for c in url[:i]:
if c not in scheme_chars:
break
else:
scheme, url = url[:i].lower(), url[i + 1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
return _coerce_result(v)
def _url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False):
# Chrome considers any URL with more than two slashes to be absolute, but
# urlparse is not so flexible. Treat any url with three slashes as unsafe.
if url.startswith('///'):
return False
try:
url_info = _urlparse(url)
except ValueError: # e.g. invalid IPv6 addresses
return False
# Forbid URLs like http:///example.com - with a scheme, but without a hostname.
    # In that URL, example.com is not the hostname but a path component. However,
# Chrome will still consider example.com to be the hostname, so we must not
# allow this syntax.
if not url_info.netloc and url_info.scheme:
return False
# Forbid URLs that start with control characters. Some browsers (like
# Chrome) ignore quite a few control characters at the start of a
# URL and might consider the URL as scheme relative.
if unicodedata.category(url[0])[0] == 'C':
return False
scheme = url_info.scheme
# Consider URLs without a scheme (e.g. //example.com/p) to be http.
if not url_info.scheme and url_info.netloc:
scheme = 'http'
valid_schemes = ['https'] if require_https else ['http', 'https']
return ((not url_info.netloc or url_info.netloc in allowed_hosts) and
(not scheme or scheme in valid_schemes))
def limited_parse_qsl(qs, keep_blank_values=False, encoding='utf-8',
errors='replace', fields_limit=None):
"""
Return a list of key/value tuples parsed from query string.
Copied from urlparse with an additional "fields_limit" argument.
Copyright (C) 2013 Python Software Foundation (see LICENSE.python).
Arguments:
qs: percent-encoded query string to be parsed
keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings. A
true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
encoding and errors: specify how to decode percent-encoded sequences
into Unicode characters, as accepted by the bytes.decode() method.
fields_limit: maximum number of fields parsed or an exception
is raised. None means no limit and is the default.
"""
if fields_limit:
pairs = FIELDS_MATCH.split(qs, fields_limit)
if len(pairs) > fields_limit:
raise TooManyFieldsSent(
'The number of GET/POST parameters exceeded '
'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.'
)
else:
pairs = FIELDS_MATCH.split(qs)
r = []
for name_value in pairs:
if not name_value:
continue
nv = name_value.split('=', 1)
if len(nv) != 2:
# Handle case of a control-name with no equal sign
if keep_blank_values:
nv.append('')
else:
continue
if nv[1] or keep_blank_values:
name = nv[0].replace('+', ' ')
name = unquote(name, encoding=encoding, errors=errors)
value = nv[1].replace('+', ' ')
value = unquote(value, encoding=encoding, errors=errors)
r.append((name, value))
return r
def escape_leading_slashes(url):
"""
If redirecting to an absolute path (two leading slashes), a slash must be
escaped to prevent browsers from handling the path as schemaless and
redirecting to another host.
"""
if url.startswith('//'):
url = '/%2F{}'.format(url[2:])
return url
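# Illustrative usage sketch (not part of Django): typical "safe redirect"
# checks with url_has_allowed_host_and_scheme(); the host names are
# hypothetical.
def _demo_redirect_checks():
    allowed = {'example.com'}
    assert url_has_allowed_host_and_scheme('/accounts/profile/', allowed)
    assert url_has_allowed_host_and_scheme('https://example.com/next', allowed)
    assert not url_has_allowed_host_and_scheme('https://evil.com/next', allowed)
    assert not url_has_allowed_host_and_scheme('///evil.com', allowed)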
|
1648078c7e656ddf62d17fa872426554ae9301af22ab6daa477635aab9e62c36 | """JsLex: a lexer for Javascript"""
# Originally from https://bitbucket.org/ned/jslex
import re
class Tok:
"""
A specification for a token class.
"""
num = 0
def __init__(self, name, regex, next=None):
self.id = Tok.num
Tok.num += 1
self.name = name
self.regex = regex
self.next = next
def literals(choices, prefix="", suffix=""):
"""
Create a regex from a space-separated list of literal `choices`.
If provided, `prefix` and `suffix` will be attached to each choice
individually.
"""
return "|".join(prefix + re.escape(c) + suffix for c in choices.split())
class Lexer:
"""
A generic multi-state regex-based lexer.
"""
def __init__(self, states, first):
self.regexes = {}
self.toks = {}
for state, rules in states.items():
parts = []
for tok in rules:
groupid = "t%d" % tok.id
self.toks[groupid] = tok
parts.append("(?P<%s>%s)" % (groupid, tok.regex))
self.regexes[state] = re.compile("|".join(parts), re.MULTILINE | re.VERBOSE)
self.state = first
def lex(self, text):
"""
Lexically analyze `text`.
Yield pairs (`name`, `tokentext`).
"""
end = len(text)
state = self.state
regexes = self.regexes
toks = self.toks
start = 0
while start < end:
for match in regexes[state].finditer(text, start):
name = match.lastgroup
tok = toks[name]
toktext = match[name]
start += len(toktext)
yield (tok.name, toktext)
if tok.next:
state = tok.next
break
self.state = state
class JsLexer(Lexer):
"""
A Javascript lexer
>>> lexer = JsLexer()
>>> list(lexer.lex("a = 1"))
[('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')]
This doesn't properly handle non-ASCII characters in the Javascript source.
"""
# Because these tokens are matched as alternatives in a regex, longer
# possibilities must appear in the list before shorter ones, for example,
# '>>' before '>'.
#
# Note that we don't have to detect malformed Javascript, only properly
# lex correct Javascript, so much of this is simplified.
# Details of Javascript lexical structure are taken from
# http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf
# A useful explanation of automatic semicolon insertion is at
# http://inimino.org/~inimino/blog/javascript_semicolons
both_before = [
Tok("comment", r"/\*(.|\n)*?\*/"),
Tok("linecomment", r"//.*?$"),
Tok("ws", r"\s+"),
Tok("keyword", literals("""
break case catch class const continue debugger
default delete do else enum export extends
finally for function if import in instanceof
new return super switch this throw try typeof
var void while with
""", suffix=r"\b"), next='reg'),
Tok("reserved", literals("null true false", suffix=r"\b"), next='div'),
Tok("id", r"""
            ([a-zA-Z_$ ]|\\u[0-9a-fA-F]{4}) # first char
([a-zA-Z_$0-9]|\\u[0-9a-fA-F]{4})* # rest chars
""", next='div'),
Tok("hnum", r"0[xX][0-9a-fA-F]+", next='div'),
Tok("onum", r"0[0-7]+"),
Tok("dnum", r"""
( (0|[1-9][0-9]*) # DecimalIntegerLiteral
\. # dot
[0-9]* # DecimalDigits-opt
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
\. # dot
[0-9]+ # DecimalDigits
([eE][-+]?[0-9]+)? # ExponentPart-opt
|
(0|[1-9][0-9]*) # DecimalIntegerLiteral
([eE][-+]?[0-9]+)? # ExponentPart-opt
)
""", next='div'),
Tok("punct", literals("""
>>>= === !== >>> <<= >>= <= >= == != << >> &&
|| += -= *= %= &= |= ^=
"""), next="reg"),
Tok("punct", literals("++ -- ) ]"), next='div'),
Tok("punct", literals("{ } ( [ . ; , < > + - * % & | ^ ! ~ ? : ="), next='reg'),
Tok("string", r'"([^"\\]|(\\(.|\n)))*?"', next='div'),
Tok("string", r"'([^'\\]|(\\(.|\n)))*?'", next='div'),
]
both_after = [
Tok("other", r"."),
]
states = {
# slash will mean division
'div': both_before + [
Tok("punct", literals("/= /"), next='reg'),
] + both_after,
# slash will mean regex
'reg': both_before + [
Tok("regex",
r"""
/ # opening slash
# First character is..
( [^*\\/[] # anything but * \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)
# Following characters are same, except for excluding a star
( [^\\/[] # anything but \ / or [
| \\. # or an escape sequence
| \[ # or a class, which has
( [^\]\\] # anything but \ or ]
| \\. # or an escape sequence
)* # many times
\]
)* # many times
/ # closing slash
[a-zA-Z0-9]* # trailing flags
""", next='div'),
] + both_after,
}
def __init__(self):
super().__init__(self.states, 'reg')
def prepare_js_for_gettext(js):
"""
Convert the Javascript source `js` into something resembling C for
xgettext.
What actually happens is that all the regex literals are replaced with
"REGEX".
"""
def escape_quotes(m):
"""Used in a regex to properly escape double quotes."""
s = m[0]
if s == '"':
return r'\"'
else:
return s
lexer = JsLexer()
c = []
for name, tok in lexer.lex(js):
if name == 'regex':
# C doesn't grok regexes, and they aren't needed for gettext,
# so just output a string instead.
tok = '"REGEX"'
elif name == 'string':
# C doesn't have single-quoted strings, so make all strings
# double-quoted.
if tok.startswith("'"):
guts = re.sub(r"\\.|.", escape_quotes, tok[1:-1])
tok = '"' + guts + '"'
elif name == 'id':
# C can't deal with Unicode escapes in identifiers. We don't
# need them for gettext anyway, so replace them with something
# innocuous
tok = tok.replace("\\", "U")
c.append(tok)
return ''.join(c)
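# Illustrative usage sketch (not part of Django): lexing a small JavaScript
# snippet and preparing one for xgettext; expected output is in the comments.
def _demo_jslex():
    print(list(JsLexer().lex('x = 1')))
    # [('id', 'x'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')]
    print(prepare_js_for_gettext("gettext('hi'); var re = /a+/;"))
    # gettext("hi"); var re = "REGEX";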
|
01ca5a91123f00e89500dedce2bc1abb15dadae8dffeaf2d312ab010525e80c0 | """
This is the Django template system.
How it works:
The Lexer.tokenize() method converts a template string (i.e., a string
containing markup with custom template tags) to tokens, which can be either
plain text (TokenType.TEXT), variables (TokenType.VAR), or block statements
(TokenType.BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of a ForNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = '<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
'<html></html>'
"""
import inspect
import logging
import re
from enum import Enum
from django.template.context import BaseContext
from django.utils.formats import localize
from django.utils.html import conditional_escape, escape
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, mark_safe
from django.utils.text import (
get_text_list, smart_split, unescape_string_literal,
)
from django.utils.timezone import template_localtime
from django.utils.translation import gettext_lazy, pgettext_lazy
from .exceptions import TemplateSyntaxError
# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = '<unknown source>'
# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = (_lazy_re_compile('(%s.*?%s|%s.*?%s|%s.*?%s)' %
(re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END))))
logger = logging.getLogger('django.template')
class TokenType(Enum):
TEXT = 0
VAR = 1
BLOCK = 2
COMMENT = 3
class VariableDoesNotExist(Exception):
def __init__(self, msg, params=()):
self.msg = msg
self.params = params
def __str__(self):
return self.msg % self.params
class Origin:
def __init__(self, name, template_name=None, loader=None):
self.name = name
self.template_name = template_name
self.loader = loader
def __str__(self):
return self.name
def __eq__(self, other):
return (
isinstance(other, Origin) and
self.name == other.name and
self.loader == other.loader
)
@property
def loader_name(self):
if self.loader:
return '%s.%s' % (
self.loader.__module__, self.loader.__class__.__name__,
)
class Template:
def __init__(self, template_string, origin=None, name=None, engine=None):
# If Template is instantiated directly rather than from an Engine and
# exactly one Django template engine is configured, use that engine.
# This is required to preserve backwards-compatibility for direct use
# e.g. Template('...').render(Context({...}))
if engine is None:
from .engine import Engine
engine = Engine.get_default()
if origin is None:
origin = Origin(UNKNOWN_SOURCE)
self.name = name
self.origin = origin
self.engine = engine
self.source = str(template_string) # May be lazy.
self.nodelist = self.compile_nodelist()
def __iter__(self):
for node in self.nodelist:
yield from node
def _render(self, context):
return self.nodelist.render(context)
def render(self, context):
"Display stage -- can be called many times"
with context.render_context.push_state(self):
if context.template is None:
with context.bind_template(self):
context.template_name = self.name
return self._render(context)
else:
return self._render(context)
def compile_nodelist(self):
"""
Parse and compile the template source into a nodelist. If debug
is True and an exception occurs during parsing, the exception is
annotated with contextual line information where it occurred in the
template source.
"""
if self.engine.debug:
lexer = DebugLexer(self.source)
else:
lexer = Lexer(self.source)
tokens = lexer.tokenize()
parser = Parser(
tokens, self.engine.template_libraries, self.engine.template_builtins,
self.origin,
)
try:
return parser.parse()
except Exception as e:
if self.engine.debug:
e.template_debug = self.get_exception_info(e, e.token)
raise
def get_exception_info(self, exception, token):
"""
Return a dictionary containing contextual line information of where
the exception occurred in the template. The following information is
provided:
message
The message of the exception raised.
source_lines
The lines before, after, and including the line the exception
occurred on.
line
The line number the exception occurred on.
before, during, after
The line the exception occurred on split into three parts:
1. The content before the token that raised the error.
2. The token that raised the error.
3. The content after the token that raised the error.
total
The number of lines in source_lines.
top
The line number where source_lines starts.
bottom
The line number where source_lines ends.
start
The start position of the token in the template source.
end
The end position of the token in the template source.
"""
start, end = token.position
context_lines = 10
line = 0
upto = 0
source_lines = []
before = during = after = ""
for num, next in enumerate(linebreak_iter(self.source)):
if start >= upto and end <= next:
line = num
before = escape(self.source[upto:start])
during = escape(self.source[start:end])
after = escape(self.source[end:next])
source_lines.append((num, escape(self.source[upto:next])))
upto = next
total = len(source_lines)
top = max(1, line - context_lines)
bottom = min(total, line + 1 + context_lines)
        # In some rare cases the exception's args can be empty or an invalid
# string.
try:
message = str(exception.args[0])
except (IndexError, UnicodeDecodeError):
message = '(Could not get exception message)'
return {
'message': message,
'source_lines': source_lines[top:bottom],
'before': before,
'during': during,
'after': after,
'top': top,
'bottom': bottom,
'total': total,
'line': line,
'name': self.origin.name,
'start': start,
'end': end,
}
def linebreak_iter(template_source):
yield 0
p = template_source.find('\n')
while p >= 0:
yield p + 1
p = template_source.find('\n', p + 1)
yield len(template_source) + 1
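# For example (illustrative): each yielded value is the offset just past a
# newline, bracketed by 0 and len(source) + 1, so consecutive pairs delimit
# the lines that get_exception_info() slices out of the source.
#
#   >>> list(linebreak_iter('a\nbc\nd'))
#   [0, 2, 5, 7]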
class Token:
def __init__(self, token_type, contents, position=None, lineno=None):
"""
A token representing a string from the template.
token_type
A TokenType, either .TEXT, .VAR, .BLOCK, or .COMMENT.
contents
The token source string.
position
An optional tuple containing the start and end index of the token
in the template source. This is used for traceback information
when debug is on.
lineno
The line number the token appears on in the template source.
This is used for traceback information and gettext files.
"""
self.token_type, self.contents = token_type, contents
self.lineno = lineno
self.position = position
def __str__(self):
token_name = self.token_type.name.capitalize()
return ('<%s token: "%s...">' %
(token_name, self.contents[:20].replace('\n', '')))
def split_contents(self):
split = []
bits = smart_split(self.contents)
for bit in bits:
# Handle translation-marked template pieces
if bit.startswith(('_("', "_('")):
sentinel = bit[2] + ')'
trans_bit = [bit]
while not bit.endswith(sentinel):
bit = next(bits)
trans_bit.append(bit)
bit = ' '.join(trans_bit)
split.append(bit)
return split
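    # Behavior sketch: quoted arguments and translation-marked strings survive
    # smart_split() as single bits.
    #
    #   >>> Token(TokenType.BLOCK, 'url "client-detail-view" client.id').split_contents()
    #   ['url', '"client-detail-view"', 'client.id']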
class Lexer:
def __init__(self, template_string):
self.template_string = template_string
self.verbatim = False
def tokenize(self):
"""
Return a list of tokens from a given template_string.
"""
in_tag = False
lineno = 1
result = []
for bit in tag_re.split(self.template_string):
if bit:
result.append(self.create_token(bit, None, lineno, in_tag))
in_tag = not in_tag
lineno += bit.count('\n')
return result
def create_token(self, token_string, position, lineno, in_tag):
"""
Convert the given token string into a new Token object and return it.
If in_tag is True, we are processing something that matched a tag,
otherwise it should be treated as a literal string.
"""
if in_tag and token_string.startswith(BLOCK_TAG_START):
# The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
# We could do len(BLOCK_TAG_START) to be more "correct", but we've
# hard-coded the 2s here for performance. And it's not like
# the TAG_START values are going to change anytime, anyway.
block_content = token_string[2:-2].strip()
if self.verbatim and block_content == self.verbatim:
self.verbatim = False
if in_tag and not self.verbatim:
if token_string.startswith(VARIABLE_TAG_START):
return Token(TokenType.VAR, token_string[2:-2].strip(), position, lineno)
elif token_string.startswith(BLOCK_TAG_START):
if block_content[:9] in ('verbatim', 'verbatim '):
self.verbatim = 'end%s' % block_content
return Token(TokenType.BLOCK, block_content, position, lineno)
elif token_string.startswith(COMMENT_TAG_START):
content = ''
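                # Note: str.find() returns -1 (truthy) when the marker is
                # absent and cannot return 0 here because the token starts
                # with '{#', so the comment's content is kept either way; the
                # 'Translators' marker only matters to gettext extraction.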
if token_string.find(TRANSLATOR_COMMENT_MARK):
content = token_string[2:-2].strip()
return Token(TokenType.COMMENT, content, position, lineno)
else:
return Token(TokenType.TEXT, token_string, position, lineno)
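    # Tokenizing sketch: empty bits from the split are skipped but still
    # toggle in_tag, so the alternation stays correct for adjacent tags.
    #
    #   >>> [t.token_type.name for t in Lexer('{% if x %}{{ x }}{% endif %}').tokenize()]
    #   ['BLOCK', 'VAR', 'BLOCK']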
class DebugLexer(Lexer):
def tokenize(self):
"""
        Split a template string into tokens and annotate each token with its
start and end position in the source. This is slower than the default
lexer so only use it when debug is True.
"""
lineno = 1
result = []
upto = 0
for match in tag_re.finditer(self.template_string):
start, end = match.span()
if start > upto:
token_string = self.template_string[upto:start]
result.append(self.create_token(token_string, (upto, start), lineno, in_tag=False))
lineno += token_string.count('\n')
token_string = self.template_string[start:end]
result.append(self.create_token(token_string, (start, end), lineno, in_tag=True))
lineno += token_string.count('\n')
upto = end
last_bit = self.template_string[upto:]
if last_bit:
result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), lineno, in_tag=False))
return result
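    # Annotation sketch: each token records its (start, end) span so that
    # get_exception_info() can point at the exact source characters.
    #
    #   >>> [(t.token_type.name, t.position) for t in DebugLexer('a{{ b }}').tokenize()]
    #   [('TEXT', (0, 1)), ('VAR', (1, 8))]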
class Parser:
def __init__(self, tokens, libraries=None, builtins=None, origin=None):
# Reverse the tokens so delete_first_token(), prepend_token(), and
# next_token() can operate at the end of the list in constant time.
self.tokens = list(reversed(tokens))
self.tags = {}
self.filters = {}
self.command_stack = []
if libraries is None:
libraries = {}
if builtins is None:
builtins = []
self.libraries = libraries
for builtin in builtins:
self.add_library(builtin)
self.origin = origin
def parse(self, parse_until=None):
"""
        Iterate through the parser tokens and compile each one into a node.
If parse_until is provided, parsing will stop once one of the
specified tokens has been reached. This is formatted as a list of
tokens, e.g. ['elif', 'else', 'endif']. If no matching token is
reached, raise an exception with the unclosed block tag details.
"""
if parse_until is None:
parse_until = []
nodelist = NodeList()
while self.tokens:
token = self.next_token()
# Use the raw values here for TokenType.* for a tiny performance boost.
if token.token_type.value == 0: # TokenType.TEXT
self.extend_nodelist(nodelist, TextNode(token.contents), token)
elif token.token_type.value == 1: # TokenType.VAR
if not token.contents:
raise self.error(token, 'Empty variable tag on line %d' % token.lineno)
try:
filter_expression = self.compile_filter(token.contents)
except TemplateSyntaxError as e:
raise self.error(token, e)
var_node = VariableNode(filter_expression)
self.extend_nodelist(nodelist, var_node, token)
elif token.token_type.value == 2: # TokenType.BLOCK
try:
command = token.contents.split()[0]
except IndexError:
raise self.error(token, 'Empty block tag on line %d' % token.lineno)
if command in parse_until:
# A matching token has been reached. Return control to
# the caller. Put the token back on the token list so the
# caller knows where it terminated.
self.prepend_token(token)
return nodelist
# Add the token to the command stack. This is used for error
# messages if further parsing fails due to an unclosed block
# tag.
self.command_stack.append((command, token))
# Get the tag callback function from the ones registered with
# the parser.
try:
compile_func = self.tags[command]
except KeyError:
self.invalid_block_tag(token, command, parse_until)
# Compile the callback into a node object and add it to
# the node list.
try:
compiled_result = compile_func(self, token)
except Exception as e:
raise self.error(token, e)
self.extend_nodelist(nodelist, compiled_result, token)
# Compile success. Remove the token from the command stack.
self.command_stack.pop()
if parse_until:
self.unclosed_block_tag(parse_until)
return nodelist
def skip_past(self, endtag):
while self.tokens:
token = self.next_token()
if token.token_type == TokenType.BLOCK and token.contents == endtag:
return
self.unclosed_block_tag([endtag])
def extend_nodelist(self, nodelist, node, token):
# Check that non-text nodes don't appear before an extends tag.
if node.must_be_first and nodelist.contains_nontext:
raise self.error(
token, '%r must be the first tag in the template.' % node,
)
if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
nodelist.contains_nontext = True
# Set origin and token here since we can't modify the node __init__()
# method.
node.token = token
node.origin = self.origin
nodelist.append(node)
def error(self, token, e):
"""
Return an exception annotated with the originating token. Since the
parser can be called recursively, check if a token is already set. This
ensures the innermost token is highlighted if an exception occurs,
e.g. a compile error within the body of an if statement.
"""
if not isinstance(e, Exception):
e = TemplateSyntaxError(e)
if not hasattr(e, 'token'):
e.token = token
return e
def invalid_block_tag(self, token, command, parse_until=None):
if parse_until:
raise self.error(
token,
"Invalid block tag on line %d: '%s', expected %s. Did you "
"forget to register or load this tag?" % (
token.lineno,
command,
get_text_list(["'%s'" % p for p in parse_until], 'or'),
),
)
raise self.error(
token,
"Invalid block tag on line %d: '%s'. Did you forget to register "
"or load this tag?" % (token.lineno, command)
)
def unclosed_block_tag(self, parse_until):
command, token = self.command_stack.pop()
msg = "Unclosed tag on line %d: '%s'. Looking for one of: %s." % (
token.lineno,
command,
', '.join(parse_until),
)
raise self.error(token, msg)
def next_token(self):
return self.tokens.pop()
def prepend_token(self, token):
self.tokens.append(token)
def delete_first_token(self):
del self.tokens[-1]
def add_library(self, lib):
self.tags.update(lib.tags)
self.filters.update(lib.filters)
def compile_filter(self, token):
"""
Convenient wrapper for FilterExpression
"""
return FilterExpression(token, self)
def find_filter(self, filter_name):
if filter_name in self.filters:
return self.filters[filter_name]
else:
raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
'i18n_open': re.escape("_("),
'i18n_close': re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
(?:\s*%(filter_sep)s\s*
(?P<filter_name>\w+)
(?:%(arg_sep)s
(?:
(?P<constant_arg>%(constant)s)|
(?P<var_arg>[%(var_chars)s]+|%(num)s)
)
)?
)""" % {
'constant': constant_string,
'num': r'[-+\.]?\d[\d\.e]*',
'var_chars': r'\w\.',
'filter_sep': re.escape(FILTER_SEPARATOR),
'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = _lazy_re_compile(filter_raw_string, re.VERBOSE)
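# What filter_re yields for a typical expression (illustrative): the anchored
# alternatives match only the leading variable or constant, then each further
# match consumes one '|filter' or '|filter:arg' segment.
#
#   >>> [m.group() for m in filter_re.finditer('name|default:"x"|upper')]
#   ['name', '|default:"x"', '|upper']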
class FilterExpression:
"""
Parse a variable token and its optional filters (all as a single string),
and return a list of tuples of the filter name and arguments.
Sample::
>>> token = 'variable|default:"Default value"|date:"Y-m-d"'
>>> p = Parser('')
>>> fe = FilterExpression(token, p)
>>> len(fe.filters)
2
>>> fe.var
<Variable: 'variable'>
"""
def __init__(self, token, parser):
self.token = token
matches = filter_re.finditer(token)
var_obj = None
filters = []
upto = 0
for match in matches:
start = match.start()
if upto != start:
raise TemplateSyntaxError("Could not parse some characters: "
"%s|%s|%s" %
(token[:upto], token[upto:start],
token[start:]))
if var_obj is None:
var, constant = match['var'], match['constant']
if constant:
try:
var_obj = Variable(constant).resolve({})
except VariableDoesNotExist:
var_obj = None
elif var is None:
raise TemplateSyntaxError("Could not find variable at "
"start of %s." % token)
else:
var_obj = Variable(var)
else:
filter_name = match['filter_name']
args = []
constant_arg, var_arg = match['constant_arg'], match['var_arg']
if constant_arg:
args.append((False, Variable(constant_arg).resolve({})))
elif var_arg:
args.append((True, Variable(var_arg)))
filter_func = parser.find_filter(filter_name)
self.args_check(filter_name, filter_func, args)
filters.append((filter_func, args))
upto = match.end()
if upto != len(token):
raise TemplateSyntaxError("Could not parse the remainder: '%s' "
"from '%s'" % (token[upto:], token))
self.filters = filters
self.var = var_obj
def resolve(self, context, ignore_failures=False):
if isinstance(self.var, Variable):
try:
obj = self.var.resolve(context)
except VariableDoesNotExist:
if ignore_failures:
obj = None
else:
string_if_invalid = context.template.engine.string_if_invalid
if string_if_invalid:
if '%s' in string_if_invalid:
return string_if_invalid % self.var
else:
return string_if_invalid
else:
obj = string_if_invalid
else:
obj = self.var
for func, args in self.filters:
arg_vals = []
for lookup, arg in args:
if not lookup:
arg_vals.append(mark_safe(arg))
else:
arg_vals.append(arg.resolve(context))
if getattr(func, 'expects_localtime', False):
obj = template_localtime(obj, context.use_tz)
if getattr(func, 'needs_autoescape', False):
new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
else:
new_obj = func(obj, *arg_vals)
if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
obj = mark_safe(new_obj)
else:
obj = new_obj
return obj
def args_check(name, func, provided):
provided = list(provided)
# First argument, filter input, is implied.
plen = len(provided) + 1
# Check to see if a decorator is providing the real function.
func = inspect.unwrap(func)
args, _, _, defaults, _, _, _ = inspect.getfullargspec(func)
alen = len(args)
dlen = len(defaults or [])
# Not enough OR Too many
if plen < (alen - dlen) or plen > alen:
raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
(name, alen - dlen, plen))
return True
args_check = staticmethod(args_check)
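    # Note: the assignment above is the pre-@staticmethod idiom; args_check()
    # is defined without 'self' and then wrapped so it can be called on both
    # the class and its instances.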
def __str__(self):
return self.token
class Variable:
"""
A template variable, resolvable against a given context. The variable may
be a hard-coded string (if it begins and ends with single or double quote
marks)::
>>> c = {'article': {'section':'News'}}
>>> Variable('article.section').resolve(c)
'News'
>>> Variable('article').resolve(c)
{'section': 'News'}
>>> class AClass: pass
>>> c = AClass()
>>> c.article = AClass()
>>> c.article.section = 'News'
(The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
"""
def __init__(self, var):
self.var = var
self.literal = None
self.lookups = None
self.translate = False
self.message_context = None
if not isinstance(var, str):
raise TypeError(
"Variable must be a string or number, got %s" % type(var))
try:
# First try to treat this variable as a number.
#
# Note that this could cause an OverflowError here that we're not
# catching. Since this should only happen at compile time, that's
# probably OK.
# Try to interpret values containing a period or an 'e'/'E'
# (possibly scientific notation) as a float; otherwise, try int.
if '.' in var or 'e' in var.lower():
self.literal = float(var)
# "2." is invalid
if var.endswith('.'):
raise ValueError
else:
self.literal = int(var)
except ValueError:
# A ValueError means that the variable isn't a number.
if var.startswith('_(') and var.endswith(')'):
# The result of the lookup should be translated at rendering
# time.
self.translate = True
var = var[2:-1]
# If it's wrapped with quotes (single or double), then
# we're also dealing with a literal.
try:
self.literal = mark_safe(unescape_string_literal(var))
except ValueError:
# Otherwise we'll set self.lookups so that resolve() knows we're
                # dealing with a bona fide variable
if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
raise TemplateSyntaxError("Variables and attributes may "
"not begin with underscores: '%s'" %
var)
self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
def resolve(self, context):
"""Resolve this variable against a given context."""
if self.lookups is not None:
# We're dealing with a variable that needs to be resolved
value = self._resolve_lookup(context)
else:
# We're dealing with a literal, so it's already been "resolved"
value = self.literal
if self.translate:
is_safe = isinstance(value, SafeData)
msgid = value.replace('%', '%%')
msgid = mark_safe(msgid) if is_safe else msgid
if self.message_context:
return pgettext_lazy(self.message_context, msgid)
else:
return gettext_lazy(msgid)
return value
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.var)
def __str__(self):
return self.var
def _resolve_lookup(self, context):
"""
Perform resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
try: # catch-all for silent variable failures
for bit in self.lookups:
try: # dictionary lookup
current = current[bit]
# ValueError/IndexError are for numpy.array lookup on
# numpy < 1.9 and 1.9+ respectively
except (TypeError, AttributeError, KeyError, ValueError, IndexError):
try: # attribute lookup
# Don't return class attributes if the class is the context:
if isinstance(current, BaseContext) and getattr(type(current), bit):
raise AttributeError
current = getattr(current, bit)
except (TypeError, AttributeError):
# Reraise if the exception was raised by a @property
if not isinstance(current, BaseContext) and bit in dir(current):
raise
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError): # unsubscriptable object
raise VariableDoesNotExist("Failed lookup for key "
"[%s] in %r",
(bit, current)) # missing attribute
if callable(current):
if getattr(current, 'do_not_call_in_templates', False):
pass
elif getattr(current, 'alters_data', False):
current = context.template.engine.string_if_invalid
else:
try: # method call (assuming no args required)
current = current()
except TypeError:
signature = inspect.signature(current)
try:
signature.bind()
except TypeError: # arguments *were* required
current = context.template.engine.string_if_invalid # invalid method call
else:
raise
except Exception as e:
template_name = getattr(context, 'template_name', None) or 'unknown'
logger.debug(
"Exception while resolving variable '%s' in template '%s'.",
bit,
template_name,
exc_info=True,
)
if getattr(e, 'silent_variable_failure', False):
current = context.template.engine.string_if_invalid
else:
raise
return current
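    # Literal vs. lookup sketch:
    #
    #   >>> Variable('2.5').literal
    #   2.5
    #   >>> Variable('"hi"').resolve({})         # a quoted string literal
    #   'hi'
    #   >>> Variable('article.section').lookups  # a real lookup, split on '.'
    #   ('article', 'section')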
class Node:
# Set this to True for nodes that must be first in the template (although
    # they can be preceded by text nodes).
must_be_first = False
child_nodelists = ('nodelist',)
token = None
def render(self, context):
"""
Return the node rendered as a string.
"""
pass
def render_annotated(self, context):
"""
Render the node. If debug is True and an exception occurs during
rendering, the exception is annotated with contextual line information
where it occurred in the template. For internal usage this method is
preferred over using the render method directly.
"""
try:
return self.render(context)
except Exception as e:
if context.template.engine.debug and not hasattr(e, 'template_debug'):
e.template_debug = context.render_context.template.get_exception_info(e, self.token)
raise
def __iter__(self):
yield self
def get_nodes_by_type(self, nodetype):
"""
Return a list of all nodes (within this node and its nodelist)
of the given type
"""
nodes = []
if isinstance(self, nodetype):
nodes.append(self)
for attr in self.child_nodelists:
nodelist = getattr(self, attr, None)
if nodelist:
nodes.extend(nodelist.get_nodes_by_type(nodetype))
return nodes
class NodeList(list):
# Set to True the first time a non-TextNode is inserted by
# extend_nodelist().
contains_nontext = False
def render(self, context):
bits = []
for node in self:
if isinstance(node, Node):
bit = node.render_annotated(context)
else:
bit = node
bits.append(str(bit))
return mark_safe(''.join(bits))
def get_nodes_by_type(self, nodetype):
"Return a list of all nodes of the given type"
nodes = []
for node in self:
nodes.extend(node.get_nodes_by_type(nodetype))
return nodes
class TextNode(Node):
def __init__(self, s):
self.s = s
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.s[:25])
def render(self, context):
return self.s
def render_value_in_context(value, context):
"""
Convert any value to a string to become part of a rendered template. This
means escaping, if required, and conversion to a string. If value is a
string, it's expected to already be translated.
"""
value = template_localtime(value, use_tz=context.use_tz)
value = localize(value, use_l10n=context.use_l10n)
if context.autoescape:
if not issubclass(type(value), str):
value = str(value)
return conditional_escape(value)
else:
return str(value)
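# For instance (illustrative), with context.autoescape enabled,
# render_value_in_context('<b>', context) returns '&lt;b&gt;', while a
# mark_safe() value passes through conditional_escape() untouched.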
class VariableNode(Node):
def __init__(self, filter_expression):
self.filter_expression = filter_expression
def __repr__(self):
return "<Variable Node: %s>" % self.filter_expression
def render(self, context):
try:
output = self.filter_expression.resolve(context)
except UnicodeDecodeError:
# Unicode conversion can fail sometimes for reasons out of our
# control (e.g. exception rendering). In that case, we fail
# quietly.
return ''
return render_value_in_context(output, context)
# Regex for token keyword arguments
kwarg_re = _lazy_re_compile(r"(?:(\w+)=)?(.+)")
def token_kwargs(bits, parser, support_legacy=False):
"""
Parse token keyword arguments and return a dictionary of the arguments
retrieved from the ``bits`` token list.
`bits` is a list containing the remainder of the token (split by spaces)
that is to be checked for arguments. Valid arguments are removed from this
list.
`support_legacy` - if True, the legacy format ``1 as foo`` is accepted.
Otherwise, only the standard ``foo=1`` format is allowed.
There is no requirement for all remaining token ``bits`` to be keyword
arguments, so return the dictionary as soon as an invalid argument format
is reached.
"""
if not bits:
return {}
match = kwarg_re.match(bits[0])
kwarg_format = match and match[1]
if not kwarg_format:
if not support_legacy:
return {}
if len(bits) < 3 or bits[1] != 'as':
return {}
kwargs = {}
while bits:
if kwarg_format:
match = kwarg_re.match(bits[0])
if not match or not match[1]:
return kwargs
key, value = match.groups()
del bits[:1]
else:
if len(bits) < 3 or bits[1] != 'as':
return kwargs
key, value = bits[2], bits[0]
del bits[:3]
kwargs[key] = parser.compile_filter(value)
if bits and not kwarg_format:
if bits[0] != 'and':
return kwargs
del bits[:1]
return kwargs
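# A consumption sketch: keyword arguments are peeled off the front of ``bits``
# until a non-kwarg is reached (``parser`` here is any Parser instance):
#
#   >>> bits = ['greeting="Hi"', 'badarg']
#   >>> kwargs = token_kwargs(bits, parser)
#   >>> list(kwargs), bits
#   (['greeting'], ['badarg'])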
"""Default tags used by the template system, available to all templates."""
import re
import sys
import warnings
from collections import namedtuple
from datetime import datetime
from itertools import cycle as itertools_cycle, groupby
from django.conf import settings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.html import conditional_escape, format_html
from django.utils.lorem_ipsum import paragraphs, words
from django.utils.safestring import mark_safe
from .base import (
BLOCK_TAG_END, BLOCK_TAG_START, COMMENT_TAG_END, COMMENT_TAG_START,
FILTER_SEPARATOR, SINGLE_BRACE_END, SINGLE_BRACE_START,
VARIABLE_ATTRIBUTE_SEPARATOR, VARIABLE_TAG_END, VARIABLE_TAG_START, Node,
NodeList, TemplateSyntaxError, VariableDoesNotExist, kwarg_re,
render_value_in_context, token_kwargs,
)
from .context import Context
from .defaultfilters import date
from .library import Library
from .smartif import IfParser, Literal
register = Library()
class AutoEscapeControlNode(Node):
"""Implement the actions of the autoescape tag."""
def __init__(self, setting, nodelist):
self.setting, self.nodelist = setting, nodelist
def render(self, context):
old_setting = context.autoescape
context.autoescape = self.setting
output = self.nodelist.render(context)
context.autoescape = old_setting
if self.setting:
return mark_safe(output)
else:
return output
class CommentNode(Node):
def render(self, context):
return ''
class CsrfTokenNode(Node):
def render(self, context):
csrf_token = context.get('csrf_token')
if csrf_token:
if csrf_token == 'NOTPROVIDED':
return format_html("")
else:
return format_html('<input type="hidden" name="csrfmiddlewaretoken" value="{}">', csrf_token)
else:
# It's very probable that the token is missing because of
# misconfiguration, so we raise a warning
if settings.DEBUG:
warnings.warn(
"A {% csrf_token %} was used in a template, but the context "
"did not provide the value. This is usually caused by not "
"using RequestContext."
)
return ''
class CycleNode(Node):
def __init__(self, cyclevars, variable_name=None, silent=False):
self.cyclevars = cyclevars
self.variable_name = variable_name
self.silent = silent
def render(self, context):
if self not in context.render_context:
# First time the node is rendered in template
context.render_context[self] = itertools_cycle(self.cyclevars)
cycle_iter = context.render_context[self]
value = next(cycle_iter).resolve(context)
if self.variable_name:
context.set_upward(self.variable_name, value)
if self.silent:
return ''
return render_value_in_context(value, context)
def reset(self, context):
"""
Reset the cycle iteration back to the beginning.
"""
context.render_context[self] = itertools_cycle(self.cyclevars)
class DebugNode(Node):
def render(self, context):
from pprint import pformat
output = [pformat(val) for val in context]
output.append('\n\n')
output.append(pformat(sys.modules))
return ''.join(output)
class FilterNode(Node):
def __init__(self, filter_expr, nodelist):
self.filter_expr, self.nodelist = filter_expr, nodelist
def render(self, context):
output = self.nodelist.render(context)
# Apply filters.
with context.push(var=output):
return self.filter_expr.resolve(context)
class FirstOfNode(Node):
def __init__(self, variables, asvar=None):
self.vars = variables
self.asvar = asvar
def render(self, context):
first = ''
for var in self.vars:
value = var.resolve(context, ignore_failures=True)
if value:
first = render_value_in_context(value, context)
break
if self.asvar:
context[self.asvar] = first
return ''
return first
class ForNode(Node):
child_nodelists = ('nodelist_loop', 'nodelist_empty')
def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
self.loopvars, self.sequence = loopvars, sequence
self.is_reversed = is_reversed
self.nodelist_loop = nodelist_loop
if nodelist_empty is None:
self.nodelist_empty = NodeList()
else:
self.nodelist_empty = nodelist_empty
def __repr__(self):
reversed_text = ' reversed' if self.is_reversed else ''
return '<%s: for %s in %s, tail_len: %d%s>' % (
self.__class__.__name__,
', '.join(self.loopvars),
self.sequence,
len(self.nodelist_loop),
reversed_text,
)
def render(self, context):
if 'forloop' in context:
parentloop = context['forloop']
else:
parentloop = {}
with context.push():
values = self.sequence.resolve(context, ignore_failures=True)
if values is None:
values = []
if not hasattr(values, '__len__'):
values = list(values)
len_values = len(values)
if len_values < 1:
return self.nodelist_empty.render(context)
nodelist = []
if self.is_reversed:
values = reversed(values)
num_loopvars = len(self.loopvars)
unpack = num_loopvars > 1
# Create a forloop value in the context. We'll update counters on each
# iteration just below.
loop_dict = context['forloop'] = {'parentloop': parentloop}
for i, item in enumerate(values):
# Shortcuts for current loop iteration number.
loop_dict['counter0'] = i
loop_dict['counter'] = i + 1
# Reverse counter iteration numbers.
loop_dict['revcounter'] = len_values - i
loop_dict['revcounter0'] = len_values - i - 1
# Boolean values designating first and last times through loop.
loop_dict['first'] = (i == 0)
loop_dict['last'] = (i == len_values - 1)
pop_context = False
if unpack:
# If there are multiple loop variables, unpack the item into
# them.
try:
len_item = len(item)
except TypeError: # not an iterable
len_item = 1
# Check loop variable count before unpacking
if num_loopvars != len_item:
raise ValueError(
"Need {} values to unpack in for loop; got {}. "
.format(num_loopvars, len_item),
)
unpacked_vars = dict(zip(self.loopvars, item))
pop_context = True
context.update(unpacked_vars)
else:
context[self.loopvars[0]] = item
for node in self.nodelist_loop:
nodelist.append(node.render_annotated(context))
if pop_context:
# Pop the loop variables pushed on to the context to avoid
# the context ending up in an inconsistent state when other
# tags (e.g., include and with) push data to context.
context.pop()
return mark_safe(''.join(nodelist))
class IfChangedNode(Node):
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, nodelist_true, nodelist_false, *varlist):
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self._varlist = varlist
def render(self, context):
# Init state storage
state_frame = self._get_context_stack_frame(context)
state_frame.setdefault(self)
nodelist_true_output = None
if self._varlist:
# Consider multiple parameters. This behaves like an OR evaluation
# of the multiple variables.
compare_to = [var.resolve(context, ignore_failures=True) for var in self._varlist]
else:
# The "{% ifchanged %}" syntax (without any variables) compares
# the rendered output.
compare_to = nodelist_true_output = self.nodelist_true.render(context)
if compare_to != state_frame[self]:
state_frame[self] = compare_to
# render true block if not already rendered
return nodelist_true_output or self.nodelist_true.render(context)
elif self.nodelist_false:
return self.nodelist_false.render(context)
return ''
def _get_context_stack_frame(self, context):
# The Context object behaves like a stack where each template tag can create a new scope.
# Find the place where to store the state to detect changes.
if 'forloop' in context:
# Ifchanged is bound to the local for loop.
# When there is a loop-in-loop, the state is bound to the inner loop,
# so it resets when the outer loop continues.
return context['forloop']
else:
# Using ifchanged outside loops. Effectively this is a no-op because the state is associated with 'self'.
return context.render_context
class IfEqualNode(Node):
# RemovedInDjango40Warning.
child_nodelists = ('nodelist_true', 'nodelist_false')
def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
self.var1, self.var2 = var1, var2
self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
self.negate = negate
def __repr__(self):
return '<%s>' % self.__class__.__name__
def render(self, context):
val1 = self.var1.resolve(context, ignore_failures=True)
val2 = self.var2.resolve(context, ignore_failures=True)
if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
return self.nodelist_true.render(context)
return self.nodelist_false.render(context)
class IfNode(Node):
def __init__(self, conditions_nodelists):
self.conditions_nodelists = conditions_nodelists
def __repr__(self):
return '<%s>' % self.__class__.__name__
def __iter__(self):
for _, nodelist in self.conditions_nodelists:
yield from nodelist
@property
def nodelist(self):
return NodeList(self)
def render(self, context):
for condition, nodelist in self.conditions_nodelists:
if condition is not None: # if / elif clause
try:
match = condition.eval(context)
except VariableDoesNotExist:
match = None
else: # else clause
match = True
if match:
return nodelist.render(context)
return ''
class LoremNode(Node):
def __init__(self, count, method, common):
self.count, self.method, self.common = count, method, common
def render(self, context):
try:
count = int(self.count.resolve(context))
except (ValueError, TypeError):
count = 1
if self.method == 'w':
return words(count, common=self.common)
else:
paras = paragraphs(count, common=self.common)
if self.method == 'p':
paras = ['<p>%s</p>' % p for p in paras]
return '\n\n'.join(paras)
GroupedResult = namedtuple('GroupedResult', ['grouper', 'list'])
class RegroupNode(Node):
def __init__(self, target, expression, var_name):
self.target, self.expression = target, expression
self.var_name = var_name
def resolve_expression(self, obj, context):
# This method is called for each object in self.target. See regroup()
# for the reason why we temporarily put the object in the context.
context[self.var_name] = obj
return self.expression.resolve(context, ignore_failures=True)
def render(self, context):
obj_list = self.target.resolve(context, ignore_failures=True)
if obj_list is None:
# target variable wasn't found in context; fail silently.
context[self.var_name] = []
return ''
# List of dictionaries in the format:
# {'grouper': 'key', 'list': [list of contents]}.
context[self.var_name] = [
GroupedResult(grouper=key, list=list(val))
for key, val in
groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
]
return ''
class LoadNode(Node):
def render(self, context):
return ''
class NowNode(Node):
def __init__(self, format_string, asvar=None):
self.format_string = format_string
self.asvar = asvar
def render(self, context):
tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
formatted = date(datetime.now(tz=tzinfo), self.format_string)
if self.asvar:
context[self.asvar] = formatted
return ''
else:
return formatted
class ResetCycleNode(Node):
def __init__(self, node):
self.node = node
def render(self, context):
self.node.reset(context)
return ''
class SpacelessNode(Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
from django.utils.html import strip_spaces_between_tags
return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
mapping = {
'openblock': BLOCK_TAG_START,
'closeblock': BLOCK_TAG_END,
'openvariable': VARIABLE_TAG_START,
'closevariable': VARIABLE_TAG_END,
'openbrace': SINGLE_BRACE_START,
'closebrace': SINGLE_BRACE_END,
'opencomment': COMMENT_TAG_START,
'closecomment': COMMENT_TAG_END,
}
def __init__(self, tagtype):
self.tagtype = tagtype
def render(self, context):
return self.mapping.get(self.tagtype, '')
class URLNode(Node):
def __init__(self, view_name, args, kwargs, asvar):
self.view_name = view_name
self.args = args
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
from django.urls import reverse, NoReverseMatch
args = [arg.resolve(context) for arg in self.args]
kwargs = {k: v.resolve(context) for k, v in self.kwargs.items()}
view_name = self.view_name.resolve(context)
try:
current_app = context.request.current_app
except AttributeError:
try:
current_app = context.request.resolver_match.namespace
except AttributeError:
current_app = None
# Try to look up the URL. If it fails, raise NoReverseMatch unless the
# {% url ... as var %} construct is used, in which case return nothing.
url = ''
try:
url = reverse(view_name, args=args, kwargs=kwargs, current_app=current_app)
except NoReverseMatch:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ''
else:
if context.autoescape:
url = conditional_escape(url)
return url
class VerbatimNode(Node):
def __init__(self, content):
self.content = content
def render(self, context):
return self.content
class WidthRatioNode(Node):
def __init__(self, val_expr, max_expr, max_width, asvar=None):
self.val_expr = val_expr
self.max_expr = max_expr
self.max_width = max_width
self.asvar = asvar
def render(self, context):
try:
value = self.val_expr.resolve(context)
max_value = self.max_expr.resolve(context)
max_width = int(self.max_width.resolve(context))
except VariableDoesNotExist:
return ''
except (ValueError, TypeError):
raise TemplateSyntaxError("widthratio final argument must be a number")
try:
value = float(value)
max_value = float(max_value)
ratio = (value / max_value) * max_width
result = str(round(ratio))
except ZeroDivisionError:
result = '0'
except (ValueError, TypeError, OverflowError):
result = ''
if self.asvar:
context[self.asvar] = result
return ''
else:
return result
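    # Arithmetic sketch: {% widthratio this_value max_value max_width %} with
    # this_value=175, max_value=200, and max_width=100 renders
    # str(round(175 / 200 * 100)) == '88'.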
class WithNode(Node):
def __init__(self, var, name, nodelist, extra_context=None):
self.nodelist = nodelist
# var and name are legacy attributes, being left in case they are used
# by third-party subclasses of this Node.
self.extra_context = extra_context or {}
if name:
self.extra_context[name] = var
def __repr__(self):
return '<%s>' % self.__class__.__name__
def render(self, context):
values = {key: val.resolve(context) for key, val in self.extra_context.items()}
with context.push(**values):
return self.nodelist.render(context)
@register.tag
def autoescape(parser, token):
"""
Force autoescape behavior for this block.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments.
args = token.contents.split()
if len(args) != 2:
raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
arg = args[1]
if arg not in ('on', 'off'):
raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
nodelist = parser.parse(('endautoescape',))
parser.delete_first_token()
return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
"""
Ignore everything between ``{% comment %}`` and ``{% endcomment %}``.
"""
parser.skip_past('endcomment')
return CommentNode()
@register.tag
def cycle(parser, token):
"""
Cycle among the given strings each time this tag is encountered.
Within a loop, cycles among the given strings each time through
the loop::
{% for o in some_list %}
<tr class="{% cycle 'row1' 'row2' %}">
...
</tr>
{% endfor %}
Outside of a loop, give the values a unique name the first time you call
it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr>
You can use any number of values, separated by spaces. Commas can also
be used to separate values; if a comma is used, the cycle values are
interpreted as literal strings.
The optional flag "silent" can be used to prevent the cycle declaration
from returning any value::
{% for o in some_list %}
{% cycle 'row1' 'row2' as rowcolors silent %}
<tr class="{{ rowcolors }}">{% include "subtemplate.html " %}</tr>
{% endfor %}
"""
# Note: This returns the exact same node on each {% cycle name %} call;
# that is, the node object returned from {% cycle a b c as name %} and the
# one returned from {% cycle name %} are the exact same object. This
# shouldn't cause problems (heh), but if it does, now you know.
#
# Ugly hack warning: This stuffs the named template dict into parser so
# that names are only unique within each template (as opposed to using
# a global variable, which would make cycle names have to be unique across
    # *all* templates).
#
# It keeps the last node in the parser to be able to reset it with
# {% resetcycle %}.
args = token.split_contents()
if len(args) < 2:
raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
if len(args) == 2:
# {% cycle foo %} case.
name = args[1]
if not hasattr(parser, '_named_cycle_nodes'):
raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
if name not in parser._named_cycle_nodes:
raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
return parser._named_cycle_nodes[name]
as_form = False
if len(args) > 4:
# {% cycle ... as foo [silent] %} case.
if args[-3] == "as":
if args[-1] != "silent":
raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
as_form = True
silent = True
args = args[:-1]
elif args[-2] == "as":
as_form = True
silent = False
if as_form:
name = args[-1]
values = [parser.compile_filter(arg) for arg in args[1:-2]]
node = CycleNode(values, name, silent=silent)
if not hasattr(parser, '_named_cycle_nodes'):
parser._named_cycle_nodes = {}
parser._named_cycle_nodes[name] = node
else:
values = [parser.compile_filter(arg) for arg in args[1:]]
node = CycleNode(values)
parser._last_cycle_node = node
return node
@register.tag
def csrf_token(parser, token):
return CsrfTokenNode()
@register.tag
def debug(parser, token):
"""
Output a whole load of debugging information, including the current
context and imported modules.
Sample usage::
<pre>
{% debug %}
</pre>
"""
return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
"""
Filter the contents of the block through variable filters.
Filters can also be piped through each other, and they can have
arguments -- just like in variable syntax.
Sample usage::
{% filter force_escape|lower %}
This text will be HTML-escaped, and will appear in lowercase.
{% endfilter %}
Note that the ``escape`` and ``safe`` filters are not acceptable arguments.
Instead, use the ``autoescape`` tag to manage autoescaping for blocks of
template code.
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments.
_, rest = token.contents.split(None, 1)
filter_expr = parser.compile_filter("var|%s" % (rest))
for func, unused in filter_expr.filters:
filter_name = getattr(func, '_filter_name', None)
if filter_name in ('escape', 'safe'):
raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_name)
nodelist = parser.parse(('endfilter',))
parser.delete_first_token()
return FilterNode(filter_expr, nodelist)
@register.tag
def firstof(parser, token):
"""
Output the first variable passed that is not False.
Output nothing if all the passed variables are False.
Sample usage::
{% firstof var1 var2 var3 as myvar %}
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
but much cleaner!
You can also use a literal string as a fallback value in case all
passed variables are False::
{% firstof var1 var2 var3 "fallback value" %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
        {% endautoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
bits = token.split_contents()[1:]
asvar = None
if not bits:
raise TemplateSyntaxError("'firstof' statement requires at least one argument")
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
return FirstOfNode([parser.compile_filter(bit) for bit in bits], asvar)
@register.tag('for')
def do_for(parser, token):
"""
Loop over each item in an array.
For example, to display a list of athletes given ``athlete_list``::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
</ul>
You can loop over a list in reverse by using
``{% for obj in list reversed %}``.
You can also unpack multiple values from a two-dimensional array::
{% for key,value in dict.items %}
{{ key }}: {{ value }}
{% endfor %}
The ``for`` tag can take an optional ``{% empty %}`` clause that will
be displayed if the given array is empty or could not be found::
<ul>
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% empty %}
<li>Sorry, no athletes in this list.</li>
{% endfor %}
        </ul>
The above is equivalent to -- but shorter, cleaner, and possibly faster
than -- the following::
<ul>
{% if athlete_list %}
{% for athlete in athlete_list %}
<li>{{ athlete.name }}</li>
{% endfor %}
{% else %}
<li>Sorry, no athletes in this list.</li>
{% endif %}
</ul>
The for loop sets a number of variables available within the loop:
========================== ================================================
Variable Description
========================== ================================================
``forloop.counter`` The current iteration of the loop (1-indexed)
``forloop.counter0`` The current iteration of the loop (0-indexed)
``forloop.revcounter`` The number of iterations from the end of the
loop (1-indexed)
``forloop.revcounter0`` The number of iterations from the end of the
loop (0-indexed)
``forloop.first`` True if this is the first time through the loop
``forloop.last`` True if this is the last time through the loop
``forloop.parentloop`` For nested loops, this is the loop "above" the
current one
========================== ================================================
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
if bits[in_index] != 'in':
raise TemplateSyntaxError("'for' statements should use the format"
" 'for x in y': %s" % token.contents)
invalid_chars = frozenset((' ', '"', "'", FILTER_SEPARATOR))
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or not invalid_chars.isdisjoint(var):
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
token = parser.next_token()
if token.contents == 'empty':
nodelist_empty = parser.parse(('endfor',))
parser.delete_first_token()
else:
nodelist_empty = None
return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
# RemovedInDjango40Warning.
bits = list(token.split_contents())
if len(bits) != 3:
raise TemplateSyntaxError("%r takes two arguments" % bits[0])
end_tag = 'end' + bits[0]
nodelist_true = parser.parse(('else', end_tag))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse((end_tag,))
parser.delete_first_token()
else:
nodelist_false = NodeList()
val1 = parser.compile_filter(bits[1])
val2 = parser.compile_filter(bits[2])
return IfEqualNode(val1, val2, nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
"""
Output the contents of the block if the two arguments equal each other.
Examples::
{% ifequal user.id comment.user_id %}
...
{% endifequal %}
{% ifnotequal user.id comment.user_id %}
...
{% else %}
...
{% endifnotequal %}
"""
warnings.warn(
'The {% ifequal %} template tag is deprecated in favor of {% if %}.',
RemovedInDjango40Warning,
)
return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
"""
Output the contents of the block if the two arguments are not equal.
See ifequal.
"""
warnings.warn(
'The {% ifnotequal %} template tag is deprecated in favor of '
'{% if %}.',
RemovedInDjango40Warning,
)
return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
def __init__(self, value, text):
self.value = value
self.text = text # for better error messages
def display(self):
return self.text
def eval(self, context):
return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
error_class = TemplateSyntaxError
def __init__(self, parser, *args, **kwargs):
self.template_parser = parser
super().__init__(*args, **kwargs)
def create_var(self, value):
return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
"""
Evaluate a variable, and if that variable is "true" (i.e., exists, is not
empty, and is not a false boolean value), output the contents of the block:
::
{% if athlete_list %}
Number of athletes: {{ athlete_list|count }}
{% elif athlete_in_locker_room_list %}
Athletes should be out of the locker room soon!
{% else %}
No athletes.
{% endif %}
In the above, if ``athlete_list`` is not empty, the number of athletes will
be displayed by the ``{{ athlete_list|count }}`` variable.
    The ``if`` tag may take one or several ``{% elif %}`` clauses, as well as
an ``{% else %}`` clause that will be displayed if all previous conditions
fail. These clauses are optional.
``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
variables or to negate a given variable::
{% if not athlete_list %}
There are no athletes.
{% endif %}
{% if athlete_list or coach_list %}
There are some athletes or some coaches.
{% endif %}
{% if athlete_list and coach_list %}
Both athletes and coaches are available.
{% endif %}
{% if not athlete_list or coach_list %}
There are no athletes, or there are some coaches.
{% endif %}
{% if athlete_list and not coach_list %}
There are some athletes and absolutely no coaches.
{% endif %}
Comparison operators are also available, and the use of filters is also
allowed, for example::
{% if articles|length >= 5 %}...{% endif %}
Arguments and operators _must_ have a space between them, so
``{% if 1>2 %}`` is not a valid if tag.
    All supported operators are: ``or``, ``and``, ``in``, ``not in``,
``==``, ``!=``, ``>``, ``>=``, ``<`` and ``<=``.
Operator precedence follows Python.
"""
# {% if ... %}
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists = [(condition, nodelist)]
token = parser.next_token()
# {% elif ... %} (repeatable)
while token.contents.startswith('elif'):
bits = token.split_contents()[1:]
condition = TemplateIfParser(parser, bits).parse()
nodelist = parser.parse(('elif', 'else', 'endif'))
conditions_nodelists.append((condition, nodelist))
token = parser.next_token()
# {% else %} (optional)
if token.contents == 'else':
nodelist = parser.parse(('endif',))
conditions_nodelists.append((None, nodelist))
token = parser.next_token()
# {% endif %}
if token.contents != 'endif':
raise TemplateSyntaxError('Malformed template tag at line {}: "{}"'.format(token.lineno, token.contents))
return IfNode(conditions_nodelists)
@register.tag
def ifchanged(parser, token):
"""
Check if a value has changed from the last iteration of a loop.
The ``{% ifchanged %}`` block tag is used within a loop. It has two
possible uses.
1. Check its own rendered contents against its previous state and only
displays the content if it has changed. For example, this displays a
list of days, only displaying the month if it changes::
<h1>Archive for {{ year }}</h1>
{% for date in days %}
{% ifchanged %}<h3>{{ date|date:"F" }}</h3>{% endifchanged %}
<a href="{{ date|date:"M/d"|lower }}/">{{ date|date:"j" }}</a>
{% endfor %}
2. If given one or more variables, check whether any variable has changed.
For example, the following shows the date every time it changes, while
showing the hour if either the hour or the date has changed::
{% for date in days %}
{% ifchanged date.date %} {{ date.date }} {% endifchanged %}
{% ifchanged date.hour date.date %}
{{ date.hour }}
{% endifchanged %}
{% endfor %}
"""
bits = token.split_contents()
nodelist_true = parser.parse(('else', 'endifchanged'))
token = parser.next_token()
if token.contents == 'else':
nodelist_false = parser.parse(('endifchanged',))
parser.delete_first_token()
else:
nodelist_false = NodeList()
values = [parser.compile_filter(bit) for bit in bits[1:]]
return IfChangedNode(nodelist_true, nodelist_false, *values)
def find_library(parser, name):
try:
return parser.libraries[name]
except KeyError:
raise TemplateSyntaxError(
"'%s' is not a registered tag library. Must be one of:\n%s" % (
name, "\n".join(sorted(parser.libraries)),
),
)
def load_from_library(library, label, names):
"""
Return a subset of tags and filters from a library.
"""
subset = Library()
for name in names:
found = False
if name in library.tags:
found = True
subset.tags[name] = library.tags[name]
if name in library.filters:
found = True
subset.filters[name] = library.filters[name]
if found is False:
raise TemplateSyntaxError(
"'%s' is not a valid tag or filter in tag library '%s'" % (
name, label,
),
)
return subset
@register.tag
def load(parser, token):
"""
Load a custom template tag library into the parser.
For example, to load the template tags in
``django/templatetags/news/photos.py``::
{% load news.photos %}
Can also be used to load an individual tag/filter from
a library::
{% load byline from news %}
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments.
bits = token.contents.split()
if len(bits) >= 4 and bits[-2] == "from":
# from syntax is used; load individual tags from the library
name = bits[-1]
lib = find_library(parser, name)
subset = load_from_library(lib, name, bits[1:-2])
parser.add_library(subset)
else:
# one or more libraries are specified; load and add them to the parser
for name in bits[1:]:
lib = find_library(parser, name)
parser.add_library(lib)
return LoadNode()
@register.tag
def lorem(parser, token):
"""
Create random Latin text useful for providing test data in templates.
Usage format::
{% lorem [count] [method] [random] %}
``count`` is a number (or variable) containing the number of paragraphs or
words to generate (default is 1).
``method`` is either ``w`` for words, ``p`` for HTML paragraphs, ``b`` for
plain-text paragraph blocks (default is ``b``).
``random`` is the word ``random``, which if given, does not use the common
paragraph (starting "Lorem ipsum dolor sit amet, consectetuer...").
Examples:
* ``{% lorem %}`` outputs the common "lorem ipsum" paragraph
* ``{% lorem 3 p %}`` outputs the common "lorem ipsum" paragraph
and two random paragraphs each wrapped in HTML ``<p>`` tags
    * ``{% lorem 2 w random %}`` outputs two random Latin words
"""
bits = list(token.split_contents())
tagname = bits[0]
# Random bit
common = bits[-1] != 'random'
if not common:
bits.pop()
# Method bit
if bits[-1] in ('w', 'p', 'b'):
method = bits.pop()
else:
method = 'b'
# Count bit
if len(bits) > 1:
count = bits.pop()
else:
count = '1'
count = parser.compile_filter(count)
if len(bits) != 1:
raise TemplateSyntaxError("Incorrect format for %r tag" % tagname)
return LoremNode(count, method, common)
@register.tag
def now(parser, token):
"""
Display the date, formatted according to the given string.
Use the same format as PHP's ``date()`` function; see https://php.net/date
for all the possible values.
Sample usage::
It is {% now "jS F Y H:i" %}
"""
bits = token.split_contents()
asvar = None
if len(bits) == 4 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits) != 2:
raise TemplateSyntaxError("'now' statement takes one argument")
format_string = bits[1][1:-1]
return NowNode(format_string, asvar)
@register.tag
def regroup(parser, token):
"""
Regroup a list of alike objects by a common attribute.
This complex tag is best illustrated by use of an example: say that
``musicians`` is a list of ``Musician`` objects that have ``name`` and
``instrument`` attributes, and you'd like to display a list that
looks like:
* Guitar:
* Django Reinhardt
* Emily Remler
* Piano:
* Lovie Austin
* Bud Powell
* Trumpet:
* Duke Ellington
The following snippet of template code would accomplish this dubious task::
{% regroup musicians by instrument as grouped %}
<ul>
{% for group in grouped %}
<li>{{ group.grouper }}
<ul>
{% for musician in group.list %}
<li>{{ musician.name }}</li>
{% endfor %}
</ul>
{% endfor %}
</ul>
As you can see, ``{% regroup %}`` populates a variable with a list of
objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
item that was grouped by; ``list`` contains the list of objects that share
that ``grouper``. In this case, ``grouper`` would be ``Guitar``, ``Piano``
and ``Trumpet``, and ``list`` is the list of musicians who play this
instrument.
Note that ``{% regroup %}`` does not work when the list to be grouped is not
sorted by the key you are grouping by! This means that if your list of
musicians was not sorted by instrument, you'd need to make sure it is sorted
before using it, i.e.::
{% regroup musicians|dictsort:"instrument" by instrument as grouped %}
"""
bits = token.split_contents()
if len(bits) != 6:
raise TemplateSyntaxError("'regroup' tag takes five arguments")
target = parser.compile_filter(bits[1])
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
# group by the resulting value. After all items are processed, it will
# save the final result in the context under 'var_name', thus clearing the
# temporary values. This hack is necessary because the template engine
# doesn't provide a context-aware equivalent of Python's getattr.
expression = parser.compile_filter(var_name +
VARIABLE_ATTRIBUTE_SEPARATOR +
bits[3])
return RegroupNode(target, expression, var_name)
@register.tag
def resetcycle(parser, token):
"""
Reset a cycle tag.
If an argument is given, reset the last rendered cycle tag whose name
matches the argument, else reset the last rendered cycle tag (named or
unnamed).
"""
args = token.split_contents()
if len(args) > 2:
raise TemplateSyntaxError("%r tag accepts at most one argument." % args[0])
if len(args) == 2:
name = args[1]
try:
return ResetCycleNode(parser._named_cycle_nodes[name])
except (AttributeError, KeyError):
raise TemplateSyntaxError("Named cycle '%s' does not exist." % name)
try:
return ResetCycleNode(parser._last_cycle_node)
except AttributeError:
raise TemplateSyntaxError("No cycles in template.")
@register.tag
def spaceless(parser, token):
"""
Remove whitespace between HTML tags, including tab and newline characters.
Example usage::
{% spaceless %}
<p>
<a href="foo/">Foo</a>
</p>
{% endspaceless %}
This example returns this HTML::
<p><a href="foo/">Foo</a></p>
Only space between *tags* is normalized -- not space between tags and text.
In this example, the space around ``Hello`` isn't stripped::
{% spaceless %}
<strong>
Hello
</strong>
{% endspaceless %}
"""
nodelist = parser.parse(('endspaceless',))
parser.delete_first_token()
return SpacelessNode(nodelist)
@register.tag
def templatetag(parser, token):
"""
Output one of the bits used to compose template tags.
Since the template system has no concept of "escaping", to display one of
the bits used in template tags, you must use the ``{% templatetag %}`` tag.
The argument tells which template bit to output:
================== =======
Argument Outputs
================== =======
``openblock`` ``{%``
``closeblock`` ``%}``
``openvariable`` ``{{``
``closevariable`` ``}}``
``openbrace`` ``{``
``closebrace`` ``}``
``opencomment`` ``{#``
``closecomment`` ``#}``
================== =======
"""
    # token.split_contents() isn't useful here because this tag doesn't accept variables as arguments.
bits = token.contents.split()
if len(bits) != 2:
raise TemplateSyntaxError("'templatetag' statement takes one argument")
tag = bits[1]
if tag not in TemplateTagNode.mapping:
raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
" Must be one of: %s" %
(tag, list(TemplateTagNode.mapping)))
return TemplateTagNode(tag)
@register.tag
def url(parser, token):
r"""
Return an absolute URL matching the given view with its parameters.
This is a way to define links that aren't tied to a particular URL
configuration::
{% url "url_name" arg1 arg2 %}
or
{% url "url_name" name1=value1 name2=value2 %}
The first argument is a URL pattern name. Other arguments are
space-separated values that will be filled in place of positional and
keyword arguments in the URL. Don't mix positional and keyword arguments.
All arguments for the URL must be present.
For example, if you have a view ``app_name.views.client_details`` taking
the client's id and the corresponding line in a URLconf looks like this::
path('client/<int:id>/', views.client_details, name='client-detail-view')
and this app's URLconf is included into the project's URLconf under some
path::
path('clients/', include('app_name.urls'))
then in a template you can create a link for a certain client like this::
{% url "client-detail-view" client.id %}
The URL will look like ``/clients/client/123/``.
The first argument may also be the name of a template variable that will be
evaluated to obtain the view name or the URL name, e.g.::
{% with url_name="client-detail-view" %}
{% url url_name client.id %}
{% endwith %}
"""
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one argument, a URL pattern name." % bits[0])
viewname = parser.compile_filter(bits[1])
args = []
kwargs = {}
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
for bit in bits:
match = kwarg_re.match(bit)
if not match:
raise TemplateSyntaxError("Malformed arguments to url tag")
name, value = match.groups()
if name:
kwargs[name] = parser.compile_filter(value)
else:
args.append(parser.compile_filter(value))
return URLNode(viewname, args, kwargs, asvar)
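# Illustrative usage of the "as" form parsed above (hypothetical view name,
# not part of the original source): storing the URL in a variable lets the
# template handle a missing URL gracefully instead of raising::
#
#     {% url "client-detail-view" client.id as client_url %}
#     {% if client_url %}<a href="{{ client_url }}">Details</a>{% endif %}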
@register.tag
def verbatim(parser, token):
"""
Stop the template engine from rendering the contents of this block tag.
Usage::
{% verbatim %}
{% don't process this %}
{% endverbatim %}
You can also designate a specific closing tag block (allowing the
unrendered use of ``{% endverbatim %}``)::
{% verbatim myblock %}
...
{% endverbatim myblock %}
"""
nodelist = parser.parse(('endverbatim',))
parser.delete_first_token()
return VerbatimNode(nodelist.render(Context()))
@register.tag
def widthratio(parser, token):
"""
For creating bar charts and such. Calculate the ratio of a given value to a
maximum value, and then apply that ratio to a constant.
For example::
<img src="bar.png" alt="Bar"
height="10" width="{% widthratio this_value max_value max_width %}">
If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
the image in the above example will be 88 pixels wide
(because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).
In some cases you might want to capture the result of widthratio in a
variable. It can be useful for instance in a blocktranslate like this::
{% widthratio this_value max_value max_width as width %}
{% blocktranslate %}The width is: {{ width }}{% endblocktranslate %}
"""
bits = token.split_contents()
if len(bits) == 4:
tag, this_value_expr, max_value_expr, max_width = bits
asvar = None
elif len(bits) == 6:
tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
if as_ != 'as':
raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
else:
raise TemplateSyntaxError("widthratio takes at least three arguments")
return WidthRatioNode(parser.compile_filter(this_value_expr),
parser.compile_filter(max_value_expr),
parser.compile_filter(max_width),
asvar=asvar)
@register.tag('with')
def do_with(parser, token):
"""
Add one or more values to the context (inside of this block) for caching
and easy access.
For example::
{% with total=person.some_sql_method %}
{{ total }} object{{ total|pluralize }}
{% endwith %}
Multiple values can be added to the context::
{% with foo=1 bar=2 %}
...
{% endwith %}
The legacy format of ``{% with person.some_sql_method as total %}`` is
still accepted.
"""
bits = token.split_contents()
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
nodelist = parser.parse(('endwith',))
parser.delete_first_token()
return WithNode(None, None, nodelist, extra_context=extra_context)
|
e8f959367f98a84100478526411f1a72c47a0b2aa365e2b2be8a05e40c282537 | """Default variable filters."""
import random as random_module
import re
import types
from decimal import ROUND_HALF_UP, Context, Decimal, InvalidOperation
from functools import wraps
from operator import itemgetter
from pprint import pformat
from urllib.parse import quote
from django.utils import formats
from django.utils.dateformat import format, time_format
from django.utils.encoding import iri_to_uri
from django.utils.html import (
avoid_wrapping, conditional_escape, escape, escapejs,
json_script as _json_script, linebreaks, strip_tags, urlize as _urlize,
)
from django.utils.safestring import SafeData, mark_safe
from django.utils.text import (
Truncator, normalize_newlines, phone2numeric, slugify as _slugify, wrap,
)
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import gettext, ngettext
from .base import Variable, VariableDoesNotExist
from .library import Library
register = Library()
#######################
# STRING DECORATOR #
#######################
def stringfilter(func):
"""
Decorator for filters which should only receive strings. The object
passed as the first positional argument will be converted to a string.
"""
def _dec(*args, **kwargs):
args = list(args)
args[0] = str(args[0])
if (isinstance(args[0], SafeData) and
getattr(_dec._decorated_function, 'is_safe', False)):
return mark_safe(func(*args, **kwargs))
return func(*args, **kwargs)
# Include a reference to the real function (used to check original
# arguments by the template parser, and to bear the 'is_safe' attribute
# when multiple decorators are applied).
_dec._decorated_function = getattr(func, '_decorated_function', func)
return wraps(func)(_dec)
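# Illustrative sketch of a custom filter built with the decorator above
# (hypothetical filter name, not part of the original source); the decorator
# guarantees the incoming value is a string before the filter body runs:
#
#     @register.filter(is_safe=True)
#     @stringfilter
#     def shout(value):
#         """Upper-case the value; non-string input is coerced to str first."""
#         return value.upper()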
###################
# STRINGS #
###################
@register.filter(is_safe=True)
@stringfilter
def addslashes(value):
"""
Add slashes before quotes. Useful for escaping strings in CSV, for
example. Less useful for escaping JavaScript; use the ``escapejs``
filter instead.
"""
return value.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
@register.filter(is_safe=True)
@stringfilter
def capfirst(value):
"""Capitalize the first character of the value."""
return value and value[0].upper() + value[1:]
@register.filter("escapejs")
@stringfilter
def escapejs_filter(value):
"""Hex encode characters for use in JavaScript strings."""
return escapejs(value)
@register.filter(is_safe=True)
def json_script(value, element_id):
"""
Output value JSON-encoded, wrapped in a <script type="application/json">
tag.
"""
return _json_script(value, element_id)
@register.filter(is_safe=True)
def floatformat(text, arg=-1):
"""
Display a float to a specified number of decimal places.
If called without an argument, display the floating point number with one
decimal place -- but only if there's a decimal place to be displayed:
* num1 = 34.23234
* num2 = 34.00000
* num3 = 34.26000
* {{ num1|floatformat }} displays "34.2"
* {{ num2|floatformat }} displays "34"
* {{ num3|floatformat }} displays "34.3"
If arg is positive, always display exactly arg number of decimal places:
* {{ num1|floatformat:3 }} displays "34.232"
* {{ num2|floatformat:3 }} displays "34.000"
* {{ num3|floatformat:3 }} displays "34.260"
If arg is negative, display arg number of decimal places -- but only if
there are places to be displayed:
* {{ num1|floatformat:"-3" }} displays "34.232"
* {{ num2|floatformat:"-3" }} displays "34"
* {{ num3|floatformat:"-3" }} displays "34.260"
If the input float is infinity or NaN, display the string representation
of that value.
"""
try:
input_val = repr(text)
d = Decimal(input_val)
except InvalidOperation:
try:
d = Decimal(str(float(text)))
except (ValueError, InvalidOperation, TypeError):
return ''
try:
p = int(arg)
except ValueError:
return input_val
try:
m = int(d) - d
except (ValueError, OverflowError, InvalidOperation):
return input_val
if not m and p < 0:
return mark_safe(formats.number_format('%d' % (int(d)), 0))
exp = Decimal(1).scaleb(-abs(p))
# Set the precision high enough to avoid an exception (#15789).
tupl = d.as_tuple()
units = len(tupl[1])
units += -tupl[2] if m else tupl[2]
prec = abs(p) + units + 1
# Avoid conversion to scientific notation by accessing `sign`, `digits`,
# and `exponent` from Decimal.as_tuple() directly.
rounded_d = d.quantize(exp, ROUND_HALF_UP, Context(prec=prec))
sign, digits, exponent = rounded_d.as_tuple()
digits = [str(digit) for digit in reversed(digits)]
while len(digits) <= abs(exponent):
digits.append('0')
digits.insert(-exponent, '.')
if sign and rounded_d:
digits.append('-')
number = ''.join(reversed(digits))
return mark_safe(formats.number_format(number, abs(p)))
@register.filter(is_safe=True)
@stringfilter
def iriencode(value):
"""Escape an IRI value for use in a URL."""
return iri_to_uri(value)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linenumbers(value, autoescape=True):
"""Display text with line numbers."""
lines = value.split('\n')
# Find the maximum width of the line count, for use with zero padding
# string format command
width = str(len(str(len(lines))))
if not autoescape or isinstance(value, SafeData):
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, line)
else:
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, escape(line))
return mark_safe('\n'.join(lines))
@register.filter(is_safe=True)
@stringfilter
def lower(value):
"""Convert a string into all lowercase."""
return value.lower()
@register.filter(is_safe=False)
@stringfilter
def make_list(value):
"""
Return the value turned into a list.
For an integer, it's a list of digits.
For a string, it's a list of characters.
"""
return list(value)
@register.filter(is_safe=True)
@stringfilter
def slugify(value):
"""
Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't
alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip
leading and trailing whitespace.
"""
return _slugify(value)
@register.filter(is_safe=True)
def stringformat(value, arg):
"""
Format the variable according to the arg, a string formatting specifier.
This specifier uses Python string formatting syntax, with the exception
that the leading "%" is dropped.
See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
for documentation of Python string formatting.
"""
if isinstance(value, tuple):
value = str(value)
try:
return ("%" + str(arg)) % value
except (ValueError, TypeError):
return ""
@register.filter(is_safe=True)
@stringfilter
def title(value):
"""Convert a string into titlecase."""
t = re.sub("([a-z])'([A-Z])", lambda m: m[0].lower(), value.title())
return re.sub(r'\d([A-Z])', lambda m: m[0].lower(), t)
@register.filter(is_safe=True)
@stringfilter
def truncatechars(value, arg):
"""Truncate a string after `arg` number of characters."""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).chars(length)
@register.filter(is_safe=True)
@stringfilter
def truncatechars_html(value, arg):
"""
Truncate HTML after `arg` number of chars.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).chars(length, html=True)
@register.filter(is_safe=True)
@stringfilter
def truncatewords(value, arg):
"""
Truncate a string after `arg` number of words.
Remove newlines within the string.
"""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).words(length, truncate=' …')
@register.filter(is_safe=True)
@stringfilter
def truncatewords_html(value, arg):
"""
Truncate HTML after `arg` number of words.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).words(length, html=True, truncate=' …')
@register.filter(is_safe=False)
@stringfilter
def upper(value):
"""Convert a string into all uppercase."""
return value.upper()
@register.filter(is_safe=False)
@stringfilter
def urlencode(value, safe=None):
"""
Escape a value for use in a URL.
The ``safe`` parameter determines the characters which should not be
escaped by Python's quote() function. If not provided, use the default safe
characters (but an empty string can be provided when *all* characters
should be escaped).
"""
kwargs = {}
if safe is not None:
kwargs['safe'] = safe
return quote(value, **kwargs)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlize(value, autoescape=True):
"""Convert URLs in plain text into clickable links."""
return mark_safe(_urlize(value, nofollow=True, autoescape=autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlizetrunc(value, limit, autoescape=True):
"""
Convert URLs into clickable links, truncating URLs to the given character
limit, and adding 'rel=nofollow' attribute to discourage spamming.
Argument: Length to truncate URLs to.
"""
return mark_safe(_urlize(value, trim_url_limit=int(limit), nofollow=True, autoescape=autoescape))
@register.filter(is_safe=False)
@stringfilter
def wordcount(value):
"""Return the number of words."""
return len(value.split())
@register.filter(is_safe=True)
@stringfilter
def wordwrap(value, arg):
"""Wrap words at `arg` line length."""
return wrap(value, int(arg))
@register.filter(is_safe=True)
@stringfilter
def ljust(value, arg):
"""Left-align the value in a field of a given width."""
return value.ljust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def rjust(value, arg):
"""Right-align the value in a field of a given width."""
return value.rjust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def center(value, arg):
"""Center the value in a field of a given width."""
return value.center(int(arg))
@register.filter
@stringfilter
def cut(value, arg):
"""Remove all values of arg from the given string."""
safe = isinstance(value, SafeData)
value = value.replace(arg, '')
    # Removing ';' from an escaped string could break character references
    # such as '&amp;', so don't mark the result safe in that case.
    if safe and arg != ';':
return mark_safe(value)
return value
###################
# HTML STRINGS #
###################
@register.filter("escape", is_safe=True)
@stringfilter
def escape_filter(value):
"""Mark the value as a string that should be auto-escaped."""
return conditional_escape(value)
@register.filter(is_safe=True)
@stringfilter
def force_escape(value):
"""
Escape a string's HTML. Return a new string containing the escaped
characters (as opposed to "escape", which marks the content for later
possible escaping).
"""
return escape(value)
@register.filter("linebreaks", is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaks_filter(value, autoescape=True):
"""
Replace line breaks in plain text with appropriate HTML; a single
newline becomes an HTML line break (``<br>``) and a new line
followed by a blank line becomes a paragraph break (``</p>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
return mark_safe(linebreaks(value, autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaksbr(value, autoescape=True):
"""
Convert all newlines in a piece of plain text to HTML line breaks
(``<br>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
value = normalize_newlines(value)
if autoescape:
value = escape(value)
return mark_safe(value.replace('\n', '<br>'))
@register.filter(is_safe=True)
@stringfilter
def safe(value):
"""Mark the value as a string that should not be auto-escaped."""
return mark_safe(value)
@register.filter(is_safe=True)
def safeseq(value):
"""
A "safe" filter for sequences. Mark each element in the sequence,
individually, as safe, after converting them to strings. Return a list
with the results.
"""
return [mark_safe(obj) for obj in value]
@register.filter(is_safe=True)
@stringfilter
def striptags(value):
"""Strip all [X]HTML tags."""
return strip_tags(value)
###################
# LISTS #
###################
def _property_resolver(arg):
"""
When arg is convertible to float, behave like operator.itemgetter(arg)
Otherwise, behave like Variable(arg).resolve
>>> _property_resolver(1)('abc')
'b'
>>> _property_resolver('1')('abc')
Traceback (most recent call last):
...
TypeError: string indices must be integers
>>> class Foo:
... a = 42
... b = 3.14
... c = 'Hey!'
>>> _property_resolver('b')(Foo())
3.14
"""
try:
float(arg)
except ValueError:
return Variable(arg).resolve
else:
return itemgetter(arg)
@register.filter(is_safe=False)
def dictsort(value, arg):
"""
Given a list of dicts, return that list sorted by the property given in
the argument.
"""
try:
return sorted(value, key=_property_resolver(arg))
except (TypeError, VariableDoesNotExist):
return ''
@register.filter(is_safe=False)
def dictsortreversed(value, arg):
"""
Given a list of dicts, return that list sorted in reverse order by the
property given in the argument.
"""
try:
return sorted(value, key=_property_resolver(arg), reverse=True)
except (TypeError, VariableDoesNotExist):
return ''
@register.filter(is_safe=False)
def first(value):
"""Return the first item in a list."""
try:
return value[0]
except IndexError:
return ''
@register.filter(is_safe=True, needs_autoescape=True)
def join(value, arg, autoescape=True):
"""Join a list with a string, like Python's ``str.join(list)``."""
try:
if autoescape:
value = [conditional_escape(v) for v in value]
data = conditional_escape(arg).join(value)
except TypeError: # Fail silently if arg isn't iterable.
return value
return mark_safe(data)
@register.filter(is_safe=True)
def last(value):
"""Return the last item in a list."""
try:
return value[-1]
except IndexError:
return ''
@register.filter(is_safe=False)
def length(value):
"""Return the length of the value - useful for lists."""
try:
return len(value)
except (ValueError, TypeError):
return 0
@register.filter(is_safe=False)
def length_is(value, arg):
"""Return a boolean of whether the value's length is the argument."""
try:
return len(value) == int(arg)
except (ValueError, TypeError):
return ''
@register.filter(is_safe=True)
def random(value):
"""Return a random item from the list."""
return random_module.choice(value)
@register.filter("slice", is_safe=True)
def slice_filter(value, arg):
"""
Return a slice of the list using the same syntax as Python's list slicing.
"""
try:
bits = []
for x in str(arg).split(':'):
if not x:
bits.append(None)
else:
bits.append(int(x))
return value[slice(*bits)]
except (ValueError, TypeError):
return value # Fail silently.
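# Illustrative usage (hypothetical variable name, not part of the original
# source): the argument is split into slice() parts above, so step syntax also
# works:
#
#     {{ mylist|slice:":2" }}    -> first two items
#     {{ mylist|slice:"1:6:2" }} -> behaves like mylist[1:6:2]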
@register.filter(is_safe=True, needs_autoescape=True)
def unordered_list(value, autoescape=True):
"""
Recursively take a self-nested list and return an HTML unordered list --
WITHOUT opening and closing <ul> tags.
Assume the list is in the proper format. For example, if ``var`` contains:
``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then
``{{ var|unordered_list }}`` returns::
<li>States
<ul>
<li>Kansas
<ul>
<li>Lawrence</li>
<li>Topeka</li>
</ul>
</li>
<li>Illinois</li>
</ul>
</li>
"""
if autoescape:
escaper = conditional_escape
else:
def escaper(x):
return x
def walk_items(item_list):
item_iterator = iter(item_list)
try:
item = next(item_iterator)
while True:
try:
next_item = next(item_iterator)
except StopIteration:
yield item, None
break
if isinstance(next_item, (list, tuple, types.GeneratorType)):
try:
iter(next_item)
except TypeError:
pass
else:
yield item, next_item
item = next(item_iterator)
continue
yield item, None
item = next_item
except StopIteration:
pass
def list_formatter(item_list, tabs=1):
indent = '\t' * tabs
output = []
for item, children in walk_items(item_list):
sublist = ''
if children:
sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % (
indent, list_formatter(children, tabs + 1), indent, indent)
output.append('%s<li>%s%s</li>' % (
indent, escaper(item), sublist))
return '\n'.join(output)
return mark_safe(list_formatter(value))
###################
# INTEGERS #
###################
@register.filter(is_safe=False)
def add(value, arg):
"""Add the arg to the value."""
try:
return int(value) + int(arg)
except (ValueError, TypeError):
try:
return value + arg
except Exception:
return ''
@register.filter(is_safe=False)
def get_digit(value, arg):
"""
Given a whole number, return the requested digit of it, where 1 is the
right-most digit, 2 is the second-right-most digit, etc. Return the
original value for invalid input (if input or argument is not an integer,
or if argument is less than 1). Otherwise, output is always an integer.
"""
try:
arg = int(arg)
value = int(value)
except ValueError:
return value # Fail silently for an invalid argument
if arg < 1:
return value
try:
return int(str(value)[-arg])
except IndexError:
return 0
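# Illustrative examples (not part of the original source), following the
# right-to-left indexing implemented above:
#
#     {{ 2468|get_digit:"1" }} -> 8   (right-most digit)
#     {{ 2468|get_digit:"2" }} -> 6
#     {{ 2468|get_digit:"5" }} -> 0   (out of range, handled by the IndexError branch)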
###################
# DATES #
###################
@register.filter(expects_localtime=True, is_safe=False)
def date(value, arg=None):
"""Format a date according to the given format."""
if value in (None, ''):
return ''
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ''
@register.filter(expects_localtime=True, is_safe=False)
def time(value, arg=None):
"""Format a time according to the given format."""
if value in (None, ''):
return ''
try:
return formats.time_format(value, arg)
except (AttributeError, TypeError):
try:
return time_format(value, arg)
except (AttributeError, TypeError):
return ''
@register.filter("timesince", is_safe=False)
def timesince_filter(value, arg=None):
"""Format a date as the time since that date (i.e. "4 days, 6 hours")."""
if not value:
return ''
try:
if arg:
return timesince(value, arg)
return timesince(value)
except (ValueError, TypeError):
return ''
@register.filter("timeuntil", is_safe=False)
def timeuntil_filter(value, arg=None):
"""Format a date as the time until that date (i.e. "4 days, 6 hours")."""
if not value:
return ''
try:
return timeuntil(value, arg)
except (ValueError, TypeError):
return ''
###################
# LOGIC #
###################
@register.filter(is_safe=False)
def default(value, arg):
"""If value is unavailable, use given default."""
return value or arg
@register.filter(is_safe=False)
def default_if_none(value, arg):
"""If value is None, use given default."""
if value is None:
return arg
return value
@register.filter(is_safe=False)
def divisibleby(value, arg):
"""Return True if the value is divisible by the argument."""
return int(value) % int(arg) == 0
@register.filter(is_safe=False)
def yesno(value, arg=None):
"""
Given a string mapping values for true, false, and (optionally) None,
return one of those strings according to the value:
========== ====================== ==================================
Value Argument Outputs
========== ====================== ==================================
``True`` ``"yeah,no,maybe"`` ``yeah``
``False`` ``"yeah,no,maybe"`` ``no``
``None`` ``"yeah,no,maybe"`` ``maybe``
``None`` ``"yeah,no"`` ``"no"`` (converts None to False
                                       if no mapping for None is given).
========== ====================== ==================================
"""
if arg is None:
# Translators: Please do not add spaces around commas.
arg = gettext('yes,no,maybe')
bits = arg.split(',')
if len(bits) < 2:
return value # Invalid arg.
try:
yes, no, maybe = bits
except ValueError:
# Unpack list of wrong size (no "maybe" value provided).
yes, no, maybe = bits[0], bits[1], bits[1]
if value is None:
return maybe
if value:
return yes
return no
###################
# MISC #
###################
@register.filter(is_safe=True)
def filesizeformat(bytes_):
"""
Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
102 bytes, etc.).
"""
try:
bytes_ = int(bytes_)
except (TypeError, ValueError, UnicodeDecodeError):
value = ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}
return avoid_wrapping(value)
def filesize_number_format(value):
return formats.number_format(round(value, 1), 1)
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50
negative = bytes_ < 0
if negative:
bytes_ = -bytes_ # Allow formatting of negative numbers.
if bytes_ < KB:
value = ngettext("%(size)d byte", "%(size)d bytes", bytes_) % {'size': bytes_}
elif bytes_ < MB:
value = gettext("%s KB") % filesize_number_format(bytes_ / KB)
elif bytes_ < GB:
value = gettext("%s MB") % filesize_number_format(bytes_ / MB)
elif bytes_ < TB:
value = gettext("%s GB") % filesize_number_format(bytes_ / GB)
elif bytes_ < PB:
value = gettext("%s TB") % filesize_number_format(bytes_ / TB)
else:
value = gettext("%s PB") % filesize_number_format(bytes_ / PB)
if negative:
value = "-%s" % value
return avoid_wrapping(value)
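# Illustrative examples (not part of the original source). Exact output
# depends on the active locale, and avoid_wrapping() inserts non-breaking
# spaces:
#
#     filesizeformat(1023)        -> "1023 bytes"
#     filesizeformat(1024 * 1024) -> "1.0 MB"
#     filesizeformat(-1024)       -> "-1.0 KB"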
@register.filter(is_safe=False)
def pluralize(value, arg='s'):
"""
Return a plural suffix if the value is not 1, '1', or an object of
length 1. By default, use 's' as the suffix:
    * If value is 0, vote{{ value|pluralize }} displays "votes".
    * If value is 1, vote{{ value|pluralize }} displays "vote".
    * If value is 2, vote{{ value|pluralize }} displays "votes".
    If an argument is provided, use that string instead:
    * If value is 0, class{{ value|pluralize:"es" }} displays "classes".
    * If value is 1, class{{ value|pluralize:"es" }} displays "class".
    * If value is 2, class{{ value|pluralize:"es" }} displays "classes".
    If the provided argument contains a comma, use the text before the comma
    for the singular case and the text after the comma for the plural case:
    * If value is 0, cand{{ value|pluralize:"y,ies" }} displays "candies".
    * If value is 1, cand{{ value|pluralize:"y,ies" }} displays "candy".
    * If value is 2, cand{{ value|pluralize:"y,ies" }} displays "candies".
"""
if ',' not in arg:
arg = ',' + arg
bits = arg.split(',')
if len(bits) > 2:
return ''
singular_suffix, plural_suffix = bits[:2]
try:
return singular_suffix if float(value) == 1 else plural_suffix
except ValueError: # Invalid string that's not a number.
pass
except TypeError: # Value isn't a string or a number; maybe it's a list?
try:
return singular_suffix if len(value) == 1 else plural_suffix
except TypeError: # len() of unsized object.
pass
return ''
@register.filter("phone2numeric", is_safe=True)
def phone2numeric_filter(value):
"""Take a phone number and converts it in to its numerical equivalent."""
return phone2numeric(value)
@register.filter(is_safe=True)
def pprint(value):
"""A wrapper around pprint.pprint -- for debugging, really."""
try:
return pformat(value)
except Exception as e:
return "Error in formatting: %s: %s" % (e.__class__.__name__, e)
|
cb38b568c29066da710653aa0a5e9a1acfa6e08d3bad476003f93fdb0ad538ee | """
This module converts requested URLs to callback view functions.
URLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a ResolverMatch object which provides access to all
attributes of the resolved URL match.
"""
import functools
import inspect
import re
import string
from importlib import import_module
from urllib.parse import quote
from asgiref.local import Local
from django.conf import settings
from django.core.checks import Error, Warning
from django.core.checks.urls import check_resolver
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.utils.datastructures import MultiValueDict
from django.utils.functional import cached_property
from django.utils.http import RFC3986_SUBDELIMS, escape_leading_slashes
from django.utils.regex_helper import _lazy_re_compile, normalize
from django.utils.translation import get_language
from .converters import get_converter
from .exceptions import NoReverseMatch, Resolver404
from .utils import get_callable
class ResolverMatch:
def __init__(self, func, args, kwargs, url_name=None, app_names=None, namespaces=None, route=None):
self.func = func
self.args = args
self.kwargs = kwargs
self.url_name = url_name
self.route = route
# If a URLRegexResolver doesn't have a namespace or app_name, it passes
# in an empty value.
self.app_names = [x for x in app_names if x] if app_names else []
self.app_name = ':'.join(self.app_names)
self.namespaces = [x for x in namespaces if x] if namespaces else []
self.namespace = ':'.join(self.namespaces)
if not hasattr(func, '__name__'):
# A class-based view
self._func_path = func.__class__.__module__ + '.' + func.__class__.__name__
else:
# A function-based view
self._func_path = func.__module__ + '.' + func.__name__
view_path = url_name or self._func_path
self.view_name = ':'.join(self.namespaces + [view_path])
def __getitem__(self, index):
return (self.func, self.args, self.kwargs)[index]
def __repr__(self):
return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_names=%s, namespaces=%s, route=%s)" % (
self._func_path, self.args, self.kwargs, self.url_name,
self.app_names, self.namespaces, self.route,
)
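# Illustrative usage (hypothetical URL, not part of the original source):
# __getitem__ above lets a ResolverMatch be unpacked like the plain 3-tuple
# that django.urls.resolve() returned in older Django versions:
#
#     view_func, args, kwargs = resolve('/clients/client/123/')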
def get_resolver(urlconf=None):
if urlconf is None:
urlconf = settings.ROOT_URLCONF
return _get_cached_resolver(urlconf)
@functools.lru_cache(maxsize=None)
def _get_cached_resolver(urlconf=None):
return URLResolver(RegexPattern(r'^/'), urlconf)
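# Descriptive note (not part of the original source): get_resolver() maps
# ``urlconf=None`` to settings.ROOT_URLCONF *before* the cached call above, so
# get_resolver(None) and get_resolver(settings.ROOT_URLCONF) share a single
# cached URLResolver instead of building the URLconf twice.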
@functools.lru_cache(maxsize=None)
def get_ns_resolver(ns_pattern, resolver, converters):
# Build a namespaced resolver for the given parent URLconf pattern.
# This makes it possible to have captured parameters in the parent
# URLconf pattern.
pattern = RegexPattern(ns_pattern)
pattern.converters = dict(converters)
ns_resolver = URLResolver(pattern, resolver.url_patterns)
return URLResolver(RegexPattern(r'^/'), [ns_resolver])
class LocaleRegexDescriptor:
def __init__(self, attr):
self.attr = attr
def __get__(self, instance, cls=None):
"""
Return a compiled regular expression based on the active language.
"""
if instance is None:
return self
# As a performance optimization, if the given regex string is a regular
# string (not a lazily-translated string proxy), compile it once and
# avoid per-language compilation.
pattern = getattr(instance, self.attr)
if isinstance(pattern, str):
instance.__dict__['regex'] = instance._compile(pattern)
return instance.__dict__['regex']
language_code = get_language()
if language_code not in instance._regex_dict:
instance._regex_dict[language_code] = instance._compile(str(pattern))
return instance._regex_dict[language_code]
class CheckURLMixin:
def describe(self):
"""
Format the URL pattern for display in warning messages.
"""
description = "'{}'".format(self)
if self.name:
description += " [name='{}']".format(self.name)
return description
def _check_pattern_startswith_slash(self):
"""
Check that the pattern does not begin with a forward slash.
"""
regex_pattern = self.regex.pattern
if not settings.APPEND_SLASH:
# Skip check as it can be useful to start a URL pattern with a slash
# when APPEND_SLASH=False.
return []
if regex_pattern.startswith(('/', '^/', '^\\/')) and not regex_pattern.endswith('/'):
warning = Warning(
"Your URL pattern {} has a route beginning with a '/'. Remove this "
"slash as it is unnecessary. If this pattern is targeted in an "
"include(), ensure the include() pattern has a trailing '/'.".format(
self.describe()
),
id="urls.W002",
)
return [warning]
else:
return []
class RegexPattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_regex')
def __init__(self, regex, name=None, is_endpoint=False):
self._regex = regex
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = {}
def match(self, path):
match = self.regex.search(path)
if match:
# If there are any named groups, use those as kwargs, ignoring
# non-named groups. Otherwise, pass all non-named arguments as
# positional arguments.
kwargs = match.groupdict()
args = () if kwargs else match.groups()
kwargs = {k: v for k, v in kwargs.items() if v is not None}
return path[match.end():], args, kwargs
return None
def check(self):
warnings = []
warnings.extend(self._check_pattern_startswith_slash())
if not self._is_endpoint:
warnings.extend(self._check_include_trailing_dollar())
return warnings
def _check_include_trailing_dollar(self):
regex_pattern = self.regex.pattern
if regex_pattern.endswith('$') and not regex_pattern.endswith(r'\$'):
return [Warning(
"Your URL pattern {} uses include with a route ending with a '$'. "
"Remove the dollar from the route to avoid problems including "
"URLs.".format(self.describe()),
id='urls.W001',
)]
else:
return []
def _compile(self, regex):
"""Compile and return the given regular expression."""
try:
return re.compile(regex)
except re.error as e:
raise ImproperlyConfigured(
'"%s" is not a valid regular expression: %s' % (regex, e)
) from e
def __str__(self):
return str(self._regex)
_PATH_PARAMETER_COMPONENT_RE = _lazy_re_compile(
r'<(?:(?P<converter>[^>:]+):)?(?P<parameter>[^>]+)>'
)
def _route_to_regex(route, is_endpoint=False):
"""
Convert a path pattern into a regular expression. Return the regular
expression and a dictionary mapping the capture names to the converters.
For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
and {'pk': <django.urls.converters.IntConverter>}.
"""
if not set(route).isdisjoint(string.whitespace):
raise ImproperlyConfigured("URL route '%s' cannot contain whitespace." % route)
original_route = route
parts = ['^']
converters = {}
while True:
match = _PATH_PARAMETER_COMPONENT_RE.search(route)
if not match:
parts.append(re.escape(route))
break
parts.append(re.escape(route[:match.start()]))
route = route[match.end():]
parameter = match['parameter']
if not parameter.isidentifier():
raise ImproperlyConfigured(
"URL route '%s' uses parameter name %r which isn't a valid "
"Python identifier." % (original_route, parameter)
)
raw_converter = match['converter']
if raw_converter is None:
# If a converter isn't specified, the default is `str`.
raw_converter = 'str'
try:
converter = get_converter(raw_converter)
except KeyError as e:
raise ImproperlyConfigured(
'URL route %r uses invalid converter %r.'
% (original_route, raw_converter)
) from e
converters[parameter] = converter
parts.append('(?P<' + parameter + '>' + converter.regex + ')')
if is_endpoint:
parts.append('$')
return ''.join(parts), converters
class RoutePattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_route')
def __init__(self, route, name=None, is_endpoint=False):
self._route = route
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = _route_to_regex(str(route), is_endpoint)[1]
def match(self, path):
match = self.regex.search(path)
if match:
# RoutePattern doesn't allow non-named groups so args are ignored.
kwargs = match.groupdict()
for key, value in kwargs.items():
converter = self.converters[key]
try:
kwargs[key] = converter.to_python(value)
except ValueError:
return None
return path[match.end():], (), kwargs
return None
def check(self):
warnings = self._check_pattern_startswith_slash()
route = self._route
if '(?P<' in route or route.startswith('^') or route.endswith('$'):
warnings.append(Warning(
"Your URL pattern {} has a route that contains '(?P<', begins "
"with a '^', or ends with a '$'. This was likely an oversight "
"when migrating to django.urls.path().".format(self.describe()),
id='2_0.W001',
))
return warnings
def _compile(self, route):
return re.compile(_route_to_regex(route, self._is_endpoint)[0])
def __str__(self):
return str(self._route)
class LocalePrefixPattern:
def __init__(self, prefix_default_language=True):
self.prefix_default_language = prefix_default_language
self.converters = {}
@property
def regex(self):
# This is only used by reverse() and cached in _reverse_dict.
return re.compile(self.language_prefix)
@property
def language_prefix(self):
language_code = get_language() or settings.LANGUAGE_CODE
if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language:
return ''
else:
return '%s/' % language_code
def match(self, path):
language_prefix = self.language_prefix
if path.startswith(language_prefix):
return path[len(language_prefix):], (), {}
return None
def check(self):
return []
def describe(self):
return "'{}'".format(self)
def __str__(self):
return self.language_prefix
class URLPattern:
def __init__(self, pattern, callback, default_args=None, name=None):
self.pattern = pattern
self.callback = callback # the view
self.default_args = default_args or {}
self.name = name
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.pattern.describe())
def check(self):
warnings = self._check_pattern_name()
warnings.extend(self.pattern.check())
return warnings
def _check_pattern_name(self):
"""
Check that the pattern name does not contain a colon.
"""
if self.pattern.name is not None and ":" in self.pattern.name:
warning = Warning(
"Your URL pattern {} has a name including a ':'. Remove the colon, to "
"avoid ambiguous namespace references.".format(self.pattern.describe()),
id="urls.W003",
)
return [warning]
else:
return []
def resolve(self, path):
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
# Pass any extra_kwargs as **kwargs.
kwargs.update(self.default_args)
return ResolverMatch(self.callback, args, kwargs, self.pattern.name, route=str(self.pattern))
@cached_property
def lookup_str(self):
"""
A string that identifies the view (e.g. 'path.to.view_function' or
'path.to.ClassBasedView').
"""
callback = self.callback
if isinstance(callback, functools.partial):
callback = callback.func
if not hasattr(callback, '__name__'):
return callback.__module__ + "." + callback.__class__.__name__
return callback.__module__ + "." + callback.__qualname__
class URLResolver:
def __init__(self, pattern, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
self.pattern = pattern
# urlconf_name is the dotted Python path to the module defining
# urlpatterns. It may also be an object with an urlpatterns attribute
# or urlpatterns itself.
self.urlconf_name = urlconf_name
self.callback = None
self.default_kwargs = default_kwargs or {}
self.namespace = namespace
self.app_name = app_name
self._reverse_dict = {}
self._namespace_dict = {}
self._app_dict = {}
# set of dotted paths to all functions and classes that are used in
# urlpatterns
self._callback_strs = set()
self._populated = False
self._local = Local()
def __repr__(self):
if isinstance(self.urlconf_name, list) and self.urlconf_name:
# Don't bother to output the whole list, it can be huge
urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__
else:
urlconf_repr = repr(self.urlconf_name)
return '<%s %s (%s:%s) %s>' % (
self.__class__.__name__, urlconf_repr, self.app_name,
self.namespace, self.pattern.describe(),
)
def check(self):
messages = []
for pattern in self.url_patterns:
messages.extend(check_resolver(pattern))
messages.extend(self._check_custom_error_handlers())
return messages or self.pattern.check()
def _check_custom_error_handlers(self):
messages = []
# All handlers take (request, exception) arguments except handler500
# which takes (request).
for status_code, num_parameters in [(400, 2), (403, 2), (404, 2), (500, 1)]:
try:
handler, param_dict = self.resolve_error_handler(status_code)
except (ImportError, ViewDoesNotExist) as e:
path = getattr(self.urlconf_module, 'handler%s' % status_code)
msg = (
"The custom handler{status_code} view '{path}' could not be imported."
).format(status_code=status_code, path=path)
messages.append(Error(msg, hint=str(e), id='urls.E008'))
continue
signature = inspect.signature(handler)
args = [None] * num_parameters
try:
signature.bind(*args)
except TypeError:
msg = (
"The custom handler{status_code} view '{path}' does not "
"take the correct number of arguments ({args})."
).format(
status_code=status_code,
path=handler.__module__ + '.' + handler.__qualname__,
args='request, exception' if num_parameters == 2 else 'request',
)
messages.append(Error(msg, id='urls.E007'))
return messages
def _populate(self):
# Short-circuit if called recursively in this thread to prevent
# infinite recursion. Concurrent threads may call this at the same
# time and will need to continue, so set 'populating' on a
# thread-local variable.
if getattr(self._local, 'populating', False):
return
try:
self._local.populating = True
lookups = MultiValueDict()
namespaces = {}
apps = {}
language_code = get_language()
for url_pattern in reversed(self.url_patterns):
p_pattern = url_pattern.pattern.regex.pattern
if p_pattern.startswith('^'):
p_pattern = p_pattern[1:]
if isinstance(url_pattern, URLPattern):
self._callback_strs.add(url_pattern.lookup_str)
bits = normalize(url_pattern.pattern.regex.pattern)
lookups.appendlist(
url_pattern.callback,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
if url_pattern.name is not None:
lookups.appendlist(
url_pattern.name,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
else: # url_pattern is a URLResolver.
url_pattern._populate()
if url_pattern.app_name:
apps.setdefault(url_pattern.app_name, []).append(url_pattern.namespace)
namespaces[url_pattern.namespace] = (p_pattern, url_pattern)
else:
for name in url_pattern.reverse_dict:
for matches, pat, defaults, converters in url_pattern.reverse_dict.getlist(name):
new_matches = normalize(p_pattern + pat)
lookups.appendlist(
name,
(
new_matches,
p_pattern + pat,
{**defaults, **url_pattern.default_kwargs},
{**self.pattern.converters, **url_pattern.pattern.converters, **converters}
)
)
for namespace, (prefix, sub_pattern) in url_pattern.namespace_dict.items():
current_converters = url_pattern.pattern.converters
sub_pattern.pattern.converters.update(current_converters)
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in url_pattern.app_dict.items():
apps.setdefault(app_name, []).extend(namespace_list)
self._callback_strs.update(url_pattern._callback_strs)
self._namespace_dict[language_code] = namespaces
self._app_dict[language_code] = apps
self._reverse_dict[language_code] = lookups
self._populated = True
finally:
self._local.populating = False
@property
def reverse_dict(self):
language_code = get_language()
if language_code not in self._reverse_dict:
self._populate()
return self._reverse_dict[language_code]
@property
def namespace_dict(self):
language_code = get_language()
if language_code not in self._namespace_dict:
self._populate()
return self._namespace_dict[language_code]
@property
def app_dict(self):
language_code = get_language()
if language_code not in self._app_dict:
self._populate()
return self._app_dict[language_code]
@staticmethod
def _join_route(route1, route2):
"""Join two routes, without the starting ^ in the second route."""
if not route1:
return route2
if route2.startswith('^'):
route2 = route2[1:]
return route1 + route2
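    # Illustrative examples (not part of the original source):
    #
    #     _join_route('clients/', '^client/<int:id>/')  -> 'clients/client/<int:id>/'
    #     _join_route('', '^client/<int:id>/')           -> '^client/<int:id>/'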
def _is_callback(self, name):
if not self._populated:
self._populate()
return name in self._callback_strs
def resolve(self, path):
path = str(path) # path may be a reverse_lazy object
tried = []
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
for pattern in self.url_patterns:
try:
sub_match = pattern.resolve(new_path)
except Resolver404 as e:
sub_tried = e.args[0].get('tried')
if sub_tried is not None:
tried.extend([pattern] + t for t in sub_tried)
else:
tried.append([pattern])
else:
if sub_match:
# Merge captured arguments in match with submatch
sub_match_dict = {**kwargs, **self.default_kwargs}
# Update the sub_match_dict with the kwargs from the sub_match.
sub_match_dict.update(sub_match.kwargs)
# If there are *any* named groups, ignore all non-named groups.
# Otherwise, pass all non-named arguments as positional arguments.
sub_match_args = sub_match.args
if not sub_match_dict:
sub_match_args = args + sub_match.args
current_route = '' if isinstance(pattern, URLPattern) else str(pattern.pattern)
return ResolverMatch(
sub_match.func,
sub_match_args,
sub_match_dict,
sub_match.url_name,
[self.app_name] + sub_match.app_names,
[self.namespace] + sub_match.namespaces,
self._join_route(current_route, sub_match.route),
)
tried.append([pattern])
raise Resolver404({'tried': tried, 'path': new_path})
raise Resolver404({'path': path})
@cached_property
def urlconf_module(self):
if isinstance(self.urlconf_name, str):
return import_module(self.urlconf_name)
else:
return self.urlconf_name
@cached_property
def url_patterns(self):
# urlconf_module might be a valid set of patterns, so we default to it
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
try:
iter(patterns)
except TypeError as e:
msg = (
"The included URLconf '{name}' does not appear to have any "
"patterns in it. If you see valid patterns in the file then "
"the issue is probably caused by a circular import."
)
raise ImproperlyConfigured(msg.format(name=self.urlconf_name)) from e
return patterns
def resolve_error_handler(self, view_type):
callback = getattr(self.urlconf_module, 'handler%s' % view_type, None)
if not callback:
# No handler specified in file; use lazy import, since
# django.conf.urls imports this file.
from django.conf import urls
callback = getattr(urls, 'handler%s' % view_type)
return get_callable(callback), {}
def reverse(self, lookup_view, *args, **kwargs):
return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)
def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
if args and kwargs:
raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
if not self._populated:
self._populate()
possibilities = self.reverse_dict.getlist(lookup_view)
for possibility, pattern, defaults, converters in possibilities:
for result, params in possibility:
if args:
if len(args) != len(params):
continue
candidate_subs = dict(zip(params, args))
else:
if set(kwargs).symmetric_difference(params).difference(defaults):
continue
if any(kwargs.get(k, v) != v for k, v in defaults.items()):
continue
candidate_subs = kwargs
# Convert the candidate subs to text using Converter.to_url().
text_candidate_subs = {}
match = True
for k, v in candidate_subs.items():
if k in converters:
try:
text_candidate_subs[k] = converters[k].to_url(v)
except ValueError:
match = False
break
else:
text_candidate_subs[k] = str(v)
if not match:
continue
# WSGI provides decoded URLs, without %xx escapes, and the URL
# resolver operates on such URLs. First substitute arguments
# without quoting to build a decoded URL and look for a match.
# Then, if we have a match, redo the substitution with quoted
# arguments in order to return a properly encoded URL.
candidate_pat = _prefix.replace('%', '%%') + result
if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs):
# safe characters from `pchar` definition of RFC 3986
url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')
# Don't allow construction of scheme relative urls.
return escape_leading_slashes(url)
# lookup_view can be URL name or callable, but callables are not
# friendly in error messages.
m = getattr(lookup_view, '__module__', None)
n = getattr(lookup_view, '__name__', None)
if m is not None and n is not None:
lookup_view_s = "%s.%s" % (m, n)
else:
lookup_view_s = lookup_view
patterns = [pattern for (_, pattern, _, _) in possibilities]
if patterns:
if args:
arg_msg = "arguments '%s'" % (args,)
elif kwargs:
arg_msg = "keyword arguments '%s'" % kwargs
else:
arg_msg = "no arguments"
msg = (
"Reverse for '%s' with %s not found. %d pattern(s) tried: %s" %
(lookup_view_s, arg_msg, len(patterns), patterns)
)
else:
msg = (
"Reverse for '%(view)s' not found. '%(view)s' is not "
"a valid view function or pattern name." % {'view': lookup_view_s}
)
raise NoReverseMatch(msg)
|
7e35e2363dbce61af71b23428b616f87269617d32c594dc92fbbf9a832d20b07 | """
HTML Widget classes
"""
import copy
import datetime
import warnings
from collections import defaultdict
from itertools import chain
from django.conf import settings
from django.forms.utils import to_current_timezone
from django.templatetags.static import static
from django.utils import datetime_safe, formats
from django.utils.datastructures import OrderedSet
from django.utils.dates import MONTHS
from django.utils.formats import get_format
from django.utils.html import format_html, html_safe
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.utils.topological_sort import (
CyclicDependencyError, stable_topological_sort,
)
from django.utils.translation import gettext_lazy as _
from .renderers import get_default_renderer
__all__ = (
'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput',
'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput',
'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea',
'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select',
'NullBooleanSelect', 'SelectMultiple', 'RadioSelect',
'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget',
'SplitHiddenDateTimeWidget', 'SelectDateWidget',
)
MEDIA_TYPES = ('css', 'js')
class MediaOrderConflictWarning(RuntimeWarning):
pass
@html_safe
class Media:
def __init__(self, media=None, css=None, js=None):
if media is not None:
css = getattr(media, 'css', {})
js = getattr(media, 'js', [])
else:
if css is None:
css = {}
if js is None:
js = []
self._css_lists = [css]
self._js_lists = [js]
def __repr__(self):
return 'Media(css=%r, js=%r)' % (self._css, self._js)
def __str__(self):
return self.render()
@property
def _css(self):
css = defaultdict(list)
for css_list in self._css_lists:
for medium, sublist in css_list.items():
css[medium].append(sublist)
return {medium: self.merge(*lists) for medium, lists in css.items()}
@property
def _js(self):
return self.merge(*self._js_lists)
def render(self):
return mark_safe('\n'.join(chain.from_iterable(getattr(self, 'render_' + name)() for name in MEDIA_TYPES)))
def render_js(self):
return [
format_html(
'<script src="{}"></script>',
self.absolute_path(path)
) for path in self._js
]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css)
return chain.from_iterable([
format_html(
'<link href="{}" type="text/css" media="{}" rel="stylesheet">',
self.absolute_path(path), medium
) for path in self._css[medium]
] for medium in media)
def absolute_path(self, path):
"""
Given a relative or absolute path to a static asset, return an absolute
path. An absolute path will be returned unchanged while a relative path
will be passed to django.templatetags.static.static().
"""
if path.startswith(('http://', 'https://', '/')):
return path
return static(path)
def __getitem__(self, name):
"""Return a Media object that only contains media of the given type."""
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, '_' + name)})
raise KeyError('Unknown media type "%s"' % name)
@staticmethod
def merge(*lists):
"""
Merge lists while trying to keep the relative order of the elements.
Warn if the lists have the same elements in a different relative order.
For static assets it can be important to have them included in the DOM
in a certain order. In JavaScript you may not be able to reference a
global or in CSS you might want to override a style.
"""
dependency_graph = defaultdict(set)
all_items = OrderedSet()
for list_ in filter(None, lists):
head = list_[0]
# The first items depend on nothing but have to be part of the
# dependency graph to be included in the result.
dependency_graph.setdefault(head, set())
for item in list_:
all_items.add(item)
# No self dependencies
if head != item:
dependency_graph[item].add(head)
head = item
try:
return stable_topological_sort(all_items, dependency_graph)
except CyclicDependencyError:
warnings.warn(
'Detected duplicate Media files in an opposite order: {}'.format(
', '.join(repr(list_) for list_ in lists)
), MediaOrderConflictWarning,
)
return list(all_items)
def __add__(self, other):
combined = Media()
combined._css_lists = self._css_lists + other._css_lists
combined._js_lists = self._js_lists + other._js_lists
return combined
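# Illustrative example of the merge behaviour above (hypothetical asset names,
# not part of the original source): relative order is preserved and duplicates
# are collapsed:
#
#     combined = Media(js=['jquery.js', 'widget.js']) + Media(js=['widget.js', 'extra.js'])
#     combined._js  ->  ['jquery.js', 'widget.js', 'extra.js']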
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
sup_cls = super(cls, self)
try:
base = sup_cls.media
except AttributeError:
base = Media()
# Get the media definition for this class
definition = getattr(cls, 'Media', None)
if definition:
extend = getattr(definition, 'extend', True)
if extend:
if extend is True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
return Media(definition)
return base
return property(_media)
class MediaDefiningClass(type):
"""
Metaclass for classes that can have media definitions.
"""
def __new__(mcs, name, bases, attrs):
new_class = super().__new__(mcs, name, bases, attrs)
if 'media' not in attrs:
new_class.media = media_property(new_class)
return new_class
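# Illustrative sketch (hypothetical widget, not part of the original source):
# any class built with this metaclass, such as the Widget class below, can
# declare static assets through an inner ``Media`` class, which
# media_property() above merges with the assets of its base classes:
#
#     class CalendarWidget(Widget):
#         class Media:
#             css = {'all': ['calendar.css']}
#             js = ['calendar.js']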
class Widget(metaclass=MediaDefiningClass):
    needs_multipart_form = False  # Determines whether the widget needs a multipart form.
is_localized = False
is_required = False
supports_microseconds = True
def __init__(self, attrs=None):
self.attrs = {} if attrs is None else attrs.copy()
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
@property
def is_hidden(self):
return self.input_type == 'hidden' if hasattr(self, 'input_type') else False
def subwidgets(self, name, value, attrs=None):
context = self.get_context(name, value, attrs)
yield context['widget']
def format_value(self, value):
"""
Return a value as it should appear when rendered in a template.
"""
if value == '' or value is None:
return None
if self.is_localized:
return formats.localize_input(value)
return str(value)
def get_context(self, name, value, attrs):
return {
'widget': {
'name': name,
'is_hidden': self.is_hidden,
'required': self.is_required,
'value': self.format_value(value),
'attrs': self.build_attrs(self.attrs, attrs),
'template_name': self.template_name,
},
}
def render(self, name, value, attrs=None, renderer=None):
"""Render the widget as an HTML string."""
context = self.get_context(name, value, attrs)
return self._render(self.template_name, context, renderer)
def _render(self, template_name, context, renderer=None):
if renderer is None:
renderer = get_default_renderer()
return mark_safe(renderer.render(template_name, context))
def build_attrs(self, base_attrs, extra_attrs=None):
"""Build an attribute dictionary."""
return {**base_attrs, **(extra_attrs or {})}
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, return the value
of this widget or None if it's not provided.
"""
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in data
def id_for_label(self, id_):
"""
Return the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Return None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
def use_required_attribute(self, initial):
return not self.is_hidden
class Input(Widget):
"""
Base class for all <input> widgets.
"""
input_type = None # Subclasses must define this.
template_name = 'django/forms/widgets/input.html'
def __init__(self, attrs=None):
if attrs is not None:
attrs = attrs.copy()
self.input_type = attrs.pop('type', self.input_type)
super().__init__(attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['type'] = self.input_type
return context
class TextInput(Input):
input_type = 'text'
template_name = 'django/forms/widgets/text.html'
class NumberInput(Input):
input_type = 'number'
template_name = 'django/forms/widgets/number.html'
class EmailInput(Input):
input_type = 'email'
template_name = 'django/forms/widgets/email.html'
class URLInput(Input):
input_type = 'url'
template_name = 'django/forms/widgets/url.html'
class PasswordInput(Input):
input_type = 'password'
template_name = 'django/forms/widgets/password.html'
def __init__(self, attrs=None, render_value=False):
super().__init__(attrs)
self.render_value = render_value
def get_context(self, name, value, attrs):
if not self.render_value:
value = None
return super().get_context(name, value, attrs)
class HiddenInput(Input):
input_type = 'hidden'
template_name = 'django/forms/widgets/hidden.html'
class MultipleHiddenInput(HiddenInput):
"""
Handle <input type="hidden"> for fields that have a list
of values.
"""
template_name = 'django/forms/widgets/multiple_hidden.html'
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
final_attrs = context['widget']['attrs']
id_ = context['widget']['attrs'].get('id')
subwidgets = []
for index, value_ in enumerate(context['widget']['value']):
widget_attrs = final_attrs.copy()
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
widget_attrs['id'] = '%s_%s' % (id_, index)
widget = HiddenInput()
widget.is_required = self.is_required
subwidgets.append(widget.get_context(name, value_, widget_attrs)['widget'])
context['widget']['subwidgets'] = subwidgets
return context
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def format_value(self, value):
return [] if value is None else value
class FileInput(Input):
input_type = 'file'
needs_multipart_form = True
template_name = 'django/forms/widgets/file.html'
def format_value(self, value):
"""File input never renders a value."""
return
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in files
def use_required_attribute(self, initial):
return super().use_required_attribute(initial) and not initial
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
clear_checkbox_label = _('Clear')
initial_text = _('Currently')
input_text = _('Change')
template_name = 'django/forms/widgets/clearable_file_input.html'
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + '-clear'
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + '_id'
def is_initial(self, value):
"""
Return whether value is considered to be initial value.
"""
return bool(value and getattr(value, 'url', False))
def format_value(self, value):
"""
Return the file object if it has a defined url attribute.
"""
if self.is_initial(value):
return value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
context['widget'].update({
'checkbox_name': checkbox_name,
'checkbox_id': checkbox_id,
'is_initial': self.is_initial(value),
'input_text': self.input_text,
'initial_text': self.initial_text,
'clear_checkbox_label': self.clear_checkbox_label,
})
return context
def value_from_datadict(self, data, files, name):
upload = super().value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
def value_omitted_from_data(self, data, files, name):
return (
super().value_omitted_from_data(data, files, name) and
self.clear_checkbox_name(name) not in data
)
class Textarea(Widget):
template_name = 'django/forms/widgets/textarea.html'
def __init__(self, attrs=None):
# Use slightly better defaults than HTML's 20x2 box
default_attrs = {'cols': '40', 'rows': '10'}
if attrs:
default_attrs.update(attrs)
super().__init__(default_attrs)
class DateTimeBaseInput(TextInput):
format_key = ''
supports_microseconds = False
def __init__(self, attrs=None, format=None):
super().__init__(attrs)
self.format = format or None
def format_value(self, value):
return formats.localize_input(value, self.format or formats.get_format(self.format_key)[0])
class DateInput(DateTimeBaseInput):
format_key = 'DATE_INPUT_FORMATS'
template_name = 'django/forms/widgets/date.html'
class DateTimeInput(DateTimeBaseInput):
format_key = 'DATETIME_INPUT_FORMATS'
template_name = 'django/forms/widgets/datetime.html'
class TimeInput(DateTimeBaseInput):
format_key = 'TIME_INPUT_FORMATS'
template_name = 'django/forms/widgets/time.html'
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
return not (v is False or v is None or v == '')
class CheckboxInput(Input):
input_type = 'checkbox'
template_name = 'django/forms/widgets/checkbox.html'
def __init__(self, attrs=None, check_test=None):
super().__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = boolean_check if check_test is None else check_test
def format_value(self, value):
"""Only return the 'value' attribute if value isn't empty."""
if value is True or value is False or value is None or value == '':
return
return str(value)
def get_context(self, name, value, attrs):
if self.check_test(value):
attrs = {**(attrs or {}), 'checked': True}
return super().get_context(name, value, attrs)
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {'true': True, 'false': False}
if isinstance(value, str):
value = values.get(value.lower(), value)
return bool(value)
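    # For example, value_from_datadict({'agree': 'true'}, {}, 'agree') returns
    # True, while a missing key returns False since unchecked boxes are never
    # submitted by the browser.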
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
class ChoiceWidget(Widget):
allow_multiple_selected = False
input_type = None
template_name = None
option_template_name = None
add_id_index = True
checked_attribute = {'checked': True}
option_inherits_attrs = True
def __init__(self, attrs=None, choices=()):
super().__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
obj.choices = copy.copy(self.choices)
memo[id(self)] = obj
return obj
def subwidgets(self, name, value, attrs=None):
"""
Yield all "subwidgets" of this widget. Used to enable iterating
options from a BoundField for choice widgets.
"""
value = self.format_value(value)
yield from self.options(name, value, attrs)
def options(self, name, value, attrs=None):
"""Yield a flat list of options for this widgets."""
for group in self.optgroups(name, value, attrs):
yield from group[1]
def optgroups(self, name, value, attrs=None):
"""Return a list of optgroups for this widget."""
groups = []
has_selected = False
for index, (option_value, option_label) in enumerate(self.choices):
if option_value is None:
option_value = ''
subgroup = []
if isinstance(option_label, (list, tuple)):
group_name = option_value
subindex = 0
choices = option_label
else:
group_name = None
subindex = None
choices = [(option_value, option_label)]
groups.append((group_name, subgroup, index))
for subvalue, sublabel in choices:
selected = (
str(subvalue) in value and
(not has_selected or self.allow_multiple_selected)
)
has_selected |= selected
subgroup.append(self.create_option(
name, subvalue, sublabel, selected, index,
subindex=subindex, attrs=attrs,
))
if subindex is not None:
subindex += 1
return groups
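    # Each optgroup above is a (group_name, options, index) triple; group_name
    # is None for ungrouped choices and each option is a dict built by
    # create_option() below.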
def create_option(self, name, value, label, selected, index, subindex=None, attrs=None):
index = str(index) if subindex is None else "%s_%s" % (index, subindex)
if attrs is None:
attrs = {}
option_attrs = self.build_attrs(self.attrs, attrs) if self.option_inherits_attrs else {}
if selected:
option_attrs.update(self.checked_attribute)
if 'id' in option_attrs:
option_attrs['id'] = self.id_for_label(option_attrs['id'], index)
return {
'name': name,
'value': value,
'label': label,
'selected': selected,
'index': index,
'attrs': option_attrs,
'type': self.input_type,
'template_name': self.option_template_name,
'wrap_label': True,
}
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['optgroups'] = self.optgroups(name, context['widget']['value'], attrs)
return context
def id_for_label(self, id_, index='0'):
"""
Use an incremented id for each option where the main widget
references the zero index.
"""
if id_ and self.add_id_index:
id_ = '%s_%s' % (id_, index)
return id_
def value_from_datadict(self, data, files, name):
getter = data.get
if self.allow_multiple_selected:
try:
getter = data.getlist
except AttributeError:
pass
return getter(name)
def format_value(self, value):
"""Return selected values as a list."""
if value is None and self.allow_multiple_selected:
return []
if not isinstance(value, (tuple, list)):
value = [value]
return [str(v) if v is not None else '' for v in value]
class Select(ChoiceWidget):
input_type = 'select'
template_name = 'django/forms/widgets/select.html'
option_template_name = 'django/forms/widgets/select_option.html'
add_id_index = False
checked_attribute = {'selected': True}
option_inherits_attrs = False
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.allow_multiple_selected:
context['widget']['attrs']['multiple'] = True
return context
@staticmethod
def _choice_has_empty_value(choice):
"""Return True if the choice's value is empty string or None."""
value, _ = choice
return value is None or value == ''
def use_required_attribute(self, initial):
"""
Don't render 'required' if the first <option> has a value, as that's
invalid HTML.
"""
use_required_attribute = super().use_required_attribute(initial)
# 'required' is always okay for <select multiple>.
if self.allow_multiple_selected:
return use_required_attribute
first_choice = next(iter(self.choices), None)
return use_required_attribute and first_choice is not None and self._choice_has_empty_value(first_choice)
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = (
('unknown', _('Unknown')),
('true', _('Yes')),
('false', _('No')),
)
super().__init__(attrs, choices)
def format_value(self, value):
try:
return {
True: 'true', False: 'false',
'true': 'true', 'false': 'false',
# For backwards compatibility with Django < 2.2.
'2': 'true', '3': 'false',
}[value]
except KeyError:
return 'unknown'
def value_from_datadict(self, data, files, name):
value = data.get(name)
return {
True: True,
'True': True,
'False': False,
False: False,
'true': True,
'false': False,
# For backwards compatibility with Django < 2.2.
'2': True,
'3': False,
}.get(value)
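    # For example, 'true' and 'True' map to True, 'false' and 'False' map to
    # False, and anything else (including a missing key) maps to None.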
class SelectMultiple(Select):
allow_multiple_selected = True
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def value_omitted_from_data(self, data, files, name):
# An unselected <select multiple> doesn't appear in POST data, so it's
# never known if the value is actually omitted.
return False
class RadioSelect(ChoiceWidget):
input_type = 'radio'
template_name = 'django/forms/widgets/radio.html'
option_template_name = 'django/forms/widgets/radio_option.html'
class CheckboxSelectMultiple(ChoiceWidget):
allow_multiple_selected = True
input_type = 'checkbox'
template_name = 'django/forms/widgets/checkbox_select.html'
option_template_name = 'django/forms/widgets/checkbox_option.html'
def use_required_attribute(self, initial):
# Don't use the 'required' attribute because browser validation would
# require all checkboxes to be checked instead of at least one.
return False
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
def id_for_label(self, id_, index=None):
""""
Don't include for="field_0" in <label> because clicking such a label
would toggle the first checkbox.
"""
if index is None:
return ''
return super().id_for_label(id_, index)
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
In addition to the values added by Widget.get_context(), this widget
adds a list of subwidgets to the context as widget['subwidgets'].
These can be looped over and rendered like normal widgets.
You'll probably want to use this class with MultiValueField.
"""
template_name = 'django/forms/widgets/multiwidget.html'
def __init__(self, widgets, attrs=None):
if isinstance(widgets, dict):
self.widgets_names = [
('_%s' % name) if name else '' for name in widgets
]
widgets = widgets.values()
else:
self.widgets_names = ['_%s' % i for i in range(len(widgets))]
self.widgets = [w() if isinstance(w, type) else w for w in widgets]
super().__init__(attrs)
@property
def is_hidden(self):
return all(w.is_hidden for w in self.widgets)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, list):
value = self.decompress(value)
final_attrs = context['widget']['attrs']
input_type = final_attrs.pop('type', None)
id_ = final_attrs.get('id')
subwidgets = []
for i, (widget_name, widget) in enumerate(zip(self.widgets_names, self.widgets)):
if input_type is not None:
widget.input_type = input_type
widget_name = name + widget_name
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
widget_attrs = final_attrs.copy()
widget_attrs['id'] = '%s_%s' % (id_, i)
else:
widget_attrs = final_attrs
subwidgets.append(widget.get_context(widget_name, widget_value, widget_attrs)['widget'])
context['widget']['subwidgets'] = subwidgets
return context
def id_for_label(self, id_):
if id_:
id_ += '_0'
return id_
def value_from_datadict(self, data, files, name):
return [
widget.value_from_datadict(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
]
def value_omitted_from_data(self, data, files, name):
return all(
widget.value_omitted_from_data(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
)
def decompress(self, value):
"""
Return a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError('Subclasses must implement this method.')
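    # Subclasses reverse a field's single value here; e.g. SplitDateTimeWidget
    # below decompresses a datetime into [date, time] for its two subwidgets.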
def _get_media(self):
"""
Media for a multiwidget is the combination of all media of the
subwidgets.
"""
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super().__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
@property
def needs_multipart_form(self):
return any(w.needs_multipart_form for w in self.widgets)
class SplitDateTimeWidget(MultiWidget):
"""
A widget that splits datetime input into two <input type="text"> boxes.
"""
supports_microseconds = False
template_name = 'django/forms/widgets/splitdatetime.html'
def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None):
widgets = (
DateInput(
attrs=attrs if date_attrs is None else date_attrs,
format=date_format,
),
TimeInput(
attrs=attrs if time_attrs is None else time_attrs,
format=time_format,
),
)
super().__init__(widgets)
def decompress(self, value):
if value:
value = to_current_timezone(value)
return [value.date(), value.time()]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A widget that splits datetime input into two <input type="hidden"> inputs.
"""
template_name = 'django/forms/widgets/splithiddendatetime.html'
def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None):
super().__init__(attrs, date_format, time_format, date_attrs, time_attrs)
for widget in self.widgets:
widget.input_type = 'hidden'
class SelectDateWidget(Widget):
"""
A widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = ('', '---')
month_field = '%s_month'
day_field = '%s_day'
year_field = '%s_year'
template_name = 'django/forms/widgets/select_date.html'
input_type = 'select'
select_widget = Select
date_re = _lazy_re_compile(r'(\d{4}|0)-(\d\d?)-(\d\d?)$')
def __init__(self, attrs=None, years=None, months=None, empty_label=None):
self.attrs = attrs or {}
# Optional list or tuple of years to use in the "year" select box.
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year + 10)
# Optional dict of months to use in the "month" select box.
if months:
self.months = months
else:
self.months = MONTHS
# Optional string, list, or tuple to use as empty_label.
if isinstance(empty_label, (list, tuple)):
if not len(empty_label) == 3:
raise ValueError('empty_label list/tuple must have 3 elements.')
self.year_none_value = ('', empty_label[0])
self.month_none_value = ('', empty_label[1])
self.day_none_value = ('', empty_label[2])
else:
if empty_label is not None:
self.none_value = ('', empty_label)
self.year_none_value = self.none_value
self.month_none_value = self.none_value
self.day_none_value = self.none_value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
date_context = {}
year_choices = [(i, str(i)) for i in self.years]
if not self.is_required:
year_choices.insert(0, self.year_none_value)
year_name = self.year_field % name
date_context['year'] = self.select_widget(attrs, choices=year_choices).get_context(
name=year_name,
value=context['widget']['value']['year'],
attrs={**context['widget']['attrs'], 'id': 'id_%s' % year_name},
)
month_choices = list(self.months.items())
if not self.is_required:
month_choices.insert(0, self.month_none_value)
month_name = self.month_field % name
date_context['month'] = self.select_widget(attrs, choices=month_choices).get_context(
name=month_name,
value=context['widget']['value']['month'],
attrs={**context['widget']['attrs'], 'id': 'id_%s' % month_name},
)
day_choices = [(i, i) for i in range(1, 32)]
if not self.is_required:
day_choices.insert(0, self.day_none_value)
day_name = self.day_field % name
date_context['day'] = self.select_widget(attrs, choices=day_choices,).get_context(
name=day_name,
value=context['widget']['value']['day'],
attrs={**context['widget']['attrs'], 'id': 'id_%s' % day_name},
)
subwidgets = []
for field in self._parse_date_fmt():
subwidgets.append(date_context[field]['widget'])
context['widget']['subwidgets'] = subwidgets
return context
def format_value(self, value):
"""
Return a dict containing the year, month, and day of the current value.
Use dict instead of a datetime to allow invalid dates such as February
31 to display correctly.
"""
year, month, day = None, None, None
if isinstance(value, (datetime.date, datetime.datetime)):
year, month, day = value.year, value.month, value.day
elif isinstance(value, str):
match = self.date_re.match(value)
if match:
# Convert any zeros in the date to empty strings to match the
# empty option value.
year, month, day = [int(val) or '' for val in match.groups()]
elif settings.USE_L10N:
input_format = get_format('DATE_INPUT_FORMATS')[0]
try:
d = datetime.datetime.strptime(value, input_format)
except ValueError:
pass
else:
year, month, day = d.year, d.month, d.day
return {'year': year, 'month': month, 'day': day}
@staticmethod
def _parse_date_fmt():
fmt = get_format('DATE_FORMAT')
escaped = False
for char in fmt:
if escaped:
escaped = False
elif char == '\\':
escaped = True
elif char in 'Yy':
yield 'year'
elif char in 'bEFMmNn':
yield 'month'
elif char in 'dj':
yield 'day'
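    # For example, a DATE_FORMAT of 'N j, Y' yields 'month', 'day', 'year',
    # which determines the order of the three <select> boxes rendered above.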
def id_for_label(self, id_):
for first_select in self._parse_date_fmt():
return '%s_%s' % (id_, first_select)
return '%s_month' % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == '':
return None
if y is not None and m is not None and d is not None:
input_format = get_format('DATE_INPUT_FORMATS')[0]
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
# Return pseudo-ISO dates with zeros for any unselected values,
# e.g. '2017-0-23'.
return '%s-%s-%s' % (y or 0, m or 0, d or 0)
date_value = datetime_safe.new_date(date_value)
return date_value.strftime(input_format)
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return not any(
('{}_{}'.format(name, interval) in data)
for interval in ('year', 'month', 'day')
)
|
b666974d7daabf14520ff4b5036cd4dc15c185420174295d902f63abbc4b3b46 | import ipaddress
import re
from pathlib import Path
from urllib.parse import urlsplit, urlunsplit
from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.encoding import punycode
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext_lazy as _, ngettext_lazy
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, '', [], (), {})
@deconstructible
class RegexValidator:
regex = ''
message = _('Enter a valid value.')
code = 'invalid'
inverse_match = False
flags = 0
def __init__(self, regex=None, message=None, code=None, inverse_match=None, flags=None):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
if inverse_match is not None:
self.inverse_match = inverse_match
if flags is not None:
self.flags = flags
if self.flags and not isinstance(self.regex, str):
raise TypeError("If the flags are set, regex must be a regular expression string.")
self.regex = _lazy_re_compile(self.regex, self.flags)
def __call__(self, value):
"""
Validate that the input contains (or does *not* contain, if
inverse_match is True) a match for the regular expression.
"""
regex_matches = self.regex.search(str(value))
invalid_input = regex_matches if self.inverse_match else not regex_matches
if invalid_input:
raise ValidationError(self.message, code=self.code)
def __eq__(self, other):
return (
isinstance(other, RegexValidator) and
self.regex.pattern == other.regex.pattern and
self.regex.flags == other.regex.flags and
(self.message == other.message) and
(self.code == other.code) and
(self.inverse_match == other.inverse_match)
)
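# Illustrative usage (not part of the original source):
#     RegexValidator(r'^[0-9]+$')('123')   # passes silently
#     RegexValidator(r'^[0-9]+$')('abc')   # raises ValidationError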
@deconstructible
class URLValidator(RegexValidator):
ul = '\u00a1-\uffff' # Unicode letters range (must not be a raw string).
# IP patterns
ipv4_re = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)(?:\.(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}'
ipv6_re = r'\[[0-9a-f:.]+\]' # (simple regex, validated later)
# Host patterns
hostname_re = r'[a-z' + ul + r'0-9](?:[a-z' + ul + r'0-9-]{0,61}[a-z' + ul + r'0-9])?'
# Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1
domain_re = r'(?:\.(?!-)[a-z' + ul + r'0-9-]{1,63}(?<!-))*'
tld_re = (
r'\.' # dot
r'(?!-)' # can't start with a dash
r'(?:[a-z' + ul + '-]{2,63}' # domain label
r'|xn--[a-z0-9]{1,59})' # or punycode label
r'(?<!-)' # can't end with a dash
r'\.?' # may have a trailing dot
)
host_re = '(' + hostname_re + domain_re + tld_re + '|localhost)'
regex = _lazy_re_compile(
r'^(?:[a-z0-9.+-]*)://' # scheme is validated separately
r'(?:[^\s:@/]+(?::[^\s:@/]*)?@)?' # user:pass authentication
r'(?:' + ipv4_re + '|' + ipv6_re + '|' + host_re + ')'
r'(?::\d{2,5})?' # port
r'(?:[/?#][^\s]*)?' # resource path
r'\Z', re.IGNORECASE)
message = _('Enter a valid URL.')
schemes = ['http', 'https', 'ftp', 'ftps']
def __init__(self, schemes=None, **kwargs):
super().__init__(**kwargs)
if schemes is not None:
self.schemes = schemes
def __call__(self, value):
if not isinstance(value, str):
raise ValidationError(self.message, code=self.code)
# Check if the scheme is valid.
scheme = value.split('://')[0].lower()
if scheme not in self.schemes:
raise ValidationError(self.message, code=self.code)
# Then check full URL
try:
super().__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain
if value:
try:
scheme, netloc, path, query, fragment = urlsplit(value)
except ValueError: # for example, "Invalid IPv6 URL"
raise ValidationError(self.message, code=self.code)
try:
netloc = punycode(netloc) # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super().__call__(url)
else:
raise
else:
# Now verify IPv6 in the netloc part
host_match = re.search(r'^\[(.+)\](?::\d{2,5})?$', urlsplit(value).netloc)
if host_match:
potential_ip = host_match[1]
try:
validate_ipv6_address(potential_ip)
except ValidationError:
raise ValidationError(self.message, code=self.code)
# The maximum length of a full host name is 253 characters per RFC 1034
# section 3.1. It's defined to be 255 bytes or less, but this includes
# one byte for the length of the name and one byte for the trailing dot
# that's used to indicate absolute names in DNS.
if len(urlsplit(value).netloc) > 253:
raise ValidationError(self.message, code=self.code)
integer_validator = RegexValidator(
_lazy_re_compile(r'^-?\d+\Z'),
message=_('Enter a valid integer.'),
code='invalid',
)
def validate_integer(value):
return integer_validator(value)
@deconstructible
class EmailValidator:
message = _('Enter a valid email address.')
code = 'invalid'
user_regex = _lazy_re_compile(
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" # dot-atom
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)', # quoted-string
re.IGNORECASE)
domain_regex = _lazy_re_compile(
# max length for domain name labels is 63 characters per RFC 1034
r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z',
re.IGNORECASE)
literal_regex = _lazy_re_compile(
# literal form, ipv4 or ipv6 address (SMTP 4.1.3)
r'\[([A-f0-9:.]+)\]\Z',
re.IGNORECASE)
domain_whitelist = ['localhost']
def __init__(self, message=None, code=None, whitelist=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
if whitelist is not None:
self.domain_whitelist = whitelist
def __call__(self, value):
if not value or '@' not in value:
raise ValidationError(self.message, code=self.code)
user_part, domain_part = value.rsplit('@', 1)
if not self.user_regex.match(user_part):
raise ValidationError(self.message, code=self.code)
if (domain_part not in self.domain_whitelist and
not self.validate_domain_part(domain_part)):
# Try for possible IDN domain-part
try:
domain_part = punycode(domain_part)
except UnicodeError:
pass
else:
if self.validate_domain_part(domain_part):
return
raise ValidationError(self.message, code=self.code)
def validate_domain_part(self, domain_part):
if self.domain_regex.match(domain_part):
return True
literal_match = self.literal_regex.match(domain_part)
if literal_match:
ip_address = literal_match[1]
try:
validate_ipv46_address(ip_address)
return True
except ValidationError:
pass
return False
def __eq__(self, other):
return (
isinstance(other, EmailValidator) and
(self.domain_whitelist == other.domain_whitelist) and
(self.message == other.message) and
(self.code == other.code)
)
validate_email = EmailValidator()
slug_re = _lazy_re_compile(r'^[-a-zA-Z0-9_]+\Z')
validate_slug = RegexValidator(
slug_re,
# Translators: "letters" means latin letters: a-z and A-Z.
_('Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.'),
'invalid'
)
slug_unicode_re = _lazy_re_compile(r'^[-\w]+\Z')
validate_unicode_slug = RegexValidator(
slug_unicode_re,
_('Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.'),
'invalid'
)
def validate_ipv4_address(value):
try:
ipaddress.IPv4Address(value)
except ValueError:
raise ValidationError(_('Enter a valid IPv4 address.'), code='invalid')
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid')
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid')
ip_address_validator_map = {
'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')),
'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')),
'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')),
}
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
"""
if protocol != 'both' and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'")
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError("The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map)))
def int_list_validator(sep=',', message=None, code='invalid', allow_negative=False):
regexp = _lazy_re_compile(r'^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z' % {
'neg': '(-)?' if allow_negative else '',
'sep': re.escape(sep),
})
return RegexValidator(regexp, message=message, code=code)
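# For example, int_list_validator() accepts '1,2,3' but rejects '1,,2', and
# int_list_validator(sep=';', allow_negative=True) accepts '-1;2'.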
validate_comma_separated_integer_list = int_list_validator(
message=_('Enter only digits separated by commas.'),
)
@deconstructible
class BaseValidator:
message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).')
code = 'limit_value'
def __init__(self, limit_value, message=None):
self.limit_value = limit_value
if message:
self.message = message
def __call__(self, value):
cleaned = self.clean(value)
limit_value = self.limit_value() if callable(self.limit_value) else self.limit_value
params = {'limit_value': limit_value, 'show_value': cleaned, 'value': value}
if self.compare(cleaned, limit_value):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.limit_value == other.limit_value and
self.message == other.message and
self.code == other.code
)
def compare(self, a, b):
return a is not b
def clean(self, x):
return x
@deconstructible
class MaxValueValidator(BaseValidator):
message = _('Ensure this value is less than or equal to %(limit_value)s.')
code = 'max_value'
def compare(self, a, b):
return a > b
@deconstructible
class MinValueValidator(BaseValidator):
message = _('Ensure this value is greater than or equal to %(limit_value)s.')
code = 'min_value'
def compare(self, a, b):
return a < b
@deconstructible
class MinLengthValidator(BaseValidator):
message = ngettext_lazy(
'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).',
'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).',
'limit_value')
code = 'min_length'
def compare(self, a, b):
return a < b
def clean(self, x):
return len(x)
@deconstructible
class MaxLengthValidator(BaseValidator):
message = ngettext_lazy(
'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).',
'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).',
'limit_value')
code = 'max_length'
def compare(self, a, b):
return a > b
def clean(self, x):
return len(x)
@deconstructible
class DecimalValidator:
"""
Validate that the input does not exceed the maximum number of digits
expected, otherwise raise ValidationError.
"""
messages = {
'invalid': _('Enter a number.'),
'max_digits': ngettext_lazy(
'Ensure that there are no more than %(max)s digit in total.',
'Ensure that there are no more than %(max)s digits in total.',
'max'
),
'max_decimal_places': ngettext_lazy(
'Ensure that there are no more than %(max)s decimal place.',
'Ensure that there are no more than %(max)s decimal places.',
'max'
),
'max_whole_digits': ngettext_lazy(
'Ensure that there are no more than %(max)s digit before the decimal point.',
'Ensure that there are no more than %(max)s digits before the decimal point.',
'max'
),
}
def __init__(self, max_digits, decimal_places):
self.max_digits = max_digits
self.decimal_places = decimal_places
def __call__(self, value):
digit_tuple, exponent = value.as_tuple()[1:]
if exponent in {'F', 'n', 'N'}:
raise ValidationError(self.messages['invalid'])
if exponent >= 0:
# A positive exponent adds that many trailing zeros.
digits = len(digit_tuple) + exponent
decimals = 0
else:
# If the absolute value of the negative exponent is larger than the
# number of digits, then it's the same as the number of digits,
# because it'll consume all of the digits in digit_tuple and then
# add abs(exponent) - len(digit_tuple) leading zeros after the
# decimal point.
if abs(exponent) > len(digit_tuple):
digits = decimals = abs(exponent)
else:
digits = len(digit_tuple)
decimals = abs(exponent)
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.messages['max_digits'],
code='max_digits',
params={'max': self.max_digits},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.messages['max_decimal_places'],
code='max_decimal_places',
params={'max': self.decimal_places},
)
if (self.max_digits is not None and self.decimal_places is not None and
whole_digits > (self.max_digits - self.decimal_places)):
raise ValidationError(
self.messages['max_whole_digits'],
code='max_whole_digits',
params={'max': (self.max_digits - self.decimal_places)},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.max_digits == other.max_digits and
self.decimal_places == other.decimal_places
)
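# For example, DecimalValidator(max_digits=5, decimal_places=2) accepts
# Decimal('123.45') but rejects Decimal('1.234') (too many decimal places)
# and Decimal('1234.5') (too many digits before the decimal point).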
@deconstructible
class FileExtensionValidator:
message = _(
'File extension “%(extension)s” is not allowed. '
'Allowed extensions are: %(allowed_extensions)s.'
)
code = 'invalid_extension'
def __init__(self, allowed_extensions=None, message=None, code=None):
if allowed_extensions is not None:
allowed_extensions = [allowed_extension.lower() for allowed_extension in allowed_extensions]
self.allowed_extensions = allowed_extensions
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
extension = Path(value.name).suffix[1:].lower()
if self.allowed_extensions is not None and extension not in self.allowed_extensions:
raise ValidationError(
self.message,
code=self.code,
params={
'extension': extension,
'allowed_extensions': ', '.join(self.allowed_extensions)
}
)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.allowed_extensions == other.allowed_extensions and
self.message == other.message and
self.code == other.code
)
def get_available_image_extensions():
try:
from PIL import Image
except ImportError:
return []
else:
Image.init()
return [ext.lower()[1:] for ext in Image.EXTENSION]
def validate_image_file_extension(value):
return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(value)
@deconstructible
class ProhibitNullCharactersValidator:
"""Validate that the string doesn't contain the null character."""
message = _('Null characters are not allowed.')
code = 'null_characters_not_allowed'
def __init__(self, message=None, code=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
if '\x00' in str(value):
raise ValidationError(self.message, code=self.code)
def __eq__(self, other):
return (
isinstance(other, self.__class__) and
self.message == other.message and
self.code == other.code
)
|
0b8133136dc3e360a4f02f7288ddc46e7e0adcf4716dc79e8a8bf1e16a215eb9 | import datetime
import json
import mimetypes
import os
import re
import sys
import time
from email.header import Header
from http.client import responses
from urllib.parse import quote, urlparse
from django.conf import settings
from django.core import signals, signing
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.http.cookie import SimpleCookie
from django.utils import timezone
from django.utils.encoding import iri_to_uri
from django.utils.http import http_date
from django.utils.regex_helper import _lazy_re_compile
_charset_from_content_type_re = _lazy_re_compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I)
class BadHeaderError(ValueError):
pass
class HttpResponseBase:
"""
An HTTP response base class with dictionary-accessed headers.
This class doesn't handle content. It should not be used directly.
Use the HttpResponse and StreamingHttpResponse subclasses instead.
"""
status_code = 200
def __init__(self, content_type=None, status=None, reason=None, charset=None):
# _headers is a mapping of the lowercase name to the original case of
# the header (required for working with legacy systems) and the header
# value. Both the name of the header and its value are ASCII strings.
self._headers = {}
self._resource_closers = []
# This parameter is set by the handler. It's necessary to preserve the
# historical behavior of request_finished.
self._handler_class = None
self.cookies = SimpleCookie()
self.closed = False
if status is not None:
try:
self.status_code = int(status)
except (ValueError, TypeError):
raise TypeError('HTTP status code must be an integer.')
if not 100 <= self.status_code <= 599:
raise ValueError('HTTP status code must be an integer from 100 to 599.')
self._reason_phrase = reason
self._charset = charset
if content_type is None:
content_type = 'text/html; charset=%s' % self.charset
self['Content-Type'] = content_type
@property
def reason_phrase(self):
if self._reason_phrase is not None:
return self._reason_phrase
# Leave self._reason_phrase unset in order to use the default
# reason phrase for status code.
return responses.get(self.status_code, 'Unknown Status Code')
@reason_phrase.setter
def reason_phrase(self, value):
self._reason_phrase = value
@property
def charset(self):
if self._charset is not None:
return self._charset
content_type = self.get('Content-Type', '')
matched = _charset_from_content_type_re.search(content_type)
if matched:
# Extract the charset and strip its double quotes
return matched['charset'].replace('"', '')
return settings.DEFAULT_CHARSET
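    # For example, a Content-Type of 'text/html; charset="utf-8"' yields a
    # charset of 'utf-8'; without a charset parameter, settings.DEFAULT_CHARSET
    # is used.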
@charset.setter
def charset(self, value):
self._charset = value
def serialize_headers(self):
"""HTTP headers as a bytestring."""
def to_bytes(val, encoding):
return val if isinstance(val, bytes) else val.encode(encoding)
headers = [
(to_bytes(key, 'ascii') + b': ' + to_bytes(value, 'latin-1'))
for key, value in self._headers.values()
]
return b'\r\n'.join(headers)
__bytes__ = serialize_headers
@property
def _content_type_for_repr(self):
return ', "%s"' % self['Content-Type'] if 'Content-Type' in self else ''
def _convert_to_charset(self, value, charset, mime_encode=False):
"""
Convert headers key/value to ascii/latin-1 native strings.
`charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and
`value` can't be represented in the given charset, apply MIME-encoding.
"""
if not isinstance(value, (bytes, str)):
value = str(value)
if ((isinstance(value, bytes) and (b'\n' in value or b'\r' in value)) or
isinstance(value, str) and ('\n' in value or '\r' in value)):
raise BadHeaderError("Header values can't contain newlines (got %r)" % value)
try:
if isinstance(value, str):
# Ensure string is valid in given charset
value.encode(charset)
else:
# Convert bytestring using given charset
value = value.decode(charset)
except UnicodeError as e:
if mime_encode:
value = Header(value, 'utf-8', maxlinelen=sys.maxsize).encode()
else:
e.reason += ', HTTP response headers must be in %s format' % charset
raise
return value
def __setitem__(self, header, value):
header = self._convert_to_charset(header, 'ascii')
value = self._convert_to_charset(value, 'latin-1', mime_encode=True)
self._headers[header.lower()] = (header, value)
def __delitem__(self, header):
self._headers.pop(header.lower(), False)
def __getitem__(self, header):
return self._headers[header.lower()][1]
def has_header(self, header):
"""Case-insensitive check for a header."""
return header.lower() in self._headers
__contains__ = has_header
def items(self):
return self._headers.values()
def get(self, header, alternate=None):
return self._headers.get(header.lower(), (None, alternate))[1]
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False, samesite=None):
"""
Set a cookie.
``expires`` can be:
- a string in the correct format,
- a naive ``datetime.datetime`` object in UTC,
- an aware ``datetime.datetime`` object in any time zone.
If it is a ``datetime.datetime`` object then calculate ``max_age``.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
if timezone.is_aware(expires):
expires = timezone.make_naive(expires, timezone.utc)
delta = expires - expires.utcnow()
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]['expires'] = expires
else:
self.cookies[key]['expires'] = ''
if max_age is not None:
self.cookies[key]['max-age'] = max_age
            # IE requires expires, so set it if it hasn't been set already.
if not expires:
self.cookies[key]['expires'] = http_date(time.time() + max_age)
if path is not None:
self.cookies[key]['path'] = path
if domain is not None:
self.cookies[key]['domain'] = domain
if secure:
self.cookies[key]['secure'] = True
if httponly:
self.cookies[key]['httponly'] = True
if samesite:
if samesite.lower() not in ('lax', 'none', 'strict'):
raise ValueError('samesite must be "lax", "none", or "strict".')
self.cookies[key]['samesite'] = samesite
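    # Note: passing a datetime as ``expires`` (treated as UTC when naive) is
    # converted into a relative ``max-age`` above, and the Expires attribute
    # is then rebuilt from max_age so the two values stay consistent.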
def setdefault(self, key, value):
"""Set a header unless it has already been set."""
if key not in self:
self[key] = value
def set_signed_cookie(self, key, value, salt='', **kwargs):
value = signing.get_cookie_signer(salt=key + salt).sign(value)
return self.set_cookie(key, value, **kwargs)
def delete_cookie(self, key, path='/', domain=None):
# Most browsers ignore the Set-Cookie header if the cookie name starts
# with __Host- or __Secure- and the cookie doesn't use the secure flag.
secure = key.startswith(('__Secure-', '__Host-'))
self.set_cookie(
key, max_age=0, path=path, domain=domain, secure=secure,
expires='Thu, 01 Jan 1970 00:00:00 GMT',
)
# Common methods used by subclasses
def make_bytes(self, value):
"""Turn a value into a bytestring encoded in the output charset."""
# Per PEP 3333, this response body must be bytes. To avoid returning
# an instance of a subclass, this function returns `bytes(value)`.
# This doesn't make a copy when `value` already contains bytes.
# Handle string types -- we can't rely on force_bytes here because:
# - Python attempts str conversion first
# - when self._charset != 'utf-8' it re-encodes the content
if isinstance(value, (bytes, memoryview)):
return bytes(value)
if isinstance(value, str):
return bytes(value.encode(self.charset))
# Handle non-string types.
return str(value).encode(self.charset)
# These methods partially implement the file-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
# The WSGI server must call this method upon completion of the request.
# See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
def close(self):
for closer in self._resource_closers:
try:
closer()
except Exception:
pass
# Free resources that were still referenced.
self._resource_closers.clear()
self.closed = True
signals.request_finished.send(sender=self._handler_class)
def write(self, content):
raise OSError('This %s instance is not writable' % self.__class__.__name__)
def flush(self):
pass
def tell(self):
raise OSError('This %s instance cannot tell its position' % self.__class__.__name__)
# These methods partially implement a stream-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
def readable(self):
return False
def seekable(self):
return False
def writable(self):
return False
def writelines(self, lines):
raise OSError('This %s instance is not writable' % self.__class__.__name__)
class HttpResponse(HttpResponseBase):
"""
An HTTP response class with a string as content.
    This content can be read, appended to, or replaced.
"""
streaming = False
def __init__(self, content=b'', *args, **kwargs):
super().__init__(*args, **kwargs)
# Content is a bytestring. See the `content` property methods.
self.content = content
def __repr__(self):
return '<%(cls)s status_code=%(status_code)d%(content_type)s>' % {
'cls': self.__class__.__name__,
'status_code': self.status_code,
'content_type': self._content_type_for_repr,
}
def serialize(self):
"""Full HTTP message, including headers, as a bytestring."""
return self.serialize_headers() + b'\r\n\r\n' + self.content
__bytes__ = serialize
@property
def content(self):
return b''.join(self._container)
@content.setter
def content(self, value):
# Consume iterators upon assignment to allow repeated iteration.
if hasattr(value, '__iter__') and not isinstance(value, (bytes, str)):
content = b''.join(self.make_bytes(chunk) for chunk in value)
if hasattr(value, 'close'):
try:
value.close()
except Exception:
pass
else:
content = self.make_bytes(value)
# Create a list of properly encoded bytestrings to support write().
self._container = [content]
def __iter__(self):
return iter(self._container)
def write(self, content):
self._container.append(self.make_bytes(content))
def tell(self):
return len(self.content)
def getvalue(self):
return self.content
def writable(self):
return True
def writelines(self, lines):
for line in lines:
self.write(line)
class StreamingHttpResponse(HttpResponseBase):
"""
A streaming HTTP response class with an iterator as content.
This should only be iterated once, when the response is streamed to the
client. However, it can be appended to or replaced with a new iterator
that wraps the original content (or yields entirely new content).
"""
streaming = True
def __init__(self, streaming_content=(), *args, **kwargs):
super().__init__(*args, **kwargs)
# `streaming_content` should be an iterable of bytestrings.
# See the `streaming_content` property methods.
self.streaming_content = streaming_content
@property
def content(self):
raise AttributeError(
"This %s instance has no `content` attribute. Use "
"`streaming_content` instead." % self.__class__.__name__
)
@property
def streaming_content(self):
return map(self.make_bytes, self._iterator)
@streaming_content.setter
def streaming_content(self, value):
self._set_streaming_content(value)
def _set_streaming_content(self, value):
# Ensure we can never iterate on "value" more than once.
self._iterator = iter(value)
if hasattr(value, 'close'):
self._resource_closers.append(value.close)
def __iter__(self):
return self.streaming_content
def getvalue(self):
return b''.join(self.streaming_content)
class FileResponse(StreamingHttpResponse):
"""
A streaming HTTP response class optimized for files.
"""
block_size = 4096
def __init__(self, *args, as_attachment=False, filename='', **kwargs):
self.as_attachment = as_attachment
self.filename = filename
super().__init__(*args, **kwargs)
def _set_streaming_content(self, value):
if not hasattr(value, 'read'):
self.file_to_stream = None
return super()._set_streaming_content(value)
self.file_to_stream = filelike = value
if hasattr(filelike, 'close'):
self._resource_closers.append(filelike.close)
value = iter(lambda: filelike.read(self.block_size), b'')
self.set_headers(filelike)
super()._set_streaming_content(value)
def set_headers(self, filelike):
"""
Set some common response headers (Content-Length, Content-Type, and
Content-Disposition) based on the `filelike` response content.
"""
encoding_map = {
'bzip2': 'application/x-bzip',
'gzip': 'application/gzip',
'xz': 'application/x-xz',
}
filename = getattr(filelike, 'name', None)
filename = filename if (isinstance(filename, str) and filename) else self.filename
if os.path.isabs(filename):
self['Content-Length'] = os.path.getsize(filelike.name)
elif hasattr(filelike, 'getbuffer'):
self['Content-Length'] = filelike.getbuffer().nbytes
if self.get('Content-Type', '').startswith('text/html'):
if filename:
content_type, encoding = mimetypes.guess_type(filename)
# Encoding isn't set to prevent browsers from automatically
# uncompressing files.
content_type = encoding_map.get(encoding, content_type)
self['Content-Type'] = content_type or 'application/octet-stream'
else:
self['Content-Type'] = 'application/octet-stream'
filename = self.filename or os.path.basename(filename)
if filename:
disposition = 'attachment' if self.as_attachment else 'inline'
try:
filename.encode('ascii')
file_expr = 'filename="{}"'.format(filename)
except UnicodeEncodeError:
file_expr = "filename*=utf-8''{}".format(quote(filename))
self['Content-Disposition'] = '{}; {}'.format(disposition, file_expr)
elif self.as_attachment:
self['Content-Disposition'] = 'attachment'
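    # Illustrative example (not part of the original source):
    # FileResponse(open('/tmp/report.pdf', 'rb')) guesses Content-Type
    # ('application/pdf' here), sets Content-Length from the file size, and
    # emits an inline Content-Disposition; pass as_attachment=True to force a
    # download.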
class HttpResponseRedirectBase(HttpResponse):
allowed_schemes = ['http', 'https', 'ftp']
def __init__(self, redirect_to, *args, **kwargs):
super().__init__(*args, **kwargs)
self['Location'] = iri_to_uri(redirect_to)
parsed = urlparse(str(redirect_to))
if parsed.scheme and parsed.scheme not in self.allowed_schemes:
raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme)
url = property(lambda self: self['Location'])
def __repr__(self):
return '<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">' % {
'cls': self.__class__.__name__,
'status_code': self.status_code,
'content_type': self._content_type_for_repr,
'url': self.url,
}
class HttpResponseRedirect(HttpResponseRedirectBase):
status_code = 302
class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
status_code = 301
class HttpResponseNotModified(HttpResponse):
status_code = 304
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
del self['content-type']
@HttpResponse.content.setter
def content(self, value):
if value:
raise AttributeError("You cannot set content to a 304 (Not Modified) response")
self._container = []
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods, *args, **kwargs):
super().__init__(*args, **kwargs)
self['Allow'] = ', '.join(permitted_methods)
def __repr__(self):
return '<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>' % {
'cls': self.__class__.__name__,
'status_code': self.status_code,
'content_type': self._content_type_for_repr,
'methods': self['Allow'],
}
class HttpResponseGone(HttpResponse):
status_code = 410
class HttpResponseServerError(HttpResponse):
status_code = 500
class Http404(Exception):
pass
class JsonResponse(HttpResponse):
"""
An HTTP response class that consumes data to be serialized to JSON.
:param data: Data to be dumped into json. By default only ``dict`` objects
are allowed to be passed due to a security flaw before EcmaScript 5. See
the ``safe`` parameter for more information.
:param encoder: Should be a json encoder class. Defaults to
``django.core.serializers.json.DjangoJSONEncoder``.
:param safe: Controls if only ``dict`` objects may be serialized. Defaults
to ``True``.
:param json_dumps_params: A dictionary of kwargs passed to json.dumps().
"""
def __init__(self, data, encoder=DjangoJSONEncoder, safe=True,
json_dumps_params=None, **kwargs):
if safe and not isinstance(data, dict):
raise TypeError(
'In order to allow non-dict objects to be serialized set the '
'safe parameter to False.'
)
if json_dumps_params is None:
json_dumps_params = {}
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data, cls=encoder, **json_dumps_params)
super().__init__(content=data, **kwargs)
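# Illustrative usage (not part of the original source):
#     JsonResponse({'foo': 'bar'})           # dict payloads are the default
#     JsonResponse([1, 2, 3], safe=False)    # non-dict data requires safe=False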
|
0256ed7d33177cdb174650f62e898e500060a45556594bb0674cdd867da23bb7 | import cgi
import codecs
import copy
import warnings
from io import BytesIO
from itertools import chain
from urllib.parse import quote, urlencode, urljoin, urlsplit
from django.conf import settings
from django.core import signing
from django.core.exceptions import (
DisallowedHost, ImproperlyConfigured, RequestDataTooBig,
)
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser, MultiPartParserError
from django.utils.datastructures import (
CaseInsensitiveMapping, ImmutableList, MultiValueDict,
)
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.encoding import escape_uri_path, iri_to_uri
from django.utils.functional import cached_property
from django.utils.http import is_same_domain, limited_parse_qsl
from django.utils.regex_helper import _lazy_re_compile
from .multipartparser import parse_header
RAISE_ERROR = object()
host_validation_re = _lazy_re_compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9\.:]+\])(:\d+)?$")
class UnreadablePostError(OSError):
pass
class RawPostDataException(Exception):
"""
You cannot access raw_post_data from a request that has
multipart/* POST data if it has been accessed via POST,
    FILES, etc.
"""
pass
class HttpRequest:
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
# WARNING: The `WSGIRequest` subclass doesn't call `super`.
# Any variable assignment made here should also happen in
# `WSGIRequest.__init__()`.
self.GET = QueryDict(mutable=True)
self.POST = QueryDict(mutable=True)
self.COOKIES = {}
self.META = {}
self.FILES = MultiValueDict()
self.path = ''
self.path_info = ''
self.method = None
self.resolver_match = None
self.content_type = None
self.content_params = None
def __repr__(self):
if self.method is None or not self.get_full_path():
return '<%s>' % self.__class__.__name__
return '<%s: %s %r>' % (self.__class__.__name__, self.method, self.get_full_path())
@cached_property
def headers(self):
return HttpHeaders(self.META)
@cached_property
def accepted_types(self):
"""Return a list of MediaType instances."""
return parse_accept_header(self.headers.get('Accept', '*/*'))
def accepts(self, media_type):
return any(
accepted_type.match(media_type)
for accepted_type in self.accepted_types
)
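    # For example, request.accepts('text/html') is True when the Accept header
    # lists 'text/html' or contains the '*/*' wildcard.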
def _set_content_type_params(self, meta):
"""Set content_type, content_params, and encoding."""
self.content_type, self.content_params = cgi.parse_header(meta.get('CONTENT_TYPE', ''))
if 'charset' in self.content_params:
try:
codecs.lookup(self.content_params['charset'])
except LookupError:
pass
else:
self.encoding = self.content_params['charset']
def _get_raw_host(self):
"""
Return the HTTP host using the environment or request headers. Skip
allowed hosts protection, so may return an insecure host.
"""
# We try three options, in order of decreasing preference.
if settings.USE_X_FORWARDED_HOST and (
'HTTP_X_FORWARDED_HOST' in self.META):
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = self.get_port()
if server_port != ('443' if self.is_secure() else '80'):
host = '%s:%s' % (host, server_port)
return host
def get_host(self):
"""Return the HTTP host using the environment or request headers."""
host = self._get_raw_host()
# Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True.
allowed_hosts = settings.ALLOWED_HOSTS
if settings.DEBUG and not allowed_hosts:
allowed_hosts = ['.localhost', '127.0.0.1', '[::1]']
domain, port = split_domain_port(host)
if domain and validate_host(domain, allowed_hosts):
return host
else:
msg = "Invalid HTTP_HOST header: %r." % host
if domain:
msg += " You may need to add %r to ALLOWED_HOSTS." % domain
else:
msg += " The domain name provided is not valid according to RFC 1034/1035."
raise DisallowedHost(msg)
def get_port(self):
"""Return the port number for the request as a string."""
if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.META:
port = self.META['HTTP_X_FORWARDED_PORT']
else:
port = self.META['SERVER_PORT']
return str(port)
def get_full_path(self, force_append_slash=False):
return self._get_full_path(self.path, force_append_slash)
def get_full_path_info(self, force_append_slash=False):
return self._get_full_path(self.path_info, force_append_slash)
def _get_full_path(self, path, force_append_slash):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s%s' % (
escape_uri_path(path),
'/' if force_append_slash and not path.endswith('/') else '',
('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else ''
)
def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
"""
Attempt to return a signed cookie. If the signature fails or the
cookie has expired, raise an exception, unless the `default` argument
is provided, in which case return that value.
"""
try:
cookie_value = self.COOKIES[key]
except KeyError:
if default is not RAISE_ERROR:
return default
else:
raise
try:
value = signing.get_cookie_signer(salt=key + salt).unsign(
cookie_value, max_age=max_age)
except signing.BadSignature:
if default is not RAISE_ERROR:
return default
else:
raise
return value
def get_raw_uri(self):
"""
Return an absolute URI from variables available in this request. Skip
allowed hosts protection, so may return insecure URI.
"""
return '{scheme}://{host}{path}'.format(
scheme=self.scheme,
host=self._get_raw_host(),
path=self.get_full_path(),
)
def build_absolute_uri(self, location=None):
"""
Build an absolute URI from the location and the variables available in
this request. If no ``location`` is specified, build the absolute URI
using request.get_full_path(). If the location is absolute, convert it
to an RFC 3987 compliant URI and return it. If location is relative or
is scheme-relative (i.e., ``//example.com/``), urljoin() it to a base
URL constructed from the request variables.
"""
if location is None:
# Make it an absolute url (but schemeless and domainless) for the
# edge case that the path starts with '//'.
location = '//%s' % self.get_full_path()
else:
# Coerce lazy locations.
location = str(location)
bits = urlsplit(location)
if not (bits.scheme and bits.netloc):
# Handle the simple, most common case. If the location is absolute
# and a scheme or host (netloc) isn't provided, skip an expensive
# urljoin() as long as no path segments are '.' or '..'.
if (bits.path.startswith('/') and not bits.scheme and not bits.netloc and
'/./' not in bits.path and '/../' not in bits.path):
# If location starts with '//' but has no netloc, reuse the
                # scheme and netloc from the current request. Strip the double
# slashes and continue as if it wasn't specified.
if location.startswith('//'):
location = location[2:]
location = self._current_scheme_host + location
else:
# Join the constructed URL with the provided location, which
# allows the provided location to apply query strings to the
# base path.
location = urljoin(self._current_scheme_host + self.path, location)
return iri_to_uri(location)
@cached_property
def _current_scheme_host(self):
return '{}://{}'.format(self.scheme, self.get_host())
def _get_scheme(self):
"""
Hook for subclasses like WSGIRequest to implement. Return 'http' by
default.
"""
return 'http'
@property
def scheme(self):
if settings.SECURE_PROXY_SSL_HEADER:
try:
header, secure_value = settings.SECURE_PROXY_SSL_HEADER
except ValueError:
raise ImproperlyConfigured(
'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.'
)
header_value = self.META.get(header)
if header_value is not None:
return 'https' if header_value == secure_value else 'http'
return self._get_scheme()
def is_secure(self):
return self.scheme == 'https'
def is_ajax(self):
warnings.warn(
'request.is_ajax() is deprecated. See Django 3.1 release notes '
'for more details about this deprecation.',
RemovedInDjango40Warning,
)
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
@property
def encoding(self):
return self._encoding
@encoding.setter
def encoding(self, val):
"""
Set the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, remove and recreate it on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, 'GET'):
del self.GET
if hasattr(self, '_post'):
del self._post
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
@property
def upload_handlers(self):
if not self._upload_handlers:
# If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
@upload_handlers.setter
def upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def parse_file_upload(self, META, post_data):
"""Return a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning="You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
@property
def body(self):
if not hasattr(self, '_body'):
if self._read_started:
raise RawPostDataException("You cannot access body after reading from request's data stream")
# Limit the maximum request data size that will be handled in-memory.
if (settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and
int(self.META.get('CONTENT_LENGTH') or 0) > settings.DATA_UPLOAD_MAX_MEMORY_SIZE):
raise RequestDataTooBig('Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.')
try:
self._body = self.read()
except OSError as e:
raise UnreadablePostError(*e.args) from e
self._stream = BytesIO(self._body)
return self._body
def _mark_post_parse_error(self):
self._post = QueryDict()
self._files = MultiValueDict()
def _load_post_and_files(self):
"""Populate self._post and self._files if the content-type is a form type"""
if self.method != 'POST':
self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict()
return
if self._read_started and not hasattr(self, '_body'):
self._mark_post_parse_error()
return
if self.content_type == 'multipart/form-data':
if hasattr(self, '_body'):
# Use already read data
data = BytesIO(self._body)
else:
data = self
try:
self._post, self._files = self.parse_file_upload(self.META, data)
except MultiPartParserError:
# An error occurred while parsing POST data. Since when
# formatting the error the request handler might access
                # self.POST, set self._post and self._files to prevent
# attempts to parse POST data again.
self._mark_post_parse_error()
raise
elif self.content_type == 'application/x-www-form-urlencoded':
self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict()
else:
self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict()
def close(self):
if hasattr(self, '_files'):
for f in chain.from_iterable(list_[1] for list_ in self._files.lists()):
f.close()
# File-like and iterator interface.
#
# Expects self._stream to be set to an appropriate source of bytes by
# a corresponding request subclass (e.g. WSGIRequest).
# Also when request data has already been read by request.POST or
# request.body, self._stream points to a BytesIO instance
# containing that data.
def read(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.read(*args, **kwargs)
except OSError as e:
raise UnreadablePostError(*e.args) from e
def readline(self, *args, **kwargs):
self._read_started = True
try:
return self._stream.readline(*args, **kwargs)
except OSError as e:
raise UnreadablePostError(*e.args) from e
def __iter__(self):
return iter(self.readline, b'')
def readlines(self):
return list(self)
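# --- Illustrative sketch (not part of the original module): building an
# HttpRequest by hand, roughly the way a test helper might. Assumes Django
# settings are configured; the header values are made up. ---
def _http_request_example():
    request = HttpRequest()
    request.method = 'GET'
    request.path = '/articles/'
    request.META['HTTP_HOST'] = 'example.com'
    request.META['HTTP_ACCEPT'] = 'application/json'
    # headers is a case-insensitive view over META; accepts() matches the
    # requested media type against the parsed Accept header.
    assert request.headers['Accept'] == 'application/json'
    return request.accepts('application/json')  # True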
class HttpHeaders(CaseInsensitiveMapping):
HTTP_PREFIX = 'HTTP_'
# PEP 333 gives two headers which aren't prepended with HTTP_.
UNPREFIXED_HEADERS = {'CONTENT_TYPE', 'CONTENT_LENGTH'}
def __init__(self, environ):
headers = {}
for header, value in environ.items():
name = self.parse_header_name(header)
if name:
headers[name] = value
super().__init__(headers)
def __getitem__(self, key):
"""Allow header lookup using underscores in place of hyphens."""
return super().__getitem__(key.replace('_', '-'))
@classmethod
def parse_header_name(cls, header):
if header.startswith(cls.HTTP_PREFIX):
header = header[len(cls.HTTP_PREFIX):]
elif header not in cls.UNPREFIXED_HEADERS:
return None
return header.replace('_', '-').title()
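# --- Illustrative sketch (not part of the original module): how WSGI-style
# environ keys map to header names; the environ values are made up. ---
def _http_headers_example():
    headers = HttpHeaders({
        'CONTENT_TYPE': 'text/html',
        'HTTP_X_REQUEST_ID': 'abc123',
        'SERVER_NAME': 'ignored',  # not a header: no HTTP_ prefix
    })
    assert headers['Content-Type'] == 'text/html'
    # Underscores may be used in place of hyphens on lookup.
    return headers['x_request_id']  # 'abc123'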
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict which represents a query string.
A QueryDict can be used to represent GET or POST data. It subclasses
MultiValueDict since keys in such data can be repeated, for instance
in the data from a form with a <select multiple> field.
By default QueryDicts are immutable, though the copy() method
will always return a mutable copy.
Both keys and values set on this class are converted from the given encoding
(DEFAULT_CHARSET by default) to str.
"""
    # These are both reset in __init__, but are specified here at the class
    # level so that unpickling will have valid values.
_mutable = True
_encoding = None
def __init__(self, query_string=None, mutable=False, encoding=None):
super().__init__()
self.encoding = encoding or settings.DEFAULT_CHARSET
query_string = query_string or ''
parse_qsl_kwargs = {
'keep_blank_values': True,
'fields_limit': settings.DATA_UPLOAD_MAX_NUMBER_FIELDS,
'encoding': self.encoding,
}
if isinstance(query_string, bytes):
# query_string normally contains URL-encoded data, a subset of ASCII.
try:
query_string = query_string.decode(self.encoding)
except UnicodeDecodeError:
# ... but some user agents are misbehaving :-(
query_string = query_string.decode('iso-8859-1')
for key, value in limited_parse_qsl(query_string, **parse_qsl_kwargs):
self.appendlist(key, value)
self._mutable = mutable
@classmethod
def fromkeys(cls, iterable, value='', mutable=False, encoding=None):
"""
Return a new QueryDict with keys (may be repeated) from an iterable and
values from value.
"""
q = cls('', mutable=True, encoding=encoding)
for key in iterable:
q.appendlist(key, value)
if not mutable:
q._mutable = False
return q
@property
def encoding(self):
if self._encoding is None:
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
@encoding.setter
def encoding(self, value):
self._encoding = value
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().__setitem__(key, value)
def __delitem__(self, key):
self._assert_mutable()
super().__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in self.lists():
result.setlist(key, value)
return result
def __deepcopy__(self, memo):
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in self.lists():
result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
list_ = [bytes_to_text(elt, self.encoding) for elt in list_]
super().setlist(key, list_)
def setlistdefault(self, key, default_list=None):
self._assert_mutable()
return super().setlistdefault(key, default_list)
def appendlist(self, key, value):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
value = bytes_to_text(value, self.encoding)
super().appendlist(key, value)
def pop(self, key, *args):
self._assert_mutable()
return super().pop(key, *args)
def popitem(self):
self._assert_mutable()
return super().popitem()
def clear(self):
self._assert_mutable()
super().clear()
def setdefault(self, key, default=None):
self._assert_mutable()
key = bytes_to_text(key, self.encoding)
default = bytes_to_text(default, self.encoding)
return super().setdefault(key, default)
def copy(self):
"""Return a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Return an encoded string of all query string arguments.
`safe` specifies characters which don't require quoting, for example::
>>> q = QueryDict(mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
safe = safe.encode(self.encoding)
def encode(k, v):
return '%s=%s' % ((quote(k, safe), quote(v, safe)))
else:
def encode(k, v):
return urlencode({k: v})
for k, list_ in self.lists():
output.extend(
encode(k.encode(self.encoding), str(v).encode(self.encoding))
for v in list_
)
return '&'.join(output)
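# --- Illustrative sketch (not part of the original module): typical QueryDict
# usage. Assumes Django settings are configured, since parsing reads
# DEFAULT_CHARSET and DATA_UPLOAD_MAX_NUMBER_FIELDS. ---
def _querydict_example():
    q = QueryDict('a=1&a=2&b=3')
    assert q.getlist('a') == ['1', '2']
    mutable = q.copy()          # copy() always returns a mutable QueryDict
    mutable['c'] = '4'
    return mutable.urlencode()  # 'a=1&a=2&b=3&c=4'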
class MediaType:
def __init__(self, media_type_raw_line):
full_type, self.params = parse_header(
media_type_raw_line.encode('ascii') if media_type_raw_line else b''
)
self.main_type, _, self.sub_type = full_type.partition('/')
def __str__(self):
params_str = ''.join(
'; %s=%s' % (k, v.decode('ascii'))
for k, v in self.params.items()
)
return '%s%s%s' % (
self.main_type,
('/%s' % self.sub_type) if self.sub_type else '',
params_str,
)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__qualname__, self)
@property
def is_all_types(self):
return self.main_type == '*' and self.sub_type == '*'
def match(self, other):
if self.is_all_types:
return True
other = MediaType(other)
if self.main_type == other.main_type and self.sub_type in {'*', other.sub_type}:
return True
return False
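# --- Illustrative sketch (not part of the original module): MediaType
# matching as used by HttpRequest.accepts(). ---
def _media_type_example():
    assert MediaType('text/*').match('text/html')       # wildcard subtype
    assert MediaType('*/*').is_all_types                 # matches everything
    return MediaType('application/json').match('text/html')  # False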
# It's neither necessary nor appropriate to use
# django.utils.encoding.force_str() for parsing URLs and form inputs. Thus,
# this slightly more restricted function, used by QueryDict.
def bytes_to_text(s, encoding):
"""
Convert bytes objects to strings, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Return any non-bytes objects without change.
"""
if isinstance(s, bytes):
return str(s, encoding, 'replace')
else:
return s
def split_domain_port(host):
"""
Return a (domain, port) tuple from a given host.
Returned domain is lowercased. If the host is invalid, the domain will be
empty.
"""
host = host.lower()
if not host_validation_re.match(host):
return '', ''
if host[-1] == ']':
# It's an IPv6 address without a port.
return host, ''
bits = host.rsplit(':', 1)
domain, port = bits if len(bits) == 2 else (bits[0], '')
# Remove a trailing dot (if present) from the domain.
domain = domain[:-1] if domain.endswith('.') else domain
return domain, port
def validate_host(host, allowed_hosts):
"""
Validate the given host for this site.
Check that the host looks valid and matches a host or host pattern in the
given list of ``allowed_hosts``. Any pattern beginning with a period
matches a domain and all its subdomains (e.g. ``.example.com`` matches
``example.com`` and any subdomain), ``*`` matches anything, and anything
else must match exactly.
Note: This function assumes that the given host is lowercased and has
already had the port, if any, stripped off.
Return ``True`` for a valid host, ``False`` otherwise.
"""
return any(pattern == '*' or is_same_domain(host, pattern) for pattern in allowed_hosts)
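# --- Illustrative sketch (not part of the original module): how get_host()
# combines split_domain_port() and validate_host(); the hosts are made up. ---
def _host_validation_example():
    domain, port = split_domain_port('www.example.com:8000')
    assert (domain, port) == ('www.example.com', '8000')
    # A pattern starting with a dot matches the domain and all subdomains.
    return validate_host(domain, ['.example.com'])  # True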
def parse_accept_header(header):
return [MediaType(token) for token in header.split(',') if token.strip()]
|
907cbdfe681e6ee50701b00c16b149d5b69d58426f6de647fd8319035142ff34 | import warnings
from django.urls import include, re_path
from django.utils.deprecation import RemovedInDjango40Warning
from django.views import defaults
__all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'url']
handler400 = defaults.bad_request
handler403 = defaults.permission_denied
handler404 = defaults.page_not_found
handler500 = defaults.server_error
def url(regex, view, kwargs=None, name=None):
warnings.warn(
'django.conf.urls.url() is deprecated in favor of '
'django.urls.re_path().',
RemovedInDjango40Warning,
stacklevel=2,
)
return re_path(regex, view, kwargs, name)
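# --- Illustrative sketch (not part of the original module): the recommended
# migration away from the deprecated url() helper; the view import path is
# hypothetical. ---
#
# Before (emits RemovedInDjango40Warning):
#     urlpatterns = [url(r'^articles/$', views.article_list)]
# After:
#     urlpatterns = [re_path(r'^articles/$', views.article_list)]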
|
c0261308a2bddef02e86c8e2879b14da723aafad68562b5f005cee033e523c02 | """
LANG_INFO is a dictionary structure to provide meta information about languages.
About name_local: capitalize it as if your language name were appearing
inside a sentence in your language.
The 'fallback' key can be used to specify special fallback logic that doesn't
follow the traditional 'fr-ca' -> 'fr' fallback.
"""
LANG_INFO = {
'af': {
'bidi': False,
'code': 'af',
'name': 'Afrikaans',
'name_local': 'Afrikaans',
},
'ar': {
'bidi': True,
'code': 'ar',
'name': 'Arabic',
'name_local': 'العربيّة',
},
'ar-dz': {
'bidi': True,
'code': 'ar-dz',
'name': 'Algerian Arabic',
'name_local': 'العربية الجزائرية',
},
'ast': {
'bidi': False,
'code': 'ast',
'name': 'Asturian',
'name_local': 'asturianu',
},
'az': {
'bidi': True,
'code': 'az',
'name': 'Azerbaijani',
'name_local': 'Azərbaycanca',
},
'be': {
'bidi': False,
'code': 'be',
'name': 'Belarusian',
'name_local': 'беларуская',
},
'bg': {
'bidi': False,
'code': 'bg',
'name': 'Bulgarian',
'name_local': 'български',
},
'bn': {
'bidi': False,
'code': 'bn',
'name': 'Bengali',
'name_local': 'বাংলা',
},
'br': {
'bidi': False,
'code': 'br',
'name': 'Breton',
'name_local': 'brezhoneg',
},
'bs': {
'bidi': False,
'code': 'bs',
'name': 'Bosnian',
'name_local': 'bosanski',
},
'ca': {
'bidi': False,
'code': 'ca',
'name': 'Catalan',
'name_local': 'català',
},
'cs': {
'bidi': False,
'code': 'cs',
'name': 'Czech',
'name_local': 'česky',
},
'cy': {
'bidi': False,
'code': 'cy',
'name': 'Welsh',
'name_local': 'Cymraeg',
},
'da': {
'bidi': False,
'code': 'da',
'name': 'Danish',
'name_local': 'dansk',
},
'de': {
'bidi': False,
'code': 'de',
'name': 'German',
'name_local': 'Deutsch',
},
'dsb': {
'bidi': False,
'code': 'dsb',
'name': 'Lower Sorbian',
'name_local': 'dolnoserbski',
},
'el': {
'bidi': False,
'code': 'el',
'name': 'Greek',
'name_local': 'Ελληνικά',
},
'en': {
'bidi': False,
'code': 'en',
'name': 'English',
'name_local': 'English',
},
'en-au': {
'bidi': False,
'code': 'en-au',
'name': 'Australian English',
'name_local': 'Australian English',
},
'en-gb': {
'bidi': False,
'code': 'en-gb',
'name': 'British English',
'name_local': 'British English',
},
'eo': {
'bidi': False,
'code': 'eo',
'name': 'Esperanto',
'name_local': 'Esperanto',
},
'es': {
'bidi': False,
'code': 'es',
'name': 'Spanish',
'name_local': 'español',
},
'es-ar': {
'bidi': False,
'code': 'es-ar',
'name': 'Argentinian Spanish',
'name_local': 'español de Argentina',
},
'es-co': {
'bidi': False,
'code': 'es-co',
'name': 'Colombian Spanish',
'name_local': 'español de Colombia',
},
'es-mx': {
'bidi': False,
'code': 'es-mx',
'name': 'Mexican Spanish',
'name_local': 'español de Mexico',
},
'es-ni': {
'bidi': False,
'code': 'es-ni',
'name': 'Nicaraguan Spanish',
'name_local': 'español de Nicaragua',
},
'es-ve': {
'bidi': False,
'code': 'es-ve',
'name': 'Venezuelan Spanish',
'name_local': 'español de Venezuela',
},
'et': {
'bidi': False,
'code': 'et',
'name': 'Estonian',
'name_local': 'eesti',
},
'eu': {
'bidi': False,
'code': 'eu',
'name': 'Basque',
'name_local': 'Basque',
},
'fa': {
'bidi': True,
'code': 'fa',
'name': 'Persian',
'name_local': 'فارسی',
},
'fi': {
'bidi': False,
'code': 'fi',
'name': 'Finnish',
'name_local': 'suomi',
},
'fr': {
'bidi': False,
'code': 'fr',
'name': 'French',
'name_local': 'français',
},
'fy': {
'bidi': False,
'code': 'fy',
'name': 'Frisian',
'name_local': 'frysk',
},
'ga': {
'bidi': False,
'code': 'ga',
'name': 'Irish',
'name_local': 'Gaeilge',
},
'gd': {
'bidi': False,
'code': 'gd',
'name': 'Scottish Gaelic',
'name_local': 'Gàidhlig',
},
'gl': {
'bidi': False,
'code': 'gl',
'name': 'Galician',
'name_local': 'galego',
},
'he': {
'bidi': True,
'code': 'he',
'name': 'Hebrew',
'name_local': 'עברית',
},
'hi': {
'bidi': False,
'code': 'hi',
'name': 'Hindi',
'name_local': 'हिंदी',
},
'hr': {
'bidi': False,
'code': 'hr',
'name': 'Croatian',
'name_local': 'Hrvatski',
},
'hsb': {
'bidi': False,
'code': 'hsb',
'name': 'Upper Sorbian',
'name_local': 'hornjoserbsce',
},
'hu': {
'bidi': False,
'code': 'hu',
'name': 'Hungarian',
'name_local': 'Magyar',
},
'hy': {
'bidi': False,
'code': 'hy',
'name': 'Armenian',
'name_local': 'հայերեն',
},
'ia': {
'bidi': False,
'code': 'ia',
'name': 'Interlingua',
'name_local': 'Interlingua',
},
'io': {
'bidi': False,
'code': 'io',
'name': 'Ido',
'name_local': 'ido',
},
'id': {
'bidi': False,
'code': 'id',
'name': 'Indonesian',
'name_local': 'Bahasa Indonesia',
},
'is': {
'bidi': False,
'code': 'is',
'name': 'Icelandic',
'name_local': 'Íslenska',
},
'it': {
'bidi': False,
'code': 'it',
'name': 'Italian',
'name_local': 'italiano',
},
'ja': {
'bidi': False,
'code': 'ja',
'name': 'Japanese',
'name_local': '日本語',
},
'ka': {
'bidi': False,
'code': 'ka',
'name': 'Georgian',
'name_local': 'ქართული',
},
'kab': {
'bidi': False,
'code': 'kab',
'name': 'Kabyle',
'name_local': 'taqbaylit',
},
'kk': {
'bidi': False,
'code': 'kk',
'name': 'Kazakh',
'name_local': 'Қазақ',
},
'km': {
'bidi': False,
'code': 'km',
'name': 'Khmer',
'name_local': 'Khmer',
},
'kn': {
'bidi': False,
'code': 'kn',
'name': 'Kannada',
'name_local': 'Kannada',
},
'ko': {
'bidi': False,
'code': 'ko',
'name': 'Korean',
'name_local': '한국어',
},
'ky': {
'bidi': False,
'code': 'ky',
'name': 'Kyrgyz',
'name_local': 'Кыргызча',
},
'lb': {
'bidi': False,
'code': 'lb',
'name': 'Luxembourgish',
'name_local': 'Lëtzebuergesch',
},
'lt': {
'bidi': False,
'code': 'lt',
'name': 'Lithuanian',
'name_local': 'Lietuviškai',
},
'lv': {
'bidi': False,
'code': 'lv',
'name': 'Latvian',
'name_local': 'latviešu',
},
'mk': {
'bidi': False,
'code': 'mk',
'name': 'Macedonian',
'name_local': 'Македонски',
},
'ml': {
'bidi': False,
'code': 'ml',
'name': 'Malayalam',
'name_local': 'Malayalam',
},
'mn': {
'bidi': False,
'code': 'mn',
'name': 'Mongolian',
'name_local': 'Mongolian',
},
'mr': {
'bidi': False,
'code': 'mr',
'name': 'Marathi',
'name_local': 'मराठी',
},
'my': {
'bidi': False,
'code': 'my',
'name': 'Burmese',
'name_local': 'မြန်မာဘာသာ',
},
'nb': {
'bidi': False,
'code': 'nb',
'name': 'Norwegian Bokmal',
'name_local': 'norsk (bokmål)',
},
'ne': {
'bidi': False,
'code': 'ne',
'name': 'Nepali',
'name_local': 'नेपाली',
},
'nl': {
'bidi': False,
'code': 'nl',
'name': 'Dutch',
'name_local': 'Nederlands',
},
'nn': {
'bidi': False,
'code': 'nn',
'name': 'Norwegian Nynorsk',
'name_local': 'norsk (nynorsk)',
},
'no': {
'bidi': False,
'code': 'no',
'name': 'Norwegian',
'name_local': 'norsk',
},
'os': {
'bidi': False,
'code': 'os',
'name': 'Ossetic',
'name_local': 'Ирон',
},
'pa': {
'bidi': False,
'code': 'pa',
'name': 'Punjabi',
'name_local': 'Punjabi',
},
'pl': {
'bidi': False,
'code': 'pl',
'name': 'Polish',
'name_local': 'polski',
},
'pt': {
'bidi': False,
'code': 'pt',
'name': 'Portuguese',
'name_local': 'Português',
},
'pt-br': {
'bidi': False,
'code': 'pt-br',
'name': 'Brazilian Portuguese',
'name_local': 'Português Brasileiro',
},
'ro': {
'bidi': False,
'code': 'ro',
'name': 'Romanian',
'name_local': 'Română',
},
'ru': {
'bidi': False,
'code': 'ru',
'name': 'Russian',
'name_local': 'Русский',
},
'sk': {
'bidi': False,
'code': 'sk',
'name': 'Slovak',
'name_local': 'Slovensky',
},
'sl': {
'bidi': False,
'code': 'sl',
'name': 'Slovenian',
'name_local': 'Slovenščina',
},
'sq': {
'bidi': False,
'code': 'sq',
'name': 'Albanian',
'name_local': 'shqip',
},
'sr': {
'bidi': False,
'code': 'sr',
'name': 'Serbian',
'name_local': 'српски',
},
'sr-latn': {
'bidi': False,
'code': 'sr-latn',
'name': 'Serbian Latin',
'name_local': 'srpski (latinica)',
},
'sv': {
'bidi': False,
'code': 'sv',
'name': 'Swedish',
'name_local': 'svenska',
},
'sw': {
'bidi': False,
'code': 'sw',
'name': 'Swahili',
'name_local': 'Kiswahili',
},
'ta': {
'bidi': False,
'code': 'ta',
'name': 'Tamil',
'name_local': 'தமிழ்',
},
'te': {
'bidi': False,
'code': 'te',
'name': 'Telugu',
'name_local': 'తెలుగు',
},
'th': {
'bidi': False,
'code': 'th',
'name': 'Thai',
'name_local': 'ภาษาไทย',
},
'tr': {
'bidi': False,
'code': 'tr',
'name': 'Turkish',
'name_local': 'Türkçe',
},
'tt': {
'bidi': False,
'code': 'tt',
'name': 'Tatar',
'name_local': 'Татарча',
},
'udm': {
'bidi': False,
'code': 'udm',
'name': 'Udmurt',
'name_local': 'Удмурт',
},
'uk': {
'bidi': False,
'code': 'uk',
'name': 'Ukrainian',
'name_local': 'Українська',
},
'ur': {
'bidi': True,
'code': 'ur',
'name': 'Urdu',
'name_local': 'اردو',
},
'uz': {
'bidi': False,
'code': 'uz',
'name': 'Uzbek',
'name_local': 'oʻzbek tili',
},
'vi': {
'bidi': False,
'code': 'vi',
'name': 'Vietnamese',
'name_local': 'Tiếng Việt',
},
'zh-cn': {
'fallback': ['zh-hans'],
},
'zh-hans': {
'bidi': False,
'code': 'zh-hans',
'name': 'Simplified Chinese',
'name_local': '简体中文',
},
'zh-hant': {
'bidi': False,
'code': 'zh-hant',
'name': 'Traditional Chinese',
'name_local': '繁體中文',
},
'zh-hk': {
'fallback': ['zh-hant'],
},
'zh-mo': {
'fallback': ['zh-hant'],
},
'zh-my': {
'fallback': ['zh-hans'],
},
'zh-sg': {
'fallback': ['zh-hans'],
},
'zh-tw': {
'fallback': ['zh-hant'],
},
}
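# --- Illustrative sketch (not part of the original module): resolving meta
# information through the 'fallback' key, roughly the way
# django.utils.translation.get_language_info() does for codes such as 'zh-my'
# that only define a fallback entry. ---
def _lang_info_example(code='zh-my'):
    info = LANG_INFO[code]
    if 'name' not in info and 'fallback' in info:
        info = LANG_INFO[info['fallback'][0]]
    return info['name_local']  # '简体中文' for 'zh-my' via 'zh-hans'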
|
12d9d92ebd1b8b6fb24a18327ce22c78d55152d590129d7ed0de455b9810bf8b | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j E Y ж.'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j E Y ж. G:i'
YEAR_MONTH_FORMAT = 'F Y ж.'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Дүйшөмбү, Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y', # '25.10.06'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
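# --- Illustrative note (not part of the original module): the *_INPUT_FORMATS
# entries are standard strptime patterns, so a value such as '25.10.2006'
# parses with the first DATE_INPUT_FORMATS entry above, e.g.:
#
#     from datetime import datetime
#     datetime.strptime('25.10.2006', '%d.%m.%Y')  # datetime(2006, 10, 25, 0, 0)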
|
c5ab938354fd16d5a3adcdb1c1e714fcc1c5d151549cb81253905bfd25eee39b | """Translation helper functions."""
import functools
import gettext as gettext_module
import os
import re
import sys
import warnings
from asgiref.local import Local
from django.apps import apps
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.core.exceptions import AppRegistryNotReady
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, mark_safe
from . import to_language, to_locale
# Translations are cached in a dictionary for every language.
# The active translations are stored by threadid to make them thread local.
_translations = {}
_active = Local()
# The default translation is based on the settings file.
_default = None
# magic gettext number to separate context from message
CONTEXT_SEPARATOR = "\x04"
# Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9
# and RFC 3066, section 2.1
accept_language_re = _lazy_re_compile(r'''
([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*"
(?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:\.0{,3})?))? # Optional "q=1.00", "q=0.8"
(?:\s*,\s*|$) # Multiple accepts per header.
''', re.VERBOSE)
language_code_re = _lazy_re_compile(
r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*(?:@[a-z0-9]{1,20})?$',
re.IGNORECASE
)
language_code_prefix_re = _lazy_re_compile(r'^/(\w+([@-]\w+)?)(/|$)')
@receiver(setting_changed)
def reset_cache(**kwargs):
"""
Reset global state when LANGUAGES setting has been changed, as some
languages should no longer be accepted.
"""
if kwargs['setting'] in ('LANGUAGES', 'LANGUAGE_CODE'):
check_for_language.cache_clear()
get_languages.cache_clear()
get_supported_language_variant.cache_clear()
class TranslationCatalog:
"""
    Simulate a dict for DjangoTranslation._catalog so that multiple catalogs
    with different plural equations are kept separate.
"""
def __init__(self, trans=None):
self._catalogs = [trans._catalog.copy()] if trans else [{}]
self._plurals = [trans.plural] if trans else [lambda n: int(n != 1)]
def __getitem__(self, key):
for cat in self._catalogs:
try:
return cat[key]
except KeyError:
pass
raise KeyError(key)
def __setitem__(self, key, value):
self._catalogs[0][key] = value
def __contains__(self, key):
return any(key in cat for cat in self._catalogs)
def items(self):
for cat in self._catalogs:
yield from cat.items()
def keys(self):
for cat in self._catalogs:
yield from cat.keys()
def update(self, trans):
# Merge if plural function is the same, else prepend.
for cat, plural in zip(self._catalogs, self._plurals):
if trans.plural.__code__ == plural.__code__:
cat.update(trans._catalog)
break
else:
self._catalogs.insert(0, trans._catalog)
self._plurals.insert(0, trans.plural)
def get(self, key, default=None):
missing = object()
for cat in self._catalogs:
result = cat.get(key, missing)
if result is not missing:
return result
return default
def plural(self, msgid, num):
for cat, plural in zip(self._catalogs, self._plurals):
tmsg = cat.get((msgid, plural(num)))
if tmsg is not None:
return tmsg
raise KeyError
class DjangoTranslation(gettext_module.GNUTranslations):
"""
Set up the GNUTranslations context with regard to output charset.
This translation object will be constructed out of multiple GNUTranslations
objects by merging their catalogs. It will construct an object for the
requested language and add a fallback to the default language, if it's
different from the requested language.
"""
domain = 'django'
def __init__(self, language, domain=None, localedirs=None):
"""Create a GNUTranslations() using many locale directories"""
gettext_module.GNUTranslations.__init__(self)
if domain is not None:
self.domain = domain
self.__language = language
self.__to_language = to_language(language)
self.__locale = to_locale(language)
self._catalog = None
# If a language doesn't have a catalog, use the Germanic default for
# pluralization: anything except one is pluralized.
self.plural = lambda n: int(n != 1)
if self.domain == 'django':
if localedirs is not None:
# A module-level cache is used for caching 'django' translations
warnings.warn("localedirs is ignored when domain is 'django'.", RuntimeWarning)
localedirs = None
self._init_translation_catalog()
if localedirs:
for localedir in localedirs:
translation = self._new_gnu_trans(localedir)
self.merge(translation)
else:
self._add_installed_apps_translations()
self._add_local_translations()
if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None:
# default lang should have at least one translation file available.
raise OSError('No translation files found for default language %s.' % settings.LANGUAGE_CODE)
self._add_fallback(localedirs)
if self._catalog is None:
# No catalogs found for this language, set an empty catalog.
self._catalog = TranslationCatalog()
def __repr__(self):
return "<DjangoTranslation lang:%s>" % self.__language
def _new_gnu_trans(self, localedir, use_null_fallback=True):
"""
Return a mergeable gettext.GNUTranslations instance.
A convenience wrapper. By default gettext uses 'fallback=False'.
        The `use_null_fallback` parameter avoids confusion with any other
        references to 'fallback'.
"""
return gettext_module.translation(
domain=self.domain,
localedir=localedir,
languages=[self.__locale],
fallback=use_null_fallback,
)
def _init_translation_catalog(self):
"""Create a base catalog using global django translations."""
settingsfile = sys.modules[settings.__module__].__file__
localedir = os.path.join(os.path.dirname(settingsfile), 'locale')
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_installed_apps_translations(self):
"""Merge translations from each installed app."""
try:
app_configs = reversed(list(apps.get_app_configs()))
except AppRegistryNotReady:
raise AppRegistryNotReady(
"The translation infrastructure cannot be initialized before the "
"apps registry is ready. Check that you don't make non-lazy "
"gettext calls at import time.")
for app_config in app_configs:
localedir = os.path.join(app_config.path, 'locale')
if os.path.exists(localedir):
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_local_translations(self):
"""Merge translations defined in LOCALE_PATHS."""
for localedir in reversed(settings.LOCALE_PATHS):
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_fallback(self, localedirs=None):
"""Set the GNUTranslations() fallback with the default language."""
# Don't set a fallback for the default language or any English variant
# (as it's empty, so it'll ALWAYS fall back to the default language)
if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'):
return
if self.domain == 'django':
# Get from cache
default_translation = translation(settings.LANGUAGE_CODE)
else:
default_translation = DjangoTranslation(
settings.LANGUAGE_CODE, domain=self.domain, localedirs=localedirs
)
self.add_fallback(default_translation)
def merge(self, other):
"""Merge another translation into this catalog."""
if not getattr(other, '_catalog', None):
return # NullTranslations() has no _catalog
if self._catalog is None:
# Take plural and _info from first catalog found (generally Django's).
self.plural = other.plural
self._info = other._info.copy()
self._catalog = TranslationCatalog(other)
else:
self._catalog.update(other)
if other._fallback:
self.add_fallback(other._fallback)
def language(self):
"""Return the translation language."""
return self.__language
def to_language(self):
"""Return the translation language name."""
return self.__to_language
def ngettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog.plural(msgid1, n)
except KeyError:
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
if n == 1:
tmsg = msgid1
else:
tmsg = msgid2
return tmsg
def translation(language):
"""
Return a translation object in the default 'django' domain.
"""
global _translations
if language not in _translations:
_translations[language] = DjangoTranslation(language)
return _translations[language]
def activate(language):
"""
Fetch the translation object for a given language and install it as the
current translation object for the current thread.
"""
if not language:
return
_active.value = translation(language)
def deactivate():
"""
Uninstall the active translation object so that further _() calls resolve
to the default translation object.
"""
if hasattr(_active, "value"):
del _active.value
def deactivate_all():
"""
Make the active translation object a NullTranslations() instance. This is
useful when we want delayed translations to appear as the original string
for some reason.
"""
_active.value = gettext_module.NullTranslations()
_active.value.to_language = lambda *args: None
def get_language():
"""Return the currently selected language."""
t = getattr(_active, "value", None)
if t is not None:
try:
return t.to_language()
except AttributeError:
pass
# If we don't have a real translation object, assume it's the default language.
return settings.LANGUAGE_CODE
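# --- Illustrative sketch (not part of the original module): the usual
# activate()/get_language()/deactivate() round trip. Assumes settings are
# configured and a 'de' translation catalog is available. ---
def _activation_example():
    activate('de')
    try:
        return get_language()  # 'de'
    finally:
        deactivate()           # restore the previously configured default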
def get_language_bidi():
"""
Return selected language's BiDi layout.
* False = left-to-right layout
* True = right-to-left layout
"""
lang = get_language()
if lang is None:
return False
else:
base_lang = get_language().split('-')[0]
return base_lang in settings.LANGUAGES_BIDI
def catalog():
"""
Return the current active catalog for further processing.
This can be used if you need to modify the catalog or want to access the
whole message catalog instead of just translating one string.
"""
global _default
t = getattr(_active, "value", None)
if t is not None:
return t
if _default is None:
_default = translation(settings.LANGUAGE_CODE)
return _default
def gettext(message):
"""
Translate the 'message' string. It uses the current thread to find the
translation object to use. If no current translation is activated, the
message will be run through the default translation object.
"""
global _default
eol_message = message.replace('\r\n', '\n').replace('\r', '\n')
if eol_message:
_default = _default or translation(settings.LANGUAGE_CODE)
translation_object = getattr(_active, "value", _default)
result = translation_object.gettext(eol_message)
else:
# Return an empty value of the corresponding type if an empty message
# is given, instead of metadata, which is the default gettext behavior.
result = type(message)('')
if isinstance(message, SafeData):
return mark_safe(result)
return result
def pgettext(context, message):
msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message)
result = gettext(msg_with_ctxt)
if CONTEXT_SEPARATOR in result:
# Translation not found
result = message
elif isinstance(message, SafeData):
result = mark_safe(result)
return result
def gettext_noop(message):
"""
Mark strings for translation but don't translate them now. This can be
used to store strings in global variables that should stay in the base
language (because they might be used externally) and will be translated
later.
"""
return message
def do_ntranslate(singular, plural, number, translation_function):
global _default
t = getattr(_active, "value", None)
if t is not None:
return getattr(t, translation_function)(singular, plural, number)
if _default is None:
_default = translation(settings.LANGUAGE_CODE)
return getattr(_default, translation_function)(singular, plural, number)
def ngettext(singular, plural, number):
"""
Return a string of the translation of either the singular or plural,
based on the number.
"""
return do_ntranslate(singular, plural, number, 'ngettext')
def npgettext(context, singular, plural, number):
msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, singular),
"%s%s%s" % (context, CONTEXT_SEPARATOR, plural),
number)
result = ngettext(*msgs_with_ctxt)
if CONTEXT_SEPARATOR in result:
# Translation not found
result = ngettext(singular, plural, number)
return result
def all_locale_paths():
"""
    Return a list of paths to user-provided language files.
"""
globalpath = os.path.join(
os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
app_paths = []
for app_config in apps.get_app_configs():
locale_path = os.path.join(app_config.path, 'locale')
if os.path.exists(locale_path):
app_paths.append(locale_path)
return [globalpath, *settings.LOCALE_PATHS, *app_paths]
@functools.lru_cache(maxsize=1000)
def check_for_language(lang_code):
"""
Check whether there is a global language file for the given language
code. This is used to decide whether a user-provided language is
available.
lru_cache should have a maxsize to prevent from memory exhaustion attacks,
as the provided language codes are taken from the HTTP request. See also
<https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
"""
# First, a quick check to make sure lang_code is well-formed (#21458)
if lang_code is None or not language_code_re.search(lang_code):
return False
return any(
gettext_module.find('django', path, [to_locale(lang_code)]) is not None
for path in all_locale_paths()
)
@functools.lru_cache()
def get_languages():
"""
Cache of settings.LANGUAGES in a dictionary for easy lookups by key.
"""
return dict(settings.LANGUAGES)
@functools.lru_cache(maxsize=1000)
def get_supported_language_variant(lang_code, strict=False):
"""
Return the language code that's listed in supported languages, possibly
selecting a more generic variant. Raise LookupError if nothing is found.
If `strict` is False (the default), look for a country-specific variant
when neither the language code nor its generic variant is found.
lru_cache should have a maxsize to prevent from memory exhaustion attacks,
as the provided language codes are taken from the HTTP request. See also
<https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
"""
if lang_code:
# If 'fr-ca' is not supported, try special fallback or language-only 'fr'.
possible_lang_codes = [lang_code]
try:
possible_lang_codes.extend(LANG_INFO[lang_code]['fallback'])
except KeyError:
pass
generic_lang_code = lang_code.split('-')[0]
possible_lang_codes.append(generic_lang_code)
supported_lang_codes = get_languages()
for code in possible_lang_codes:
if code in supported_lang_codes and check_for_language(code):
return code
if not strict:
# if fr-fr is not supported, try fr-ca.
for supported_code in supported_lang_codes:
if supported_code.startswith(generic_lang_code + '-'):
return supported_code
raise LookupError(lang_code)
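# --- Illustrative sketch (not part of the original module): variant
# resolution. Assumes settings.LANGUAGES lists 'fr' but not 'fr-ca', and that
# the 'fr' catalog is installed. ---
def _variant_example():
    # 'fr-ca' is not listed, so the generic 'fr' variant is returned.
    return get_supported_language_variant('fr-ca')  # 'fr'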
def get_language_from_path(path, strict=False):
"""
Return the language code if there's a valid language code found in `path`.
If `strict` is False (the default), look for a country-specific variant
when neither the language code nor its generic variant is found.
"""
regex_match = language_code_prefix_re.match(path)
if not regex_match:
return None
lang_code = regex_match[1]
try:
return get_supported_language_variant(lang_code, strict=strict)
except LookupError:
return None
def get_language_from_request(request, check_path=False):
"""
Analyze the request to find what language the user wants the system to
show. Only languages listed in settings.LANGUAGES are taken into account.
If the user requests a sublanguage where we have a main language, we send
out the main language.
If check_path is True, the URL path prefix will be checked for a language
code, otherwise this is skipped for backwards compatibility.
"""
if check_path:
lang_code = get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME)
if lang_code is not None and lang_code in get_languages() and check_for_language(lang_code):
return lang_code
try:
return get_supported_language_variant(lang_code)
except LookupError:
pass
accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
for accept_lang, unused in parse_accept_lang_header(accept):
if accept_lang == '*':
break
if not language_code_re.search(accept_lang):
continue
try:
return get_supported_language_variant(accept_lang)
except LookupError:
continue
try:
return get_supported_language_variant(settings.LANGUAGE_CODE)
except LookupError:
return settings.LANGUAGE_CODE
@functools.lru_cache(maxsize=1000)
def parse_accept_lang_header(lang_string):
"""
Parse the lang_string, which is the body of an HTTP Accept-Language
header, and return a tuple of (lang, q-value), ordered by 'q' values.
Return an empty tuple if there are any format errors in lang_string.
"""
result = []
pieces = accept_language_re.split(lang_string.lower())
if pieces[-1]:
return ()
for i in range(0, len(pieces) - 1, 3):
first, lang, priority = pieces[i:i + 3]
if first:
return ()
if priority:
priority = float(priority)
else:
priority = 1.0
result.append((lang, priority))
result.sort(key=lambda k: k[1], reverse=True)
return tuple(result)
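# --- Illustrative sketch (not part of the original module): parsing an
# Accept-Language header into (lang, q-value) pairs ordered by priority. ---
def _accept_lang_example():
    return parse_accept_lang_header('en-au;q=0.8, fr;q=0.9, es')
    # (('es', 1.0), ('fr', 0.9), ('en-au', 0.8))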
|
495a5f280f1d7f8d707fb43e5aa365b8e4415a12f8a47880bf914db9f58ff4ba | import warnings
from io import StringIO
from django.template.base import TRANSLATOR_COMMENT_MARK, Lexer, TokenType
from django.utils.regex_helper import _lazy_re_compile
from . import TranslatorCommentWarning, trim_whitespace
dot_re = _lazy_re_compile(r'\S')
def blankout(src, char):
"""
Change every non-whitespace character to the given char.
Used in the templatize function.
"""
return dot_re.sub(char, src)
context_re = _lazy_re_compile(r"""^\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?'))\s*""")
inline_re = _lazy_re_compile(
# Match the trans/translate 'some text' part.
r"""^\s*trans(?:late)?\s+((?:"[^"]*?")|(?:'[^']*?'))"""
# Match and ignore optional filters
r"""(?:\s*\|\s*[^\s:]+(?::(?:[^\s'":]+|(?:"[^"]*?")|(?:'[^']*?')))?)*"""
# Match the optional context part
r"""(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?\s*"""
)
block_re = _lazy_re_compile(r"""^\s*blocktrans(?:late)?(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?(?:\s+|$)""")
endblock_re = _lazy_re_compile(r"""^\s*endblocktrans(?:late)?$""")
plural_re = _lazy_re_compile(r"""^\s*plural$""")
constant_re = _lazy_re_compile(r"""_\(((?:".*?")|(?:'.*?'))\)""")
def templatize(src, origin=None):
"""
Turn a Django template into something that is understood by xgettext. It
does so by translating the Django translation tags into standard gettext
function invocations.
"""
out = StringIO('')
message_context = None
intrans = False
inplural = False
trimmed = False
singular = []
plural = []
incomment = False
comment = []
lineno_comment_map = {}
comment_lineno_cache = None
# Adding the u prefix allows gettext to recognize the string (#26093).
raw_prefix = 'u'
def join_tokens(tokens, trim=False):
message = ''.join(tokens)
if trim:
message = trim_whitespace(message)
return message
for t in Lexer(src).tokenize():
if incomment:
if t.token_type == TokenType.BLOCK and t.contents == 'endcomment':
content = ''.join(comment)
translators_comment_start = None
for lineno, line in enumerate(content.splitlines(True)):
if line.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
translators_comment_start = lineno
for lineno, line in enumerate(content.splitlines(True)):
if translators_comment_start is not None and lineno >= translators_comment_start:
out.write(' # %s' % line)
else:
out.write(' #\n')
incomment = False
comment = []
else:
comment.append(t.contents)
elif intrans:
if t.token_type == TokenType.BLOCK:
endbmatch = endblock_re.match(t.contents)
pluralmatch = plural_re.match(t.contents)
if endbmatch:
if inplural:
if message_context:
out.write(' npgettext({p}{!r}, {p}{!r}, {p}{!r},count) '.format(
message_context,
join_tokens(singular, trimmed),
join_tokens(plural, trimmed),
p=raw_prefix,
))
else:
out.write(' ngettext({p}{!r}, {p}{!r}, count) '.format(
join_tokens(singular, trimmed),
join_tokens(plural, trimmed),
p=raw_prefix,
))
for part in singular:
out.write(blankout(part, 'S'))
for part in plural:
out.write(blankout(part, 'P'))
else:
if message_context:
out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
message_context,
join_tokens(singular, trimmed),
p=raw_prefix,
))
else:
out.write(' gettext({p}{!r}) '.format(
join_tokens(singular, trimmed),
p=raw_prefix,
))
for part in singular:
out.write(blankout(part, 'S'))
message_context = None
intrans = False
inplural = False
singular = []
plural = []
elif pluralmatch:
inplural = True
else:
filemsg = ''
if origin:
filemsg = 'file %s, ' % origin
raise SyntaxError(
"Translation blocks must not include other block tags: "
"%s (%sline %d)" % (t.contents, filemsg, t.lineno)
)
elif t.token_type == TokenType.VAR:
if inplural:
plural.append('%%(%s)s' % t.contents)
else:
singular.append('%%(%s)s' % t.contents)
elif t.token_type == TokenType.TEXT:
contents = t.contents.replace('%', '%%')
if inplural:
plural.append(contents)
else:
singular.append(contents)
else:
# Handle comment tokens (`{# ... #}`) plus other constructs on
# the same line:
if comment_lineno_cache is not None:
cur_lineno = t.lineno + t.contents.count('\n')
if comment_lineno_cache == cur_lineno:
if t.token_type != TokenType.COMMENT:
for c in lineno_comment_map[comment_lineno_cache]:
filemsg = ''
if origin:
filemsg = 'file %s, ' % origin
warn_msg = (
"The translator-targeted comment '%s' "
"(%sline %d) was ignored, because it wasn't "
"the last item on the line."
) % (c, filemsg, comment_lineno_cache)
warnings.warn(warn_msg, TranslatorCommentWarning)
lineno_comment_map[comment_lineno_cache] = []
else:
out.write('# %s' % ' | '.join(lineno_comment_map[comment_lineno_cache]))
comment_lineno_cache = None
if t.token_type == TokenType.BLOCK:
imatch = inline_re.match(t.contents)
bmatch = block_re.match(t.contents)
cmatches = constant_re.findall(t.contents)
if imatch:
g = imatch[1]
if g[0] == '"':
g = g.strip('"')
elif g[0] == "'":
g = g.strip("'")
g = g.replace('%', '%%')
if imatch[2]:
# A context is provided
context_match = context_re.match(imatch[2])
message_context = context_match[1]
if message_context[0] == '"':
message_context = message_context.strip('"')
elif message_context[0] == "'":
message_context = message_context.strip("'")
out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
message_context, g, p=raw_prefix
))
message_context = None
else:
out.write(' gettext({p}{!r}) '.format(g, p=raw_prefix))
elif bmatch:
for fmatch in constant_re.findall(t.contents):
out.write(' _(%s) ' % fmatch)
if bmatch[1]:
# A context is provided
context_match = context_re.match(bmatch[1])
message_context = context_match[1]
if message_context[0] == '"':
message_context = message_context.strip('"')
elif message_context[0] == "'":
message_context = message_context.strip("'")
intrans = True
inplural = False
trimmed = 'trimmed' in t.split_contents()
singular = []
plural = []
elif cmatches:
for cmatch in cmatches:
out.write(' _(%s) ' % cmatch)
elif t.contents == 'comment':
incomment = True
else:
out.write(blankout(t.contents, 'B'))
elif t.token_type == TokenType.VAR:
parts = t.contents.split('|')
cmatch = constant_re.match(parts[0])
if cmatch:
out.write(' _(%s) ' % cmatch[1])
for p in parts[1:]:
if p.find(':_(') >= 0:
out.write(' %s ' % p.split(':', 1)[1])
else:
out.write(blankout(p, 'F'))
elif t.token_type == TokenType.COMMENT:
if t.contents.lstrip().startswith(TRANSLATOR_COMMENT_MARK):
lineno_comment_map.setdefault(t.lineno, []).append(t.contents)
comment_lineno_cache = t.lineno
else:
out.write(blankout(t.contents, 'X'))
return out.getvalue()
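# --- Illustrative sketch (not part of the original module): what templatize()
# produces for a simple template so that xgettext can extract the message;
# the exact whitespace in the output is approximate. ---
def _templatize_example():
    return templatize("{% trans 'Hello' %}")
    # roughly: " gettext(u'Hello') "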
|
531b576146d1042fc6d7c63026e440c74a641c0c469e78db6754ed6c1a2b92a2 | import functools
import re
from itertools import chain
from django.conf import settings
from django.db import models
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.operations.models import AlterModelOptions
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.utils import (
COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp,
)
from django.utils.topological_sort import stable_topological_sort
class MigrationAutodetector:
"""
Take a pair of ProjectStates and compare them to see what the first would
need doing to make it match the second (the second usually being the
project's current state).
Note that this naturally operates on entire projects at a time,
as it's likely that changes interact (for example, you can't
add a ForeignKey without having a migration to add the table it
depends on first). A user interface may offer single-app usage
if it wishes, with the caveat that it may not always be possible.
"""
def __init__(self, from_state, to_state, questioner=None):
self.from_state = from_state
self.to_state = to_state
self.questioner = questioner or MigrationQuestioner()
self.existing_apps = {app for app, model in from_state.models}
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
"""
Main entry point to produce a list of applicable changes.
Take a graph to base names on and an optional set of apps
to try and restrict to (restriction is not guaranteed)
"""
changes = self._detect_changes(convert_apps, graph)
changes = self.arrange_for_graph(changes, graph, migration_name)
if trim_to_apps:
changes = self._trim_to_apps(changes, trim_to_apps)
return changes
def deep_deconstruct(self, obj):
"""
Recursive deconstruction for a field and its arguments.
Used for full comparison for rename/alter; sometimes a single-level
deconstruction will not compare correctly.
"""
if isinstance(obj, list):
return [self.deep_deconstruct(value) for value in obj]
elif isinstance(obj, tuple):
return tuple(self.deep_deconstruct(value) for value in obj)
elif isinstance(obj, dict):
return {
key: self.deep_deconstruct(value)
for key, value in obj.items()
}
elif isinstance(obj, functools.partial):
return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords))
elif isinstance(obj, COMPILED_REGEX_TYPE):
return RegexObject(obj)
elif isinstance(obj, type):
# If this is a type that implements 'deconstruct' as an instance method,
# avoid treating this as being deconstructible itself - see #22951
return obj
elif hasattr(obj, 'deconstruct'):
deconstructed = obj.deconstruct()
if isinstance(obj, models.Field):
# we have a field which also returns a name
deconstructed = deconstructed[1:]
path, args, kwargs = deconstructed
return (
path,
[self.deep_deconstruct(value) for value in args],
{
key: self.deep_deconstruct(value)
for key, value in kwargs.items()
},
)
else:
return obj
def only_relation_agnostic_fields(self, fields):
"""
Return a definition of the fields that ignores field names and
what related fields actually relate to. Used for detecting renames (as
the related fields change during renames).
"""
fields_def = []
for name, field in sorted(fields.items()):
deconstruction = self.deep_deconstruct(field)
if field.remote_field and field.remote_field.model:
del deconstruction[2]['to']
fields_def.append(deconstruction)
return fields_def
def _detect_changes(self, convert_apps=None, graph=None):
"""
Return a dict of migration plans which will achieve the
change from from_state to to_state. The dict has app labels
as keys and a list of migrations as values.
The resulting migrations aren't specially named, but the names
do matter for dependencies inside the set.
convert_apps is the list of apps to convert to use migrations
(i.e. to make initial migrations for, in the usual case)
graph is an optional argument that, if provided, can help improve
dependency generation and avoid potential circular dependencies.
"""
# The first phase is generating all the operations for each app
# and gathering them into a big per-app list.
# Then go through that list, order it, and split into migrations to
# resolve dependencies caused by M2Ms and FKs.
self.generated_operations = {}
self.altered_indexes = {}
self.altered_constraints = {}
# Prepare some old/new state and model lists, separating
# proxy models and ignoring unmigrated apps.
self.old_apps = self.from_state.concrete_apps
self.new_apps = self.to_state.apps
self.old_model_keys = set()
self.old_proxy_keys = set()
self.old_unmanaged_keys = set()
self.new_model_keys = set()
self.new_proxy_keys = set()
self.new_unmanaged_keys = set()
for al, mn in self.from_state.models:
model = self.old_apps.get_model(al, mn)
if not model._meta.managed:
self.old_unmanaged_keys.add((al, mn))
elif al not in self.from_state.real_apps:
if model._meta.proxy:
self.old_proxy_keys.add((al, mn))
else:
self.old_model_keys.add((al, mn))
for al, mn in self.to_state.models:
model = self.new_apps.get_model(al, mn)
if not model._meta.managed:
self.new_unmanaged_keys.add((al, mn))
elif (
al not in self.from_state.real_apps or
(convert_apps and al in convert_apps)
):
if model._meta.proxy:
self.new_proxy_keys.add((al, mn))
else:
self.new_model_keys.add((al, mn))
# Renames have to come first
self.generate_renamed_models()
# Prepare lists of fields and generate through model map
self._prepare_field_lists()
self._generate_through_model_map()
# Generate non-rename model operations
self.generate_deleted_models()
self.generate_created_models()
self.generate_deleted_proxies()
self.generate_created_proxies()
self.generate_altered_options()
self.generate_altered_managers()
# Create the altered indexes and store them in self.altered_indexes.
# This avoids the same computation in generate_removed_indexes()
# and generate_added_indexes().
self.create_altered_indexes()
self.create_altered_constraints()
# Generate index removal operations before field is removed
self.generate_removed_constraints()
self.generate_removed_indexes()
# Generate field operations
self.generate_renamed_fields()
self.generate_removed_fields()
self.generate_added_fields()
self.generate_altered_fields()
self.generate_altered_unique_together()
self.generate_altered_index_together()
self.generate_added_indexes()
self.generate_added_constraints()
self.generate_altered_db_table()
self.generate_altered_order_with_respect_to()
self._sort_migrations()
self._build_migration_list(graph)
self._optimize_migrations()
return self.migrations
def _prepare_field_lists(self):
"""
Prepare field lists and a list of the fields that used through models
in the old state so dependencies can be made from the through model
deletion to the field that uses it.
"""
self.kept_model_keys = self.old_model_keys & self.new_model_keys
self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
self.through_users = {}
self.old_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.from_state.models[
app_label,
self.renamed_models.get((app_label, model_name), model_name)
].fields
}
self.new_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.to_state.models[app_label, model_name].fields
}
def _generate_through_model_map(self):
"""Through model map generation."""
for app_label, model_name in sorted(self.old_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
for field_name in old_model_state.fields:
old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
not old_field.remote_field.through._meta.auto_created):
through_key = (
old_field.remote_field.through._meta.app_label,
old_field.remote_field.through._meta.model_name,
)
self.through_users[through_key] = (app_label, old_model_name, field_name)
@staticmethod
def _resolve_dependency(dependency):
"""
Return the resolved dependency and a boolean denoting whether or not
it was swappable.
"""
if dependency[0] != '__setting__':
return dependency, False
resolved_app_label, resolved_object_name = getattr(settings, dependency[1]).split('.')
return (resolved_app_label, resolved_object_name.lower()) + dependency[2:], True
def _build_migration_list(self, graph=None):
"""
Chop the lists of operations up into migrations with dependencies on
each other. Do this by going through an app's list of operations until
one is found that has an outgoing dependency that isn't in another
app's migration yet (hasn't been chopped off its list). Then chop off
the operations before it into a migration and move onto the next app.
If the loop completes without doing anything, there's a circular
dependency (which _should_ be impossible as the operations are
all split at this point so they can't depend and be depended on).
"""
self.migrations = {}
num_ops = sum(len(x) for x in self.generated_operations.values())
chop_mode = False
while num_ops:
# On every iteration, we step through all the apps and see if there
# is a completed set of operations.
# If we find that a subset of the operations are complete we can
# try to chop it off from the rest and continue, but we only
# do this if we've already been through the list once before
# without any chopping and nothing has changed.
for app_label in sorted(self.generated_operations):
chopped = []
dependencies = set()
for operation in list(self.generated_operations[app_label]):
deps_satisfied = True
operation_dependencies = set()
for dep in operation._auto_deps:
# Temporarily resolve the swappable dependency to
# prevent circular references. While keeping the
# dependency checks on the resolved model, add the
# swappable dependencies.
original_dep = dep
dep, is_swappable_dep = self._resolve_dependency(dep)
if dep[0] != app_label:
# External app dependency. See if it's not yet
# satisfied.
for other_operation in self.generated_operations.get(dep[0], []):
if self.check_dependency(other_operation, dep):
deps_satisfied = False
break
if not deps_satisfied:
break
else:
if is_swappable_dep:
operation_dependencies.add((original_dep[0], original_dep[1]))
elif dep[0] in self.migrations:
operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
else:
# If we can't find the other app, we add a first/last dependency,
# but only if we've already been through once and checked everything
if chop_mode:
# If the app already exists, we add a dependency on the last migration,
# as we don't know which migration contains the target field.
# If it's not yet migrated or has no migrations, we use __first__
if graph and graph.leaf_nodes(dep[0]):
operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
else:
operation_dependencies.add((dep[0], "__first__"))
else:
deps_satisfied = False
if deps_satisfied:
chopped.append(operation)
dependencies.update(operation_dependencies)
del self.generated_operations[app_label][0]
else:
break
# Make a migration! Well, only if there's stuff to put in it
if dependencies or chopped:
if not self.generated_operations[app_label] or chop_mode:
subclass = type("Migration", (Migration,), {"operations": [], "dependencies": []})
instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
instance.dependencies = list(dependencies)
instance.operations = chopped
instance.initial = app_label not in self.existing_apps
self.migrations.setdefault(app_label, []).append(instance)
chop_mode = False
else:
self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
new_num_ops = sum(len(x) for x in self.generated_operations.values())
if new_num_ops == num_ops:
if not chop_mode:
chop_mode = True
else:
raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
num_ops = new_num_ops
def _sort_migrations(self):
"""
Reorder each app's operations so intra-app dependencies are satisfied.
Reordering may be needed so FKs work nicely inside the same app.
"""
for app_label, ops in sorted(self.generated_operations.items()):
# construct a dependency graph for intra-app dependencies
dependency_graph = {op: set() for op in ops}
for op in ops:
for dep in op._auto_deps:
# Resolve intra-app dependencies to handle circular
# references involving a swappable model.
dep = self._resolve_dependency(dep)[0]
if dep[0] == app_label:
for op2 in ops:
if self.check_dependency(op2, dep):
dependency_graph[op].add(op2)
# we use a stable sort for deterministic tests & general behavior
self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)
def _optimize_migrations(self):
# Add in internal dependencies among the migrations
for app_label, migrations in self.migrations.items():
for m1, m2 in zip(migrations, migrations[1:]):
m2.dependencies.append((app_label, m1.name))
# De-dupe dependencies
for migrations in self.migrations.values():
for migration in migrations:
migration.dependencies = list(set(migration.dependencies))
# Optimize migrations
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.operations = MigrationOptimizer().optimize(migration.operations, app_label)
def check_dependency(self, operation, dependency):
"""
Return True if the given operation depends on the given dependency,
False otherwise.
"""
# Created model
if dependency[2] is None and dependency[3] is True:
return (
isinstance(operation, operations.CreateModel) and
operation.name_lower == dependency[1].lower()
)
# Created field
elif dependency[2] is not None and dependency[3] is True:
return (
(
isinstance(operation, operations.CreateModel) and
operation.name_lower == dependency[1].lower() and
any(dependency[2] == x for x, y in operation.fields)
) or
(
isinstance(operation, operations.AddField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
)
# Removed field
elif dependency[2] is not None and dependency[3] is False:
return (
isinstance(operation, operations.RemoveField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
# Removed model
elif dependency[2] is None and dependency[3] is False:
return (
isinstance(operation, operations.DeleteModel) and
operation.name_lower == dependency[1].lower()
)
# Field being altered
elif dependency[2] is not None and dependency[3] == "alter":
return (
isinstance(operation, operations.AlterField) and
operation.model_name_lower == dependency[1].lower() and
operation.name_lower == dependency[2].lower()
)
# order_with_respect_to being unset for a field
elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
return (
isinstance(operation, operations.AlterOrderWithRespectTo) and
operation.name_lower == dependency[1].lower() and
(operation.order_with_respect_to or "").lower() != dependency[2].lower()
)
# Field is removed and part of an index/unique_together
elif dependency[2] is not None and dependency[3] == "foo_together_change":
return (
isinstance(operation, (operations.AlterUniqueTogether,
operations.AlterIndexTogether)) and
operation.name_lower == dependency[1].lower()
)
# Unknown dependency. Raise an error.
else:
raise ValueError("Can't handle dependency %r" % (dependency,))
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
# Dependencies are (app_label, model_name, field_name, create/delete as True/False)
operation._auto_deps = dependencies or []
if beginning:
self.generated_operations.setdefault(app_label, []).insert(0, operation)
else:
self.generated_operations.setdefault(app_label, []).append(operation)
def swappable_first_key(self, item):
"""
Place potential swappable models first in lists of created models (only
real way to solve #22783).
"""
try:
model = self.new_apps.get_model(item[0], item[1])
base_names = [base.__name__ for base in model.__bases__]
string_version = "%s.%s" % (item[0], item[1])
if (
model._meta.swappable or
"AbstractUser" in base_names or
"AbstractBaseUser" in base_names or
settings.AUTH_USER_MODEL.lower() == string_version.lower()
):
return ("___" + item[0], "___" + item[1])
except LookupError:
pass
return item
def generate_renamed_models(self):
"""
Find any renamed models, generate the operations for them, and remove
the old entry from the model lists. Must be run before other
model-level generation.
"""
self.renamed_models = {}
self.renamed_models_rel = {}
added_models = self.new_model_keys - self.old_model_keys
for app_label, model_name in sorted(added_models):
model_state = self.to_state.models[app_label, model_name]
model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
removed_models = self.old_model_keys - self.new_model_keys
for rem_app_label, rem_model_name in removed_models:
if rem_app_label == app_label:
rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
if model_fields_def == rem_model_fields_def:
if self.questioner.ask_rename_model(rem_model_state, model_state):
model_opts = self.new_apps.get_model(app_label, model_name)._meta
dependencies = []
for field in model_opts.get_fields():
if field.is_relation:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
self.add_operation(
app_label,
operations.RenameModel(
old_name=rem_model_state.name,
new_name=model_state.name,
),
dependencies=dependencies,
)
self.renamed_models[app_label, model_name] = rem_model_name
renamed_models_rel_key = '%s.%s' % (
rem_model_state.app_label,
rem_model_state.name_lower,
)
self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % (
model_state.app_label,
model_state.name_lower,
)
self.old_model_keys.remove((rem_app_label, rem_model_name))
self.old_model_keys.add((app_label, model_name))
break
def generate_created_models(self):
"""
Find all new models (both managed and unmanaged) and make create
operations for them as well as separate operations to create any
foreign key or M2M relationships (these are optimized later, if
possible).
Defer any model options that refer to collections of fields that might
be deferred (e.g. unique_together, index_together).
"""
old_keys = self.old_model_keys | self.old_unmanaged_keys
added_models = self.new_model_keys - old_keys
added_unmanaged_models = self.new_unmanaged_keys - old_keys
all_added_models = chain(
sorted(added_models, key=self.swappable_first_key, reverse=True),
sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
)
for app_label, model_name in all_added_models:
model_state = self.to_state.models[app_label, model_name]
model_opts = self.new_apps.get_model(app_label, model_name)._meta
# Gather related fields
related_fields = {}
primary_key_rel = None
for field in model_opts.local_fields:
if field.remote_field:
if field.remote_field.model:
if field.primary_key:
primary_key_rel = field.remote_field.model
elif not field.remote_field.parent_link:
related_fields[field.name] = field
# through will be None on M2Ms on swapped-out models;
# we can treat lack of through as auto_created=True, though.
if (getattr(field.remote_field, "through", None) and
not field.remote_field.through._meta.auto_created):
related_fields[field.name] = field
for field in model_opts.local_many_to_many:
if field.remote_field.model:
related_fields[field.name] = field
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
related_fields[field.name] = field
# Are there indexes/unique|index_together to defer?
indexes = model_state.options.pop('indexes')
constraints = model_state.options.pop('constraints')
unique_together = model_state.options.pop('unique_together', None)
index_together = model_state.options.pop('index_together', None)
order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
# Depend on the deletion of any possible proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Depend on the other end of the primary key if it's a relation
if primary_key_rel:
dependencies.append((
primary_key_rel._meta.app_label,
primary_key_rel._meta.object_name,
None,
True
))
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[d for d in model_state.fields.items() if d[0] not in related_fields],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
dependencies=dependencies,
beginning=True,
)
# Don't add operations which modify the database for unmanaged models
if not model_opts.managed:
continue
# Generate operations for each related field
for name, field in sorted(related_fields.items()):
dependencies = self._get_dependencies_for_foreign_key(field)
# Depend on our own model being created
dependencies.append((app_label, model_name, None, True))
# Make operation
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=name,
field=field,
),
dependencies=list(set(dependencies)),
)
# Generate other opns
related_dependencies = [
(app_label, model_name, name, True)
for name in sorted(related_fields)
]
related_dependencies.append((app_label, model_name, None, True))
for index in indexes:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
dependencies=related_dependencies,
)
for constraint in constraints:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
dependencies=related_dependencies,
)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=unique_together,
),
dependencies=related_dependencies
)
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=index_together,
),
dependencies=related_dependencies
)
if order_with_respect_to:
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=order_with_respect_to,
),
dependencies=[
(app_label, model_name, order_with_respect_to, True),
(app_label, model_name, None, True),
]
)
# Fix relationships if the model changed from a proxy model to a
# concrete model.
if (app_label, model_name) in self.old_proxy_keys:
for related_object in model_opts.related_objects:
self.add_operation(
related_object.related_model._meta.app_label,
operations.AlterField(
model_name=related_object.related_model._meta.object_name,
name=related_object.field.name,
field=related_object.field,
),
dependencies=[(app_label, model_name, None, True)],
)
def generate_created_proxies(self):
"""
Make CreateModel statements for proxy models. Use the same statements
as for concrete models to reduce code duplication, but for proxy models
it's safe to skip all the pointless field handling and just emit an operation.
"""
added = self.new_proxy_keys - self.old_proxy_keys
for app_label, model_name in sorted(added):
model_state = self.to_state.models[app_label, model_name]
assert model_state.options.get("proxy")
# Depend on the deletion of any possible non-proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
# Depend on the deletion of any possible non-proxy version of us
dependencies=dependencies,
)
def generate_deleted_models(self):
"""
Find all deleted models (managed and unmanaged) and make delete
operations for them as well as separate operations to delete any
foreign key or M2M relationships (these are optimized later, if
possible).
Also bring forward removal of any model options that refer to
collections of fields - the inverse of generate_created_models().
"""
new_keys = self.new_model_keys | self.new_unmanaged_keys
deleted_models = self.old_model_keys - new_keys
deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models))
for app_label, model_name in all_deleted_models:
model_state = self.from_state.models[app_label, model_name]
model = self.old_apps.get_model(app_label, model_name)
# Gather related fields
related_fields = {}
for field in model._meta.local_fields:
if field.remote_field:
if field.remote_field.model:
related_fields[field.name] = field
# through will be None on M2Ms on swapped-out models;
# we can treat lack of through as auto_created=True, though.
if (getattr(field.remote_field, "through", None) and
not field.remote_field.through._meta.auto_created):
related_fields[field.name] = field
for field in model._meta.local_many_to_many:
if field.remote_field.model:
related_fields[field.name] = field
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
related_fields[field.name] = field
# Generate option removal first
unique_together = model_state.options.pop('unique_together', None)
index_together = model_state.options.pop('index_together', None)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=None,
)
)
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=None,
)
)
# Then remove each related field
for name in sorted(related_fields):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=name,
)
)
# Finally, remove the model.
# This depends on both the removal/alteration of all incoming fields
# and the removal of all its own related fields, and if it's
# a through model the field that references it.
dependencies = []
for related_object in model._meta.related_objects:
related_object_app_label = related_object.related_model._meta.app_label
object_name = related_object.related_model._meta.object_name
field_name = related_object.field.name
dependencies.append((related_object_app_label, object_name, field_name, False))
if not related_object.many_to_many:
dependencies.append((related_object_app_label, object_name, field_name, "alter"))
for name in sorted(related_fields):
dependencies.append((app_label, model_name, name, False))
# We're referenced in another field's through=
through_user = self.through_users.get((app_label, model_state.name_lower))
if through_user:
dependencies.append((through_user[0], through_user[1], through_user[2], False))
# Finally, make the operation, deduping any dependencies
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
dependencies=list(set(dependencies)),
)
def generate_deleted_proxies(self):
"""Make DeleteModel options for proxy models."""
deleted = self.old_proxy_keys - self.new_proxy_keys
for app_label, model_name in sorted(deleted):
model_state = self.from_state.models[app_label, model_name]
assert model_state.options.get("proxy")
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
)
def generate_renamed_fields(self):
"""Work out renamed fields."""
self.renamed_fields = {}
for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
# Scan to see if this is actually a rename!
field_dec = self.deep_deconstruct(field)
for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
if rem_app_label == app_label and rem_model_name == model_name:
old_field = old_model_state.fields[rem_field_name]
old_field_dec = self.deep_deconstruct(old_field)
if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]:
old_rel_to = old_field_dec[2]['to']
if old_rel_to in self.renamed_models_rel:
old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
old_field.set_attributes_from_name(rem_field_name)
old_db_column = old_field.get_attname_column()[1]
if (old_field_dec == field_dec or (
# Was the field renamed and db_column equal to the
# old field's column added?
old_field_dec[0:2] == field_dec[0:2] and
dict(old_field_dec[2], db_column=old_db_column) == field_dec[2])):
if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
self.add_operation(
app_label,
operations.RenameField(
model_name=model_name,
old_name=rem_field_name,
new_name=field_name,
)
)
self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
self.old_field_keys.add((app_label, model_name, field_name))
self.renamed_fields[app_label, model_name, field_name] = rem_field_name
break
def generate_added_fields(self):
"""Make AddField operations."""
for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
self._generate_added_field(app_label, model_name, field_name)
def _generate_added_field(self, app_label, model_name, field_name):
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
# Fields that are foreignkeys/m2ms depend on stuff
dependencies = []
if field.remote_field and field.remote_field.model:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
# You can't just add NOT NULL fields with no default or fields
# which don't allow empty strings as default.
time_fields = (models.DateField, models.DateTimeField, models.TimeField)
preserve_default = (
field.null or field.has_default() or field.many_to_many or
(field.blank and field.empty_strings_allowed) or
(isinstance(field, time_fields) and field.auto_now)
)
if not preserve_default:
field = field.clone()
if isinstance(field, time_fields) and field.auto_now_add:
field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
else:
field.default = self.questioner.ask_not_null_addition(field_name, model_name)
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
def generate_removed_fields(self):
"""Make RemoveField operations."""
for app_label, model_name, field_name in sorted(self.old_field_keys - self.new_field_keys):
self._generate_removed_field(app_label, model_name, field_name)
def _generate_removed_field(self, app_label, model_name, field_name):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=field_name,
),
# We might need to depend on the removal of an
# order_with_respect_to or index/unique_together operation;
# this is safely ignored if there isn't one
dependencies=[
(app_label, model_name, field_name, "order_wrt_unset"),
(app_label, model_name, field_name, "foo_together_change"),
],
)
def generate_altered_fields(self):
"""
Make AlterField operations, or possibly RemoveField/AddField if altering
isn't possible.
"""
for app_label, model_name, field_name in sorted(self.old_field_keys & self.new_field_keys):
# Did the field change?
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name)
new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
dependencies = []
# Implement any model renames on relations; these are handled by RenameModel
# so we need to exclude them from the comparison
if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None):
rename_key = (
new_field.remote_field.model._meta.app_label,
new_field.remote_field.model._meta.model_name,
)
if rename_key in self.renamed_models:
new_field.remote_field.model = old_field.remote_field.model
# Handle ForeignKey which can only have a single to_field.
remote_field_name = getattr(new_field.remote_field, 'field_name', None)
if remote_field_name:
to_field_rename_key = rename_key + (remote_field_name,)
if to_field_rename_key in self.renamed_fields:
# Repoint both model and field name because to_field
# inclusion in ForeignKey.deconstruct() is based on
# both.
new_field.remote_field.model = old_field.remote_field.model
new_field.remote_field.field_name = old_field.remote_field.field_name
# Handle ForeignObjects which can have multiple from_fields/to_fields.
from_fields = getattr(new_field, 'from_fields', None)
if from_fields:
from_rename_key = (app_label, model_name)
new_field.from_fields = tuple([
self.renamed_fields.get(from_rename_key + (from_field,), from_field)
for from_field in from_fields
])
new_field.to_fields = tuple([
self.renamed_fields.get(rename_key + (to_field,), to_field)
for to_field in new_field.to_fields
])
dependencies.extend(self._get_dependencies_for_foreign_key(new_field))
if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None):
rename_key = (
new_field.remote_field.through._meta.app_label,
new_field.remote_field.through._meta.model_name,
)
if rename_key in self.renamed_models:
new_field.remote_field.through = old_field.remote_field.through
old_field_dec = self.deep_deconstruct(old_field)
new_field_dec = self.deep_deconstruct(new_field)
if old_field_dec != new_field_dec:
both_m2m = old_field.many_to_many and new_field.many_to_many
neither_m2m = not old_field.many_to_many and not new_field.many_to_many
if both_m2m or neither_m2m:
# Either both fields are m2m or neither is
preserve_default = True
if (old_field.null and not new_field.null and not new_field.has_default() and
not new_field.many_to_many):
field = new_field.clone()
new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
if new_default is not models.NOT_PROVIDED:
field.default = new_default
preserve_default = False
else:
field = new_field
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
else:
# We cannot alter between m2m and concrete fields
self._generate_removed_field(app_label, model_name, field_name)
self._generate_added_field(app_label, model_name, field_name)
def create_altered_indexes(self):
option_name = operations.AddIndex.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_indexes = old_model_state.options[option_name]
new_indexes = new_model_state.options[option_name]
add_idx = [idx for idx in new_indexes if idx not in old_indexes]
rem_idx = [idx for idx in old_indexes if idx not in new_indexes]
self.altered_indexes.update({
(app_label, model_name): {
'added_indexes': add_idx, 'removed_indexes': rem_idx,
}
})
def generate_added_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes['added_indexes']:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
)
)
def generate_removed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes['removed_indexes']:
self.add_operation(
app_label,
operations.RemoveIndex(
model_name=model_name,
name=index.name,
)
)
def create_altered_constraints(self):
option_name = operations.AddConstraint.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_constraints = old_model_state.options[option_name]
new_constraints = new_model_state.options[option_name]
add_constraints = [c for c in new_constraints if c not in old_constraints]
rem_constraints = [c for c in old_constraints if c not in new_constraints]
self.altered_constraints.update({
(app_label, model_name): {
'added_constraints': add_constraints, 'removed_constraints': rem_constraints,
}
})
def generate_added_constraints(self):
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints['added_constraints']:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
)
)
def generate_removed_constraints(self):
for (app_label, model_name), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints['removed_constraints']:
self.add_operation(
app_label,
operations.RemoveConstraint(
model_name=model_name,
name=constraint.name,
)
)
def _get_dependencies_for_foreign_key(self, field):
# Account for FKs to swappable models
swappable_setting = getattr(field, 'swappable_setting', None)
if swappable_setting is not None:
dep_app_label = "__setting__"
dep_object_name = swappable_setting
else:
dep_app_label = field.remote_field.model._meta.app_label
dep_object_name = field.remote_field.model._meta.object_name
dependencies = [(dep_app_label, dep_object_name, None, True)]
if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
dependencies.append((
field.remote_field.through._meta.app_label,
field.remote_field.through._meta.object_name,
None,
True,
))
return dependencies
def _generate_altered_foo_together(self, operation):
option_name = operation.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
# We run the old version through the field renames to account for those
old_value = old_model_state.options.get(option_name)
old_value = {
tuple(
self.renamed_fields.get((app_label, model_name, n), n)
for n in unique
)
for unique in old_value
} if old_value else set()
new_value = new_model_state.options.get(option_name)
new_value = set(new_value) if new_value else set()
if old_value != new_value:
dependencies = []
for foo_togethers in new_value:
for field_name in foo_togethers:
field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
if field.remote_field and field.remote_field.model:
dependencies.extend(self._get_dependencies_for_foreign_key(field))
self.add_operation(
app_label,
operation(
name=model_name,
**{option_name: new_value}
),
dependencies=dependencies,
)
def generate_altered_unique_together(self):
self._generate_altered_foo_together(operations.AlterUniqueTogether)
def generate_altered_index_together(self):
self._generate_altered_foo_together(operations.AlterIndexTogether)
def generate_altered_db_table(self):
models_to_check = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_name = old_model_state.options.get('db_table')
new_db_table_name = new_model_state.options.get('db_table')
if old_db_table_name != new_db_table_name:
self.add_operation(
app_label,
operations.AlterModelTable(
name=model_name,
table=new_db_table_name,
)
)
def generate_altered_options(self):
"""
Work out if any non-schema-affecting options have changed and make an
operation to represent them in state changes (in case Python code in
migrations needs them).
"""
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys,
self.kept_unmanaged_keys,
# unmanaged converted to managed
self.old_unmanaged_keys & self.new_model_keys,
# managed converted to unmanaged
self.old_model_keys & self.new_unmanaged_keys,
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_options = {
key: value for key, value in old_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
new_options = {
key: value for key, value in new_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
if old_options != new_options:
self.add_operation(
app_label,
operations.AlterModelOptions(
name=model_name,
options=new_options,
)
)
def generate_altered_order_with_respect_to(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if (old_model_state.options.get("order_with_respect_to") !=
new_model_state.options.get("order_with_respect_to")):
# Make sure it comes second if we're adding
# (removal dependency is part of RemoveField)
dependencies = []
if new_model_state.options.get("order_with_respect_to"):
dependencies.append((
app_label,
model_name,
new_model_state.options["order_with_respect_to"],
True,
))
# Actually generate the operation
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=new_model_state.options.get('order_with_respect_to'),
),
dependencies=dependencies,
)
def generate_altered_managers(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get((app_label, model_name), model_name)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.managers != new_model_state.managers:
self.add_operation(
app_label,
operations.AlterModelManagers(
name=model_name,
managers=new_model_state.managers,
)
)
def arrange_for_graph(self, changes, graph, migration_name=None):
"""
Take a result from changes() and a MigrationGraph, and fix the names
and dependencies of the changes so they extend the graph from the leaf
nodes for each app.
"""
leaves = graph.leaf_nodes()
name_map = {}
for app_label, migrations in list(changes.items()):
if not migrations:
continue
# Find the app label's current leaf node
app_leaf = None
for leaf in leaves:
if leaf[0] == app_label:
app_leaf = leaf
break
# Do they want an initial migration for this app?
if app_leaf is None and not self.questioner.ask_initial(app_label):
# They don't.
for migration in migrations:
name_map[(app_label, migration.name)] = (app_label, "__first__")
del changes[app_label]
continue
# Work out the next number in the sequence
if app_leaf is None:
next_number = 1
else:
next_number = (self.parse_number(app_leaf[1]) or 0) + 1
# Name each migration
for i, migration in enumerate(migrations):
if i == 0 and app_leaf:
migration.dependencies.append(app_leaf)
if i == 0 and not app_leaf:
new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
else:
new_name = "%04i_%s" % (
next_number,
migration_name or self.suggest_name(migration.operations)[:100],
)
name_map[(app_label, migration.name)] = (app_label, new_name)
next_number += 1
migration.name = new_name
# Now fix dependencies
for migrations in changes.values():
for migration in migrations:
migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
return changes
def _trim_to_apps(self, changes, app_labels):
"""
Take changes from arrange_for_graph() and a set of app labels, and return
a modified set of changes that trims out as many migrations not in
app_labels as possible. Note that some other migrations may
still be present as they may be required dependencies.
"""
# Gather other app dependencies in a first pass
app_dependencies = {}
for app_label, migrations in changes.items():
for migration in migrations:
for dep_app_label, name in migration.dependencies:
app_dependencies.setdefault(app_label, set()).add(dep_app_label)
required_apps = set(app_labels)
# Keep resolving till there's no change
old_required_apps = None
while old_required_apps != required_apps:
old_required_apps = set(required_apps)
required_apps.update(*[app_dependencies.get(app_label, ()) for app_label in required_apps])
# Remove all migrations that aren't needed
for app_label in list(changes):
if app_label not in required_apps:
del changes[app_label]
return changes
@classmethod
def suggest_name(cls, ops):
"""
Given a set of operations, suggest a name for the migration they might
represent. Names are not guaranteed to be unique, but put some effort
into the fallback name to avoid VCS conflicts if possible.
"""
if len(ops) == 1:
if isinstance(ops[0], operations.CreateModel):
return ops[0].name_lower
elif isinstance(ops[0], operations.DeleteModel):
return "delete_%s" % ops[0].name_lower
elif isinstance(ops[0], operations.AddField):
return "%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
elif isinstance(ops[0], operations.RemoveField):
return "remove_%s_%s" % (ops[0].model_name_lower, ops[0].name_lower)
elif ops:
if all(isinstance(o, operations.CreateModel) for o in ops):
return "_".join(sorted(o.name_lower for o in ops))
return "auto_%s" % get_migration_name_timestamp()
@classmethod
def parse_number(cls, name):
"""
Given a migration name, try to extract a number from the beginning of
it. If no number is found, return None.
"""
match = re.match(r'^\d+', name)
if match:
return int(match[0])
return None
5f028616dd09e0facb543a295b937105cc2ae0bb768959977cb3c8962d5cd627
"""
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import operator
import warnings
from collections import namedtuple
from functools import lru_cache
from itertools import chain
import django
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY, IntegrityError, NotSupportedError, connections,
router, transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, sql
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, Expression, F, Value, When
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE
from django.db.models.utils import resolve_callables
from django.utils import timezone
from django.utils.functional import cached_property, partition
# The maximum number of results to fetch in a get() query.
MAX_GET_RESULTS = 21
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
class BaseIterable:
def __init__(self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
class ModelIterable(BaseIterable):
"""Iterable that yields a model instance for each row."""
def __iter__(self):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info,
compiler.annotation_col_map)
model_cls = klass_info['model']
select_fields = klass_info['select_fields']
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
init_list = [f[0].target.attname
for f in select[model_fields_start:model_fields_end]]
related_populators = get_related_populators(klass_info, select, db)
known_related_objects = [
(field, related_objs, operator.attrgetter(*[
field.attname
if from_field == 'self' else
queryset.model._meta.get_field(from_field).attname
for from_field in field.from_fields
])) for field, related_objs in queryset._known_related_objects.items()
]
for row in compiler.results_iter(results):
obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end])
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
for attr_name, col_pos in annotation_col_map.items():
setattr(obj, attr_name, row[col_pos])
# Add the known related objects to the model.
for field, rel_objs, rel_getter in known_related_objects:
# Avoid overwriting objects loaded by, e.g., select_related().
if field.is_cached(obj):
continue
rel_obj_id = rel_getter(obj)
try:
rel_obj = rel_objs[rel_obj_id]
except KeyError:
pass # May happen in qs1 | qs2 scenarios.
else:
setattr(obj, field.name, rel_obj)
yield obj
class ValuesIterable(BaseIterable):
"""
Iterable returned by QuerySet.values() that yields a dict for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
indexes = range(len(names))
for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
yield {names[i]: row[i] for i in indexes}
class ValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
if queryset._fields:
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
fields = [*queryset._fields, *(f for f in query.annotation_select if f not in queryset._fields)]
if fields != names:
# Reorder according to fields.
index_map = {name: idx for idx, name in enumerate(names)}
rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
return map(
rowfactory,
compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
)
return compiler.results_iter(tuple_expected=True, chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size)
class NamedValuesListIterable(ValuesListIterable):
"""
Iterable returned by QuerySet.values_list(named=True) that yields a
namedtuple for each row.
"""
@staticmethod
@lru_cache()
def create_namedtuple_class(*names):
# Cache namedtuple() with @lru_cache() since it's too slow to be
# called for every QuerySet evaluation.
return namedtuple('Row', names)
def __iter__(self):
queryset = self.queryset
if queryset._fields:
names = queryset._fields
else:
query = queryset.query
names = [*query.extra_select, *query.values_select, *query.annotation_select]
tuple_class = self.create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
yield new(tuple_class, row)
class FlatValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=True) that yields single
values.
"""
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size):
yield row[0]
class QuerySet:
"""Represent a lazy database lookup for a set of objects."""
def __init__(self, model=None, query=None, using=None, hints=None):
self.model = model
self._db = using
self._hints = hints or {}
self._query = query or sql.Query(self.model)
self._result_cache = None
self._sticky_filter = False
self._for_write = False
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@property
def query(self):
if self._deferred_filter:
negate, args, kwargs = self._deferred_filter
self._filter_or_exclude_inplace(negate, *args, **kwargs)
self._deferred_filter = None
return self._query
@query.setter
def query(self, value):
self._query = value
def as_manager(cls):
# Address the circular dependency between `QuerySet` and `Manager`.
from django.db.models.manager import Manager
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == '_result_cache':
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
# Force the cache to be fully populated.
self._fetch_all()
return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: django.__version__}
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled queryset instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled queryset instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
self.__dict__.update(state)
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return '<%s %r>' % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler.execute_sql()
- Returns 100 rows at a time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql.compiler.results_iter()
- Returns one row at a time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location.
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
'QuerySet indices must be integers or slices, not %s.'
% type(k).__name__
)
assert ((not isinstance(k, slice) and (k >= 0)) or
(isinstance(k, slice) and (k.start is None or k.start >= 0) and
(k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
if self._result_cache is not None:
return self._result_cache[k]
if isinstance(k, slice):
qs = self._chain()
if k.start is not None:
start = int(k.start)
else:
start = None
if k.stop is not None:
stop = int(k.stop)
else:
stop = None
qs.query.set_limits(start, stop)
return list(qs)[::k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
qs._fetch_all()
return qs._result_cache[0]
def __class_getitem__(cls, *args, **kwargs):
return cls
def __and__(self, other):
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._chain()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = self if self.query.can_filter() else self.model._base_manager.filter(pk__in=self.values('pk'))
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values('pk'))
combined.query.combine(other.query, sql.OR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
yield from self._iterable_class(self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size)
def iterator(self, chunk_size=2000):
"""
An iterator over the results from applying this QuerySet to the
database.
"""
if chunk_size <= 0:
raise ValueError('Chunk size must be strictly positive.')
use_chunked_fetch = not connections[self.db].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS')
return self._iterator(use_chunked_fetch, chunk_size)
def aggregate(self, *args, **kwargs):
"""
Return a dictionary containing the calculations (aggregation)
over the current queryset.
If args is present the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
self._validate_values_are_expressions((*args, *kwargs.values()), method_name='aggregate')
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
# attribute.
try:
arg.default_alias
except (AttributeError, TypeError):
raise TypeError("Complex aggregates require an alias")
kwargs[arg.default_alias] = arg
query = self.query.chain()
for (alias, aggregate_expr) in kwargs.items():
query.add_annotation(aggregate_expr, alias, is_summary=True)
if not query.annotations[alias].contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
return query.get_aggregation(self.db, kwargs)
def count(self):
"""
Perform a SELECT COUNT() and return the number of records as an
integer.
If the QuerySet is already fully cached, return the length of the
cached results set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
def get(self, *args, **kwargs):
"""
Perform the query and return a single object matching the given
keyword arguments.
"""
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
if not clone.query.select_for_update or connections[clone.db].features.supports_select_for_update_with_limit:
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." %
self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
'get() returned more than one %s -- it returned %s!' % (
self.model._meta.object_name,
num if not limit or num < limit else 'more than %s' % (limit - 1),
)
)
def create(self, **kwargs):
"""
Create a new object with the given kwargs, saving it to the database
and returning the created object.
"""
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
return obj
def _populate_pk_values(self, objs):
for obj in objs:
if obj.pk is None:
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
def bulk_create(self, objs, batch_size=None, ignore_conflicts=False):
"""
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
Multi-table models are not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
# you can't insert into the child tables which reference this. There
# are two workarounds:
# 1) This could be implemented if you didn't have an autoincrement pk
# 2) You could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back and then doing a single bulk
# insert into the childmost table.
# We currently set the primary keys on the objects when using
# PostgreSQL via the RETURNING ID clause. It should be possible for
# Oracle as well, but the semantics for extracting the primary keys is
# trickier so it's not done yet.
assert batch_size is None or batch_size > 0
# Check that the parents share the same concrete model with our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
# would not identify that case as involving multiple tables.
for parent in self.model._meta.get_parent_list():
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
if not objs:
return objs
self._for_write = True
connection = connections[self.db]
opts = self.model._meta
fields = opts.concrete_fields
objs = list(objs)
self._populate_pk_values(objs)
with transaction.atomic(using=self.db, savepoint=False):
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
returned_columns = self._batched_insert(
objs_with_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
if field != opts.pk:
setattr(obj_with_pk, field.attname, result)
for obj_with_pk in objs_with_pk:
obj_with_pk._state.adding = False
obj_with_pk._state.db = self.db
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts,
)
if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts:
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_without_pk, field.attname, result)
obj_without_pk._state.adding = False
obj_without_pk._state.db = self.db
return objs
def bulk_update(self, objs, fields, batch_size=None):
"""
Update the given fields in each of the given objects in the database.
"""
        if batch_size is not None and batch_size <= 0:
raise ValueError('Batch size must be a positive integer.')
if not fields:
raise ValueError('Field names must be given to bulk_update().')
objs = tuple(objs)
if any(obj.pk is None for obj in objs):
raise ValueError('All bulk_update() objects must have a primary key set.')
fields = [self.model._meta.get_field(name) for name in fields]
if any(not f.concrete or f.many_to_many for f in fields):
raise ValueError('bulk_update() can only be used with concrete fields.')
if any(f.primary_key for f in fields):
raise ValueError('bulk_update() cannot be used with primary key fields.')
if not objs:
return
# PK is used twice in the resulting update query, once in the filter
# and once in the WHEN. Each field will also have one CAST.
max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
requires_casting = connections[self.db].features.requires_casted_case_in_updates
batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size))
updates = []
for batch_objs in batches:
update_kwargs = {}
for field in fields:
when_statements = []
for obj in batch_objs:
attr = getattr(obj, field.attname)
if not isinstance(attr, Expression):
attr = Value(attr, output_field=field)
when_statements.append(When(pk=obj.pk, then=attr))
case_statement = Case(*when_statements, output_field=field)
if requires_casting:
case_statement = Cast(case_statement, output_field=field)
update_kwargs[field.attname] = case_statement
updates.append(([obj.pk for obj in batch_objs], update_kwargs))
with transaction.atomic(using=self.db, savepoint=False):
for pks, update_kwargs in updates:
self.filter(pk__in=pks).update(**update_kwargs)
bulk_update.alters_data = True
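    # Usage sketch (same hypothetical ``Entry`` model): every object must
    # already have a pk; the update runs as batched CASE/WHEN statements.
    #
    #     entries = list(Entry.objects.all())
    #     for entry in entries:
    #         entry.headline = entry.headline.title()
    #     Entry.objects.bulk_update(entries, ['headline'], batch_size=500)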
def get_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, creating one if necessary.
Return a tuple of (object, created), where created is a boolean
specifying whether an object was created.
"""
# The get() needs to be targeted at the write database in order
# to avoid potential transaction consistency problems.
self._for_write = True
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
return self._create_object_from_params(kwargs, params)
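    # Usage sketch (hypothetical ``Person`` model): the kwargs form the
    # lookup; ``defaults`` is only consulted when a new row is created.
    #
    #     person, created = Person.objects.get_or_create(
    #         first_name='John',
    #         last_name='Lennon',
    #         defaults={'birthday': datetime.date(1940, 10, 9)},
    #     )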
def update_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, updating one with defaults
if it exists, otherwise create a new one.
Return a tuple (object, created), where created is a boolean
specifying whether an object was created.
"""
defaults = defaults or {}
self._for_write = True
with transaction.atomic(using=self.db):
try:
obj = self.select_for_update().get(**kwargs)
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
# Lock the row so that a concurrent update is blocked until
# after update_or_create() has performed its save.
obj, created = self._create_object_from_params(kwargs, params, lock=True)
if created:
return obj, created
for k, v in resolve_callables(defaults):
setattr(obj, k, v)
obj.save(using=self.db)
return obj, False
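    # Usage sketch (same hypothetical ``Person`` model): the matched row is
    # locked via select_for_update(), so concurrent calls serialize.
    #
    #     person, created = Person.objects.update_or_create(
    #         first_name='John',
    #         last_name='Lennon',
    #         defaults={'first_name': 'Bob'},
    #     )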
def _create_object_from_params(self, lookup, params, lock=False):
"""
        Try to create an object using the passed params. Used by get_or_create()
and update_or_create().
"""
try:
with transaction.atomic(using=self.db):
params = dict(resolve_callables(params))
obj = self.create(**params)
return obj, True
except IntegrityError:
try:
qs = self.select_for_update() if lock else self
return qs.get(**lookup), False
except self.model.DoesNotExist:
pass
raise
def _extract_model_params(self, defaults, **kwargs):
"""
Prepare `params` for creating a model instance based on the given
kwargs; for use by get_or_create() and update_or_create().
"""
defaults = defaults or {}
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
params.update(defaults)
property_names = self.model._meta._property_names
invalid_params = []
for param in params:
try:
self.model._meta.get_field(param)
except exceptions.FieldDoesNotExist:
# It's okay to use a model's property if it has a setter.
if not (param in property_names and getattr(self.model, param).fset):
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
"Invalid field name(s) for model %s: '%s'." % (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
))
return params
def _earliest(self, *fields):
"""
Return the earliest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if fields:
order_by = fields
else:
order_by = getattr(self.model._meta, 'get_latest_by')
if order_by and not isinstance(order_by, (tuple, list)):
order_by = (order_by,)
if order_by is None:
raise ValueError(
"earliest() and latest() require either fields as positional "
"arguments or 'get_latest_by' in the model's Meta."
)
assert not self.query.is_sliced, \
"Cannot change a query once a slice has been taken."
obj = self._chain()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force_empty=True)
obj.query.add_ordering(*order_by)
return obj.get()
def earliest(self, *fields):
return self._earliest(*fields)
def latest(self, *fields):
return self.reverse()._earliest(*fields)
def first(self):
"""Return the first object of a query or None if no match is found."""
for obj in (self if self.ordered else self.order_by('pk'))[:1]:
return obj
def last(self):
"""Return the last object of a query or None if no match is found."""
for obj in (self.reverse() if self.ordered else self.order_by('-pk'))[:1]:
return obj
def in_bulk(self, id_list=None, *, field_name='pk'):
"""
Return a dictionary mapping each of the given IDs to the object with
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
"""
assert not self.query.is_sliced, \
"Cannot use 'limit' or 'offset' with in_bulk"
opts = self.model._meta
unique_fields = [
constraint.fields[0]
for constraint in opts.total_unique_constraints
if len(constraint.fields) == 1
]
if (
field_name != 'pk' and
not opts.get_field(field_name).unique and
field_name not in unique_fields
):
raise ValueError("in_bulk()'s field_name must be a unique field but %r isn't." % field_name)
if id_list is not None:
if not id_list:
return {}
filter_key = '{}__in'.format(field_name)
batch_size = connections[self.db].features.max_query_params
id_list = tuple(id_list)
# If the database has a limit on the number of query parameters
# (e.g. SQLite), retrieve objects in batches if necessary.
if batch_size and batch_size < len(id_list):
qs = ()
for offset in range(0, len(id_list), batch_size):
batch = id_list[offset:offset + batch_size]
qs += tuple(self.filter(**{filter_key: batch}).order_by())
else:
qs = self.filter(**{filter_key: id_list}).order_by()
else:
qs = self._chain()
return {getattr(obj, field_name): obj for obj in qs}
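    # Usage sketch (hypothetical ``Blog`` model with a unique ``slug``):
    #
    #     Blog.objects.in_bulk([1, 2])             # {1: <Blog: ...>, 2: ...}
    #     Blog.objects.in_bulk(field_name='slug')  # keyed by slug instead
    #
    # field_name must be the pk, a unique=True field, or a single-field
    # UniqueConstraint, as validated above.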
def delete(self):
"""Delete the records in the current QuerySet."""
self._not_support_combined_queries('delete')
assert not self.query.is_sliced, \
"Cannot use 'limit' or 'offset' with delete."
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
del_query = self._chain()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force_empty=True)
collector = Collector(using=del_query.db)
collector.collect(del_query)
deleted, _rows_count = collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
return deleted, _rows_count
delete.alters_data = True
delete.queryset_only = True
def _raw_delete(self, using):
"""
Delete objects found from the given queryset in single direct SQL
query. No signals are sent and there is no protection for cascades.
"""
query = self.query.clone()
query.__class__ = sql.DeleteQuery
cursor = query.get_compiler(using).execute_sql(CURSOR)
if cursor:
with cursor:
return cursor.rowcount
return 0
_raw_delete.alters_data = True
def update(self, **kwargs):
"""
Update all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
self._not_support_combined_queries('update')
assert not self.query.is_sliced, \
"Cannot update a query once a slice has been taken."
self._for_write = True
query = self.query.chain(sql.UpdateQuery)
query.add_update_values(kwargs)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
with transaction.mark_for_rollback_on_error(using=self.db):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
update.alters_data = True
def _update(self, values):
"""
A version of update() that accepts field objects instead of field names.
Used primarily for model saving and not intended for use by general
code (it requires too much poking around at model internals to be
useful at that level).
"""
assert not self.query.is_sliced, \
"Cannot update a query once a slice has been taken."
query = self.query.chain(sql.UpdateQuery)
query.add_update_fields(values)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
_update.alters_data = True
_update.queryset_only = False
def exists(self):
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
def _prefetch_related_objects(self):
# This method can only be called once the result cache has been filled.
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def explain(self, *, format=None, **options):
return self.query.explain(using=self.db, format=format, **options)
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=None, translations=None, using=None):
if using is None:
using = self.db
qs = RawQuerySet(raw_query, model=self.model, params=params, translations=translations, using=using)
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
def _values(self, *fields, **expressions):
clone = self._chain()
if expressions:
clone = clone.annotate(**expressions)
clone._fields = fields
clone.query.set_values(fields)
return clone
def values(self, *fields, **expressions):
fields += tuple(expressions)
clone = self._values(*fields, **expressions)
clone._iterable_class = ValuesIterable
return clone
def values_list(self, *fields, flat=False, named=False):
if flat and named:
raise TypeError("'flat' and 'named' can't be used together.")
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
field_names = {f for f in fields if not hasattr(f, 'resolve_expression')}
_fields = []
expressions = {}
counter = 1
for field in fields:
if hasattr(field, 'resolve_expression'):
field_id_prefix = getattr(field, 'default_alias', field.__class__.__name__.lower())
while True:
field_id = field_id_prefix + str(counter)
counter += 1
if field_id not in field_names:
break
expressions[field_id] = field
_fields.append(field_id)
else:
_fields.append(field)
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
NamedValuesListIterable if named
else FlatValuesListIterable if flat
else ValuesListIterable
)
return clone
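    # Usage sketch (hypothetical ``Entry`` model):
    #
    #     Entry.objects.values('id', 'headline')            # dicts
    #     Entry.objects.values_list('id', flat=True)        # bare values
    #     Entry.objects.values_list('id', 'headline', named=True)  # namedtuples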
def dates(self, field_name, kind, order='ASC'):
"""
Return a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
assert kind in ('year', 'month', 'week', 'day'), \
"'kind' must be one of 'year', 'month', 'week', or 'day'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
return self.annotate(
datefield=Trunc(field_name, kind, output_field=DateField()),
plain_field=F(field_name)
).values_list(
'datefield', flat=True
).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datefield')
def datetimes(self, field_name, kind, order='ASC', tzinfo=None, is_dst=None):
"""
Return a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
assert kind in ('year', 'month', 'week', 'day', 'hour', 'minute', 'second'), \
"'kind' must be one of 'year', 'month', 'week', 'day', 'hour', 'minute', or 'second'."
assert order in ('ASC', 'DESC'), \
"'order' must be either 'ASC' or 'DESC'."
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
return self.annotate(
datetimefield=Trunc(
field_name,
kind,
output_field=DateTimeField(),
tzinfo=tzinfo,
is_dst=is_dst,
),
plain_field=F(field_name)
).values_list(
'datetimefield', flat=True
).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield')
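    # Usage sketch for dates()/datetimes() above (hypothetical ``Entry``
    # model with a ``pub_date`` field):
    #
    #     Entry.objects.dates('pub_date', 'month', order='DESC')
    #     Entry.objects.datetimes('pub_date', 'hour')
    #
    # Both return distinct truncated values; with USE_TZ, datetimes() uses
    # the given tzinfo or the current timezone.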
def none(self):
"""Return an empty QuerySet."""
clone = self._chain()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
def all(self):
"""
Return a new QuerySet that is a copy of the current one. This allows a
QuerySet to proxy for a model manager in some cases.
"""
return self._chain()
def filter(self, *args, **kwargs):
"""
Return a new QuerySet instance with the args ANDed to the existing
set.
"""
self._not_support_combined_queries('filter')
return self._filter_or_exclude(False, *args, **kwargs)
def exclude(self, *args, **kwargs):
"""
Return a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
self._not_support_combined_queries('exclude')
return self._filter_or_exclude(True, *args, **kwargs)
def _filter_or_exclude(self, negate, *args, **kwargs):
if args or kwargs:
assert not self.query.is_sliced, \
"Cannot filter a query once a slice has been taken."
clone = self._chain()
if self._defer_next_filter:
self._defer_next_filter = False
clone._deferred_filter = negate, args, kwargs
else:
clone._filter_or_exclude_inplace(negate, *args, **kwargs)
return clone
def _filter_or_exclude_inplace(self, negate, *args, **kwargs):
if negate:
self._query.add_q(~Q(*args, **kwargs))
else:
self._query.add_q(Q(*args, **kwargs))
def complex_filter(self, filter_obj):
"""
Return a new QuerySet instance with filter_obj added to the filters.
filter_obj can be a Q object or a dictionary of keyword lookup
arguments.
This exists to support framework features such as 'limit_choices_to',
and usually it will be more natural to use other methods.
"""
if isinstance(filter_obj, Q):
clone = self._chain()
clone.query.add_q(filter_obj)
return clone
else:
return self._filter_or_exclude(False, **filter_obj)
def _combinator_query(self, combinator, *other_qs, all=False):
# Clone the query to inherit the select list and everything
clone = self._chain()
# Clear limits and ordering so they can be reapplied
clone.query.clear_ordering(True)
clone.query.clear_limits()
clone.query.combined_queries = (self.query,) + tuple(qs.query for qs in other_qs)
clone.query.combinator = combinator
clone.query.combinator_all = all
return clone
def union(self, *other_qs, all=False):
# If the query is an EmptyQuerySet, combine all nonempty querysets.
if isinstance(self, EmptyQuerySet):
qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
return qs[0]._combinator_query('union', *qs[1:], all=all) if qs else self
return self._combinator_query('union', *other_qs, all=all)
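    # Usage sketch: combined querysets only allow a limited set of further
    # operations (see _not_support_combined_queries() below).
    #
    #     qs = Entry.objects.filter(pk=1).union(Entry.objects.filter(pk=2))
    #     qs.order_by('pk')       # allowed on the combined query
    #     qs.filter(pk__gt=0)     # raises NotSupportedError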
def intersection(self, *other_qs):
# If any query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
for other in other_qs:
if isinstance(other, EmptyQuerySet):
return other
return self._combinator_query('intersection', *other_qs)
def difference(self, *other_qs):
# If the query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
return self._combinator_query('difference', *other_qs)
def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
if nowait and skip_locked:
raise ValueError('The nowait option cannot be used with skip_locked.')
obj = self._chain()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
obj.query.select_for_update_skip_locked = skip_locked
obj.query.select_for_update_of = of
obj.query.select_for_no_key_update = no_key
return obj
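    # Usage sketch: select_for_update() must be evaluated inside a
    # transaction; evaluating it with autocommit on raises an error.
    #
    #     with transaction.atomic():
    #         entry = Entry.objects.select_for_update(skip_locked=True).get(pk=1)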
def select_related(self, *fields):
"""
Return a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, clear the list.
"""
self._not_support_combined_queries('select_related')
if self._fields is not None:
raise TypeError("Cannot call select_related() after .values() or .values_list()")
obj = self._chain()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
"""
Return a new QuerySet instance that will prefetch the specified
Many-To-One and Many-To-Many related objects when the QuerySet is
evaluated.
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
self._not_support_combined_queries('prefetch_related')
clone = self._chain()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
for lookup in lookups:
if isinstance(lookup, Prefetch):
lookup = lookup.prefetch_to
lookup = lookup.split(LOOKUP_SEP, 1)[0]
if lookup in self.query._filtered_relations:
raise ValueError('prefetch_related() is not supported with FilteredRelation.')
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
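    # Usage sketch (hypothetical ``Author`` model with a ``books`` m2m):
    #
    #     Author.objects.prefetch_related('books')
    #     Author.objects.prefetch_related(
    #         Prefetch('books', queryset=Book.objects.order_by('title')),
    #     )
    #     Author.objects.prefetch_related(None)   # clears the lookups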
def annotate(self, *args, **kwargs):
"""
Return a query set in which the returned objects have been annotated
with extra data or aggregations.
"""
self._not_support_combined_queries('annotate')
self._validate_values_are_expressions(args + tuple(kwargs.values()), method_name='annotate')
annotations = {}
for arg in args:
# The default_alias property may raise a TypeError.
try:
if arg.default_alias in kwargs:
raise ValueError("The named annotation '%s' conflicts with the "
"default name for another annotation."
% arg.default_alias)
except TypeError:
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
annotations.update(kwargs)
clone = self._chain()
names = self._fields
if names is None:
names = set(chain.from_iterable(
(field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
for field in self.model._meta.get_fields()
))
for alias, annotation in annotations.items():
if alias in names:
raise ValueError("The annotation '%s' conflicts with a field on "
"the model." % alias)
if isinstance(annotation, FilteredRelation):
clone.query.add_filtered_relation(annotation, alias)
else:
clone.query.add_annotation(annotation, alias, is_summary=False)
for alias, annotation in clone.query.annotations.items():
if alias in annotations and annotation.contains_aggregate:
if clone._fields is None:
clone.query.group_by = True
else:
clone.query.set_group_by()
break
return clone
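    # Usage sketch (hypothetical ``Author`` model with a ``books`` relation):
    #
    #     from django.db.models import Count
    #
    #     Author.objects.annotate(num_books=Count('books'))
    #     Author.objects.annotate(Count('books'))   # default alias: books__count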
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
assert not self.query.is_sliced, \
"Cannot reorder a query once a slice has been taken."
obj = self._chain()
obj.query.clear_ordering(force_empty=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Return a new QuerySet instance that will select only distinct results.
"""
self._not_support_combined_queries('distinct')
assert not self.query.is_sliced, \
"Cannot create distinct fields once a slice has been taken."
obj = self._chain()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(self, select=None, where=None, params=None, tables=None,
order_by=None, select_params=None):
"""Add extra SQL fragments to the query."""
self._not_support_combined_queries('extra')
assert not self.query.is_sliced, \
"Cannot change a query once a slice has been taken"
clone = self._chain()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""Reverse the ordering of the QuerySet."""
if self.query.is_sliced:
raise TypeError('Cannot reverse a query once a slice has been taken.')
clone = self._chain()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defer the loading of data for certain fields until they are accessed.
Add the set of deferred fields to any existing set of deferred fields.
The only exception to this is if None is passed in as the only
        parameter, in which case all deferrals are removed.
"""
self._not_support_combined_queries('defer')
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._chain()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer(). Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
self._not_support_combined_queries('only')
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
            # None may only be passed to defer(), not only(), to reset
            # deferred loading. That won't stop people from trying, so be
            # explicit.
raise TypeError("Cannot pass None as an argument to only().")
for field in fields:
field = field.split(LOOKUP_SEP, 1)[0]
if field in self.query._filtered_relations:
raise ValueError('only() is not supported with FilteredRelation.')
clone = self._chain()
clone.query.add_immediate_loading(fields)
return clone
def using(self, alias):
"""Select which database this QuerySet should execute against."""
clone = self._chain()
clone._db = alias
return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@property
def ordered(self):
"""
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model (or is empty).
"""
if isinstance(self, EmptyQuerySet):
return True
if self.query.extra_order_by or self.query.order_by:
return True
elif self.query.default_ordering and self.query.get_meta().ordering:
return True
else:
return False
@property
def db(self):
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
def _insert(self, objs, fields, returning_fields=None, raw=False, using=None, ignore_conflicts=False):
"""
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
if using is None:
using = self.db
query = sql.InsertQuery(self.model, ignore_conflicts=ignore_conflicts)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(returning_fields)
_insert.alters_data = True
_insert.queryset_only = False
def _batched_insert(self, objs, fields, batch_size, ignore_conflicts=False):
"""
Helper method for bulk_create() to insert objs one batch at a time.
"""
if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts:
raise NotSupportedError('This database backend does not support ignoring conflicts.')
ops = connections[self.db].ops
max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
inserted_rows = []
bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert
for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]:
if bulk_return and not ignore_conflicts:
inserted_rows.extend(self._insert(
item, fields=fields, using=self.db,
returning_fields=self.model._meta.db_returning_fields,
ignore_conflicts=ignore_conflicts,
))
else:
self._insert(item, fields=fields, using=self.db, ignore_conflicts=ignore_conflicts)
return inserted_rows
def _chain(self, **kwargs):
"""
Return a copy of the current QuerySet that's ready for another
operation.
"""
obj = self._clone()
if obj._sticky_filter:
obj.query.filter_is_sticky = True
obj._sticky_filter = False
obj.__dict__.update(kwargs)
return obj
def _clone(self):
"""
Return a copy of the current QuerySet. A lightweight alternative
to deepcopy().
"""
c = self.__class__(model=self.model, query=self.query.chain(), using=self._db, hints=self._hints)
c._sticky_filter = self._sticky_filter
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c._iterable_class = self._iterable_class
c._fields = self._fields
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self._iterable_class(self))
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def _next_is_sticky(self):
"""
Indicate that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
This doesn't return a clone of the current QuerySet (it returns
"self"). The method is only used internally and should be immediately
followed by a filter() that does create a clone.
"""
self._sticky_filter = True
return self
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select) or
set(self.query.extra_select) != set(other.query.extra_select) or
set(self.query.annotation_select) != set(other.query.annotation_select)):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
)
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def resolve_expression(self, *args, **kwargs):
if self._fields and len(self._fields) > 1:
            # A values() queryset can only be used as a nested query
            # if it's set up to select only a single field.
raise TypeError('Cannot use multi-field values as a filter value.')
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
resolve_expression.queryset_only = True
def _add_hints(self, **hints):
"""
Update hinting information for use by routers. Add new key/values or
overwrite existing key/values.
"""
self._hints.update(hints)
def _has_filters(self):
"""
Check if this QuerySet has any filtering going on. This isn't
        equivalent to checking if all objects are present in results, for
example, qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()
@staticmethod
def _validate_values_are_expressions(values, method_name):
invalid_args = sorted(str(arg) for arg in values if not hasattr(arg, 'resolve_expression'))
if invalid_args:
raise TypeError(
'QuerySet.%s() received non-expression(s): %s.' % (
method_name,
', '.join(invalid_args),
)
)
def _not_support_combined_queries(self, operation_name):
if self.query.combinator:
raise NotSupportedError(
'Calling QuerySet.%s() after %s() is not supported.'
% (operation_name, self.query.combinator)
)
class InstanceCheckMeta(type):
def __instancecheck__(self, instance):
return isinstance(instance, QuerySet) and instance.query.is_empty()
class EmptyQuerySet(metaclass=InstanceCheckMeta):
"""
    Marker class for checking whether a queryset is empty via .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""
def __init__(self, *args, **kwargs):
raise TypeError("EmptyQuerySet can't be instantiated")
class RawQuerySet:
"""
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
def __init__(self, raw_query, model=None, query=None, params=None,
translations=None, using=None, hints=None):
self.raw_query = raw_query
self.model = model
self._db = using
self._hints = hints or {}
self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
self.params = params or ()
self.translations = translations or {}
self._result_cache = None
self._prefetch_related_lookups = ()
self._prefetch_done = False
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.identifier_converter
model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns]
annotation_fields = [(column, pos) for pos, column in enumerate(self.columns)
if column not in self.model_fields]
model_init_order = [self.columns.index(converter(f.column)) for f in model_init_fields]
model_init_names = [f.attname for f in model_init_fields]
return model_init_names, model_init_order, annotation_fields
def prefetch_related(self, *lookups):
"""Same as QuerySet.prefetch_related()"""
clone = self._clone()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
def _prefetch_related_objects(self):
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def _clone(self):
"""Same as QuerySet._clone()"""
c = self.__class__(
self.raw_query, model=self.model, query=self.query, params=self.params,
translations=self.translations, using=self._db, hints=self._hints
)
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self.iterator())
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __iter__(self):
self._fetch_all()
return iter(self._result_cache)
def iterator(self):
# Cache some things for performance reasons outside the loop.
db = self.db
compiler = connections[db].ops.compiler('SQLCompiler')(
self.query, connections[db], db
)
query = iter(self.query)
try:
model_init_names, model_init_pos, annotation_fields = self.resolve_model_init_order()
if self.model._meta.pk.attname not in model_init_names:
raise exceptions.FieldDoesNotExist(
'Raw query must include the primary key'
)
model_cls = self.model
fields = [self.model_fields.get(c) for c in self.columns]
converters = compiler.get_converters([
f.get_col(f.model._meta.db_table) if f else None for f in fields
])
if converters:
query = compiler.apply_converters(query, converters)
for values in query:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(db, model_init_names, model_init_values)
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
if hasattr(self.query, 'cursor') and self.query.cursor:
self.query.cursor.close()
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.query)
def __getitem__(self, k):
return list(self)[k]
@property
def db(self):
"""Return the database used if this query is executed now."""
return self._db or router.db_for_read(self.model, **self._hints)
def using(self, alias):
"""Select the database this RawQuerySet should execute against."""
return RawQuerySet(
self.raw_query, model=self.model,
query=self.query.chain(using=alias),
params=self.params, translations=self.translations,
using=alias,
)
@cached_property
def columns(self):
"""
A list of model field names in the order they'll appear in the
query results.
"""
columns = self.query.get_columns()
# Adjust any column names which don't match field names
for (query_name, model_name) in self.translations.items():
# Ignore translations for nonexistent column names
try:
index = columns.index(query_name)
except ValueError:
pass
else:
columns[index] = model_name
return columns
@cached_property
def model_fields(self):
"""A dict mapping column names to model field names."""
converter = connections[self.db].introspection.identifier_converter
model_fields = {}
for field in self.model._meta.fields:
name, column = field.get_attname_column()
model_fields[converter(column)] = field
return model_fields
class Prefetch:
def __init__(self, lookup, queryset=None, to_attr=None):
# `prefetch_through` is the path we traverse to perform the prefetch.
self.prefetch_through = lookup
# `prefetch_to` is the path to the attribute that stores the result.
self.prefetch_to = lookup
if queryset is not None and (
isinstance(queryset, RawQuerySet) or (
hasattr(queryset, '_iterable_class') and
not issubclass(queryset._iterable_class, ModelIterable)
)
):
raise ValueError(
                'Prefetch querysets cannot use raw(), values(), or '
                'values_list().'
)
if to_attr:
self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])
self.queryset = queryset
self.to_attr = to_attr
def __getstate__(self):
obj_dict = self.__dict__.copy()
if self.queryset is not None:
# Prevent the QuerySet from being evaluated
obj_dict['queryset'] = self.queryset._chain(
_result_cache=[],
_prefetch_done=True,
)
return obj_dict
def add_prefix(self, prefix):
self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
def get_current_prefetch_to(self, level):
return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1])
def get_current_to_attr(self, level):
parts = self.prefetch_to.split(LOOKUP_SEP)
to_attr = parts[level]
as_attr = self.to_attr and level == len(parts) - 1
return to_attr, as_attr
def get_current_queryset(self, level):
if self.get_current_prefetch_to(level) == self.prefetch_to:
return self.queryset
return None
def __eq__(self, other):
if not isinstance(other, Prefetch):
return NotImplemented
return self.prefetch_to == other.prefetch_to
def __hash__(self):
return hash((self.__class__, self.prefetch_to))
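# Usage sketch (hypothetical ``Question``/``Choice`` models): to_attr stores
# the prefetched rows as a plain list attribute instead of populating the
# related manager's cache.
#
#     popular = Prefetch(
#         'choice_set',
#         queryset=Choice.objects.filter(votes__gt=100),
#         to_attr='popular_choices',
#     )
#     question = Question.objects.prefetch_related(popular).get(pk=1)
#     question.popular_choices    # a list, already evaluated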
def normalize_prefetch_lookups(lookups, prefix=None):
"""Normalize lookups into Prefetch objects."""
ret = []
for lookup in lookups:
if not isinstance(lookup, Prefetch):
lookup = Prefetch(lookup)
if prefix:
lookup.add_prefix(prefix)
ret.append(lookup)
return ret
def prefetch_related_objects(model_instances, *related_lookups):
"""
Populate prefetched object caches for a list of model instances based on
the lookups/Prefetch instances given.
"""
if not model_instances:
return # nothing to do
# We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below). So we need some bookkeeping to
# ensure we don't do duplicate work.
done_queries = {} # dictionary of things like 'foo__bar': [results]
auto_lookups = set() # we add to this as we go through.
followed_descriptors = set() # recursion protection
all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
while all_lookups:
lookup = all_lookups.pop()
if lookup.prefetch_to in done_queries:
if lookup.queryset is not None:
raise ValueError("'%s' lookup was already seen with a different queryset. "
"You may need to adjust the ordering of your lookups." % lookup.prefetch_to)
continue
# Top level, the list of objects to decorate is the result cache
# from the primary QuerySet. It won't be for deeper levels.
obj_list = model_instances
through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
for level, through_attr in enumerate(through_attrs):
# Prepare main instances
if not obj_list:
break
prefetch_to = lookup.get_current_prefetch_to(level)
if prefetch_to in done_queries:
# Skip any prefetching, and any object preparation
obj_list = done_queries[prefetch_to]
continue
# Prepare objects:
good_objects = True
for obj in obj_list:
# Since prefetching can re-use instances, it is possible to have
# the same instance multiple times in obj_list, so obj might
# already be prepared.
if not hasattr(obj, '_prefetched_objects_cache'):
try:
obj._prefetched_objects_cache = {}
except (AttributeError, TypeError):
# Must be an immutable object from
# values_list(flat=True), for example (TypeError) or
# a QuerySet subclass that isn't returning Model
# instances (AttributeError), either in Django or a 3rd
# party. prefetch_related() doesn't make sense, so quit.
good_objects = False
break
if not good_objects:
break
# Descend down tree
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to first object applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr, to_attr)
if not attr_found:
raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
"parameter to prefetch_related()" %
(through_attr, first_obj.__class__.__name__, lookup.prefetch_through))
if level == len(through_attrs) - 1 and prefetcher is None:
# Last one, this *must* resolve to something that supports
# prefetching, otherwise there is no point adding it and the
# developer asking for it has made a mistake.
raise ValueError("'%s' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to "
"prefetch_related()." % lookup.prefetch_through)
if prefetcher is not None and not is_fetched:
obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
# We need to ensure we don't keep adding lookups from the
# same relationships to stop infinite recursion. So, if we
# are already on an automatically added lookup, don't add
# the new lookups from relationships we've seen already.
if not (prefetch_to in done_queries and lookup in auto_lookups and descriptor in followed_descriptors):
done_queries[prefetch_to] = obj_list
new_lookups = normalize_prefetch_lookups(reversed(additional_lookups), prefetch_to)
auto_lookups.update(new_lookups)
all_lookups.extend(new_lookups)
followed_descriptors.add(descriptor)
else:
# Either a singly related object that has already been fetched
# (e.g. via select_related), or hopefully some other property
# that doesn't support prefetching but needs to be traversed.
# We replace the current list of parent objects with the list
# of related objects, filtering out empty or missing values so
# that we can continue with nullable or reverse relations.
new_obj_list = []
for obj in obj_list:
if through_attr in getattr(obj, '_prefetched_objects_cache', ()):
# If related objects have been prefetched, use the
# cache rather than the object's through_attr.
new_obj = list(obj._prefetched_objects_cache.get(through_attr))
else:
try:
new_obj = getattr(obj, through_attr)
except exceptions.ObjectDoesNotExist:
continue
if new_obj is None:
continue
# We special-case `list` rather than something more generic
# like `Iterable` because we don't want to accidentally match
# user models that define __iter__.
if isinstance(new_obj, list):
new_obj_list.extend(new_obj)
else:
new_obj_list.append(new_obj)
obj_list = new_obj_list
def get_prefetcher(instance, through_attr, to_attr):
"""
For the attribute 'through_attr' on the given instance, find
an object that has a get_prefetch_queryset().
    Return a 4-tuple containing:
(the object with get_prefetch_queryset (or None),
the descriptor object representing this relationship (or None),
a boolean that is False if the attribute was not found at all,
a boolean that is True if the attribute has already been fetched)
"""
prefetcher = None
is_fetched = False
# For singly related objects, we have to avoid getting the attribute
# from the object, as this will trigger the query. So we first try
# on the class, in order to get the descriptor object.
rel_obj_descriptor = getattr(instance.__class__, through_attr, None)
if rel_obj_descriptor is None:
attr_found = hasattr(instance, through_attr)
else:
attr_found = True
if rel_obj_descriptor:
# singly related object, descriptor object has the
# get_prefetch_queryset() method.
if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'):
prefetcher = rel_obj_descriptor
if rel_obj_descriptor.is_cached(instance):
is_fetched = True
else:
# descriptor doesn't support prefetching, so we go ahead and get
# the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
if hasattr(rel_obj, 'get_prefetch_queryset'):
prefetcher = rel_obj
if through_attr != to_attr:
# Special case cached_property instances because hasattr
# triggers attribute computation and assignment.
if isinstance(getattr(instance.__class__, to_attr, None), cached_property):
is_fetched = to_attr in instance.__dict__
else:
is_fetched = hasattr(instance, to_attr)
else:
is_fetched = through_attr in instance._prefetched_objects_cache
return prefetcher, rel_obj_descriptor, attr_found, is_fetched
def prefetch_one_level(instances, prefetcher, lookup, level):
"""
Helper function for prefetch_related_objects().
Run prefetches on all instances using the prefetcher object,
assigning results to relevant caches in instance.
Return the prefetched objects along with any additional prefetches that
must be done due to prefetch_related lookups found from default managers.
"""
# prefetcher must have a method get_prefetch_queryset() which takes a list
# of instances, and returns a tuple:
# (queryset of instances of self.model that are related to passed in instances,
# callable that gets value to be matched for returned instances,
# callable that gets value to be matched for passed in instances,
# boolean that is True for singly related objects,
# cache or field name to assign to,
# boolean that is True when the previous argument is a cache name vs a field name).
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
rel_qs, rel_obj_attr, instance_attr, single, cache_name, is_descriptor = (
prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level)))
# We have to handle the possibility that the QuerySet we just got back
# contains some prefetch_related lookups. We don't want to trigger the
# prefetch_related functionality by evaluating the query. Rather, we need
# to merge in the prefetch_related lookups.
# Copy the lookups in case it is a Prefetch object which could be reused
# later (happens in nested prefetch_related).
additional_lookups = [
copy.copy(additional_lookup) for additional_lookup
in getattr(rel_qs, '_prefetch_related_lookups', ())
]
if additional_lookups:
# Don't need to clone because the manager should have given us a fresh
# instance, so we access an internal instead of using public interface
# for performance reasons.
rel_qs._prefetch_related_lookups = ()
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
to_attr, as_attr = lookup.get_current_to_attr(level)
# Make sure `to_attr` does not conflict with a field.
if as_attr and instances:
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to first object applies to all.
model = instances[0].__class__
try:
model._meta.get_field(to_attr)
except exceptions.FieldDoesNotExist:
pass
else:
msg = 'to_attr={} conflicts with a field on the {} model.'
raise ValueError(msg.format(to_attr, model.__name__))
# Whether or not we're prefetching the last part of the lookup.
leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
if single:
val = vals[0] if vals else None
if as_attr:
# A to_attr has been given for the prefetch.
setattr(obj, to_attr, val)
elif is_descriptor:
# cache_name points to a field name in obj.
# This field is a descriptor for a related object.
setattr(obj, cache_name, val)
else:
# No to_attr has been given for this prefetch operation and the
# cache_name does not point to a descriptor. Store the value of
# the field in the object's field cache.
obj._state.fields_cache[cache_name] = val
else:
if as_attr:
setattr(obj, to_attr, vals)
else:
manager = getattr(obj, to_attr)
if leaf and lookup.queryset is not None:
qs = manager._apply_rel_filters(lookup.queryset)
else:
qs = manager.get_queryset()
qs._result_cache = vals
# We don't want the individual qs doing prefetch_related now,
# since we have merged this into the current work.
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs
return all_related_objects, additional_lookups
class RelatedPopulator:
"""
RelatedPopulator is used for select_related() object instantiation.
The idea is that each select_related() model will be populated by a
different RelatedPopulator instance. The RelatedPopulator instances get
    klass_info and select (computed in SQLCompiler) plus the db in use as
input for initialization. That data is used to compute which columns
to use, how to instantiate the model, and how to populate the links
between the objects.
    The actual creation of the objects is done in the populate() method. This
method gets row and from_obj as input and populates the select_related()
model instance.
"""
def __init__(self, klass_info, select, db):
self.db = db
# Pre-compute needed attributes. The attributes are:
# - model_cls: the possibly deferred model class to instantiate
# - either:
# - cols_start, cols_end: usually the columns in the row are
# in the same order model_cls.__init__ expects them, so we
# can instantiate by model_cls(*row[cols_start:cols_end])
# - reorder_for_init: When select_related descends to a child
# class, then we want to reuse the already selected parent
# data. However, in this case the parent data isn't necessarily
# in the same order that Model.__init__ expects it to be, so
# we have to reorder the parent data. The reorder_for_init
# attribute contains a function used to reorder the field data
# in the order __init__ expects it.
# - pk_idx: the index of the primary key field in the reordered
# model data. Used to check if a related object exists at all.
# - init_list: the field attnames fetched from the database. For
# deferred models this isn't the same as all attnames of the
# model's fields.
# - related_populators: a list of RelatedPopulator instances if
# select_related() descends to related models from this model.
# - local_setter, remote_setter: Methods to set cached values on
# the object being populated and on the remote object. Usually
# these are Field.set_cached_value() methods.
select_fields = klass_info['select_fields']
from_parent = klass_info['from_parent']
if not from_parent:
self.cols_start = select_fields[0]
self.cols_end = select_fields[-1] + 1
self.init_list = [
f[0].target.attname for f in select[self.cols_start:self.cols_end]
]
self.reorder_for_init = None
else:
attname_indexes = {select[idx][0].target.attname: idx for idx in select_fields}
model_init_attnames = (f.attname for f in klass_info['model']._meta.concrete_fields)
self.init_list = [attname for attname in model_init_attnames if attname in attname_indexes]
self.reorder_for_init = operator.itemgetter(*[attname_indexes[attname] for attname in self.init_list])
self.model_cls = klass_info['model']
self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
self.local_setter = klass_info['local_setter']
self.remote_setter = klass_info['remote_setter']
def populate(self, row, from_obj):
if self.reorder_for_init:
obj_data = self.reorder_for_init(row)
else:
obj_data = row[self.cols_start:self.cols_end]
if obj_data[self.pk_idx] is None:
obj = None
else:
obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
for rel_iter in self.related_populators:
rel_iter.populate(row, obj)
self.local_setter(from_obj, obj)
if obj is not None:
self.remote_setter(obj, from_obj)
def get_related_populators(klass_info, select, db):
iterators = []
related_klass_infos = klass_info.get('related_klass_infos', [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
iterators.append(rel_cls)
return iterators
import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain
import django
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned,
ObjectDoesNotExist, ValidationError,
)
from django.db import (
DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connection,
connections, router, transaction,
)
from django.db.models import (
NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value,
)
from django.db.models.constants import LOOKUP_SEP
from django.db.models.constraints import CheckConstraint, UniqueConstraint
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields.related import (
ForeignObjectRel, OneToOneField, lazy_related_operation, resolve_relation,
)
from django.db.models.functions import Coalesce
from django.db.models.manager import Manager
from django.db.models.options import Options
from django.db.models.query import Q
from django.db.models.signals import (
class_prepared, post_init, post_save, pre_init, pre_save,
)
from django.db.models.utils import make_model_tuple
from django.utils.encoding import force_str
from django.utils.hashable import make_hashable
from django.utils.text import capfirst, get_text_list
from django.utils.translation import gettext_lazy as _
class Deferred:
def __repr__(self):
return '<Deferred field>'
def __str__(self):
return '<Deferred field>'
DEFERRED = Deferred()
def subclass_exception(name, bases, module, attached_to):
"""
    Create an exception subclass. Used by ModelBase below.
The exception is created in a way that allows it to be pickled, assuming
that the returned exception class will be added as an attribute to the
'attached_to' class.
"""
return type(name, bases, {
'__module__': module,
'__qualname__': '%s.%s' % (attached_to.__qualname__, name),
})
def _has_contribute_to_class(value):
# Only call contribute_to_class() if it's bound.
return not inspect.isclass(value) and hasattr(value, 'contribute_to_class')
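# Illustration (hypothetical class, not part of Django): any non-class
# attribute with a contribute_to_class() method is collected by
# ModelBase.__new__() below and installed via add_to_class(), which is how
# fields and managers hook themselves onto the model; plain attributes are
# set with setattr() instead.
#
#     class Shout:
#         def contribute_to_class(self, cls, name):
#             setattr(cls, name, '%s!' % cls.__name__.upper())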
class ModelBase(type):
"""Metaclass for all models."""
def __new__(cls, name, bases, attrs, **kwargs):
super_new = super().__new__
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_attrs = {'__module__': module}
classcell = attrs.pop('__classcell__', None)
if classcell is not None:
new_attrs['__classcell__'] = classcell
attr_meta = attrs.pop('Meta', None)
# Pass all attrs without a (Django-specific) contribute_to_class()
# method to type.__new__() so that they're properly initialized
# (i.e. __set_name__()).
contributable_attrs = {}
for obj_name, obj in list(attrs.items()):
if _has_contribute_to_class(obj):
contributable_attrs[obj_name] = obj
else:
new_attrs[obj_name] = obj
new_class = super_new(cls, name, bases, new_attrs, **kwargs)
abstract = getattr(attr_meta, 'abstract', False)
meta = attr_meta or getattr(new_class, 'Meta', None)
base_meta = getattr(new_class, '_meta', None)
app_label = None
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, 'app_label', None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class('_meta', Options(meta, app_label))
if not abstract:
new_class.add_to_class(
'DoesNotExist',
subclass_exception(
'DoesNotExist',
tuple(
x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (ObjectDoesNotExist,),
module,
attached_to=new_class))
new_class.add_to_class(
'MultipleObjectsReturned',
subclass_exception(
'MultipleObjectsReturned',
tuple(
x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract
) or (MultipleObjectsReturned,),
module,
attached_to=new_class))
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, 'ordering'):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, 'get_latest_by'):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped))
# Add remaining attributes (those with a contribute_to_class() method)
# to the class.
for obj_name, obj in contributable_attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, '_meta')]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name)
if base is None:
raise TypeError("Proxy model '%s' has no non-abstract model base class." % name)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, '_meta'):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField) and field.remote_field.parent_link:
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Track fields inherited from base models.
inherited_attributes = set()
# Do the appropriate setup for any model parents.
for base in new_class.mro():
if base not in parents or not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
inherited_attributes.update(base.__dict__)
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
if not base._meta.abstract:
# Check for clashes between locally declared fields and those
# on the base classes.
for field in parent_fields:
if field.name in field_names:
raise FieldError(
'Local field %r in class %r clashes with field of '
'the same name from base class %r.' % (
field.name,
name,
base.__name__,
)
)
else:
inherited_attributes.add(field.name)
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = '%s_ptr' % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
if attr_name in field_names:
raise FieldError(
"Auto-generated field '%s' in class %r for "
"parent_link to base class %r clashes with "
"declared field of the same name." % (
attr_name,
name,
base.__name__,
)
)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
# Add fields from abstract base class if it wasn't overridden.
for field in parent_fields:
if (field.name not in field_names and
field.name not in new_class.__dict__ and
field.name not in inherited_attributes):
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
# Replace parent links defined on this base by the new
# field. It will be appropriately resolved if required.
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base_parents)
# Inherit private fields (like GenericForeignKey) from the parent
# class
for field in base._meta.private_fields:
if field.name in field_names:
if not base._meta.abstract:
raise FieldError(
'Local field %r in class %r clashes with field of '
'the same name from base class %r.' % (
field.name,
name,
base.__name__,
)
)
else:
field = copy.deepcopy(field)
if not base._meta.abstract:
field.mti_inherited = True
new_class.add_to_class(field.name, field)
# Copy indexes so that index names are unique when models extend an
# abstract model.
new_class._meta.indexes = [copy.deepcopy(idx) for idx in new_class._meta.indexes]
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def add_to_class(cls, name, value):
if _has_contribute_to_class(value):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""Create some methods once self._meta has been populated."""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=True)
cls.get_previous_in_order = partialmethod(cls._get_next_or_previous_in_order, is_next=False)
# Defer creating accessors on the foreign class until it has been
# created and registered. If remote_field is None, we're ordering
# with respect to a GenericForeignKey and don't know what the
# foreign class is - we'll add those accessors later in
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields))
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower)
if get_absolute_url_override:
setattr(cls, 'get_absolute_url', get_absolute_url_override)
if not opts.managers:
if any(f.name == 'objects' for f in opts.fields):
raise ValueError(
"Model %s must specify a custom Manager, because it has a "
"field named 'objects'." % cls.__name__
)
manager = Manager()
manager.auto_created = True
cls.add_to_class('objects', manager)
# Set the name of _meta.indexes. This can't be done in
# Options.contribute_to_class() because fields haven't been added to
# the model at that point.
for index in cls._meta.indexes:
if not index.name:
index.set_name_with_model(cls)
class_prepared.send(sender=cls)
@property
def _base_manager(cls):
return cls._meta.base_manager
@property
def _default_manager(cls):
return cls._meta.default_manager
class ModelStateFieldsCacheDescriptor:
def __get__(self, instance, cls=None):
if instance is None:
return self
res = instance.fields_cache = {}
return res
class ModelState:
"""Store model instance state."""
db = None
# If true, uniqueness validation checks will consider this a new, unsaved
# object. Necessary for correct validation of new instances of objects with
# explicit (non-auto) PKs. This impacts validation only; it has no effect
# on the actual save.
adding = True
fields_cache = ModelStateFieldsCacheDescriptor()
class Model(metaclass=ModelBase):
def __init__(self, *args, **kwargs):
# Alias some things as locals to avoid repeat global lookups
cls = self.__class__
opts = self._meta
_setattr = setattr
_DEFERRED = DEFERRED
pre_init.send(sender=cls, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work.
        # The reason for the kwargs check is that the standard iterator passes
        # values in by args, and instantiation for iteration is 33% faster.
if len(args) > len(opts.concrete_fields):
            # Daft, but this matches the old exception, minus the error message.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matters - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(opts.fields)
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# Virtual field
if field.attname not in kwargs and field.column is None:
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
if rel_obj is not _DEFERRED:
_setattr(self, field.name, rel_obj)
else:
if val is not _DEFERRED:
_setattr(self, field.attname, val)
if kwargs:
property_names = opts._property_names
for prop in tuple(kwargs):
try:
# Any remaining kwargs must correspond to properties or
# virtual fields.
if prop in property_names or opts.get_field(prop):
if kwargs[prop] is not _DEFERRED:
_setattr(self, prop, kwargs[prop])
del kwargs[prop]
except (AttributeError, FieldDoesNotExist):
pass
for kwarg in kwargs:
raise TypeError("%s() got an unexpected keyword argument '%s'" % (cls.__name__, kwarg))
super().__init__()
post_init.send(sender=cls, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if len(values) != len(cls._meta.concrete_fields):
values_iter = iter(values)
values = [
next(values_iter) if f.attname in field_names else DEFERRED
for f in cls._meta.concrete_fields
]
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
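    # Illustrative sketch (comments only, so the class body is unchanged):
    # from_db() is the hook used to materialise a row from the database. A
    # common pattern is to override it to remember the originally loaded
    # values, e.g. to detect changed fields later. The Article model below is
    # hypothetical.
    #
    #     class Article(models.Model):
    #         headline = models.CharField(max_length=100)
    #
    #         @classmethod
    #         def from_db(cls, db, field_names, values):
    #             instance = super().from_db(db, field_names, values)
    #             # Stash what was loaded so later code can compare against it.
    #             instance._loaded_values = dict(zip(field_names, values))
    #             return instance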
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self)
def __str__(self):
return '%s object (%s)' % (self.__class__.__name__, self.pk)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self.pk
if my_pk is None:
return self is other
return my_pk == other.pk
def __hash__(self):
if self.pk is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self.pk)
def __reduce__(self):
data = self.__getstate__()
data[DJANGO_VERSION_PICKLE_KEY] = django.__version__
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id,), data
def __getstate__(self):
"""Hook to allow choosing the attributes to pickle."""
return self.__dict__
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled model instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled model instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
self.__dict__.update(state)
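    # Illustrative sketch: __reduce__/__getstate__/__setstate__ make instances
    # picklable and warn when the pickling and unpickling Django versions
    # differ. Assuming a saved instance `obj` of any concrete model:
    #
    #     import pickle
    #     data = pickle.dumps(obj)        # embeds django.__version__ in the state
    #     restored = pickle.loads(data)   # RuntimeWarning if the versions differ
    #     assert restored.pk == obj.pk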
def _get_pk_val(self, meta=None):
meta = meta or self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
for parent_link in self._meta.parents.values():
if parent_link and parent_link != self._meta.pk:
setattr(self, parent_link.target_field.attname, value)
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Return a set containing names of deferred fields for this instance.
"""
return {
f.attname for f in self._meta.concrete_fields
if f.attname not in self.__dict__
}
def refresh_from_db(self, using=None, fields=None):
"""
Reload field values from the database.
By default, the reloading happens from the database this instance was
loaded from, or by the read router if this instance wasn't loaded from
any database. The using parameter will override the default.
Fields can be used to specify which fields to reload. The fields
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
"""
if fields is None:
self._prefetched_objects_cache = {}
else:
prefetched_objects_cache = getattr(self, '_prefetched_objects_cache', ())
for field in fields:
if field in prefetched_objects_cache:
del prefetched_objects_cache[field]
fields.remove(field)
if not fields:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
'are not allowed in fields.' % LOOKUP_SEP)
hints = {'instance': self}
db_instance_qs = self.__class__._base_manager.db_manager(using, hints=hints).filter(pk=self.pk)
# Use provided fields, if not set then reload all non-deferred fields.
deferred_fields = self.get_deferred_fields()
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif deferred_fields:
fields = [f.attname for f in self._meta.concrete_fields
if f.attname not in deferred_fields]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
# This field wasn't refreshed - skip ahead.
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Clear cached foreign keys.
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
# Clear cached relations.
for field in self._meta.related_objects:
if field.is_cached(self):
field.delete_cached_value(self)
self._state.db = db_instance._state.db
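    # Illustrative sketch of refresh_from_db(), assuming a hypothetical
    # Article model with a `headline` column:
    #
    #     obj = Article.objects.create(headline='draft')
    #     Article.objects.filter(pk=obj.pk).update(headline='final')
    #     obj.refresh_from_db(fields=['headline'])   # reload only this column
    #     assert obj.headline == 'final'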
def serializable_value(self, field_name):
"""
Return the value of the field name for this instance. If the field is
a foreign key, return the id value instead of the object. If there's
no Field object with this name on the model, return the model
attribute's value.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(self, force_insert=False, force_update=False, using=None,
update_fields=None):
"""
Save the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
# Ensure that a model instance without a PK hasn't been assigned to
# a ForeignKey or OneToOneField on this model. If the field is
# nullable, allowing the save() would result in silent data loss.
for field in self._meta.concrete_fields:
# If the related field isn't cached, then an instance hasn't
# been assigned and there's no need to worry about this check.
if field.is_relation and field.is_cached(self):
obj = getattr(self, field.name, None)
if not obj:
continue
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
field.remote_field.delete_cached_value(obj)
raise ValueError(
"save() prohibited to prevent data loss due to "
"unsaved related object '%s'." % field.name
)
elif getattr(self, field.attname) is None:
# Use pk from related object if it has been saved after
# an assignment.
setattr(self, field.attname, obj.pk)
# If the relationship's pk/to_field was changed, clear the
# cached relationship.
if getattr(obj, field.target_field.attname) != getattr(self, field.attname):
field.delete_cached_value(self)
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
deferred_fields = self.get_deferred_fields()
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if not update_fields:
return
update_fields = frozenset(update_fields)
field_names = set()
for field in self._meta.fields:
if not field.primary_key:
field_names.add(field.name)
if field.name != field.attname:
field_names.add(field.attname)
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError("The following fields do not exist in this "
"model or are m2m fields: %s"
% ', '.join(non_model_fields))
# If saving to the same database, and this model is deferred, then
# automatically do an "update_fields" save on the loaded fields.
elif not force_insert and deferred_fields and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, 'through'):
field_names.add(field.attname)
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(using=using, force_insert=force_insert,
force_update=force_update, update_fields=update_fields)
save.alters_data = True
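    # Illustrative sketch of the update_fields fast path, again assuming the
    # hypothetical Article model: only the named columns are written, and an
    # empty iterable skips the save (and its signals) entirely.
    #
    #     obj.headline = 'updated'
    #     obj.save(update_fields=['headline'])   # single-column UPDATE
    #     obj.save(update_fields=[])             # no-op, no signals sent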
def save_base(self, raw=False, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Handle the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
        The 'raw' argument tells save_base() not to save any parent
        models and not to make any changes to the values before saving. This
is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or update_fields
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
pre_save.send(
sender=origin, instance=self, raw=raw, using=using,
update_fields=update_fields,
)
# A transaction isn't needed if one query is issued.
if meta.parents:
context_manager = transaction.atomic(using=using, savepoint=False)
else:
context_manager = transaction.mark_for_rollback_on_error(using=using)
with context_manager:
parent_inserted = False
if not raw:
parent_inserted = self._save_parents(cls, using, update_fields)
updated = self._save_table(
raw, cls, force_insert or parent_inserted,
force_update, using, update_fields,
)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
post_save.send(
sender=origin, instance=self, created=(not updated),
update_fields=update_fields, raw=raw, using=using,
)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
"""Save all the parents of cls using values from self."""
meta = cls._meta
inserted = False
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (field and getattr(self, parent._meta.pk.attname) is None and
getattr(self, field.attname) is not None):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
parent_inserted = self._save_parents(cls=parent, using=using, update_fields=update_fields)
updated = self._save_table(
cls=parent, using=using, update_fields=update_fields,
force_insert=parent_inserted,
)
if not updated:
inserted = True
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy, set
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
if field.is_cached(self):
field.delete_cached_value(self)
return inserted
def _save_table(self, raw=False, cls=None, force_insert=False,
force_update=False, using=None, update_fields=None):
"""
Do the heavy-lifting involved in saving. Update or insert the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [f for f in non_pks
if f.name in update_fields or f.attname in update_fields]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# Skip an UPDATE when adding an instance and primary key has a default.
if (
not raw and
not force_insert and
self._state.adding and
meta.pk.default and
meta.pk.default is not NOT_PROVIDED
):
force_insert = True
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False)))
for f in non_pks]
forced_update = update_fields or force_update
updated = self._do_update(base_qs, using, pk_val, values, update_fields,
forced_update)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to,
                # autopopulate the _order field
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
self._order = cls._base_manager.using(using).filter(**filter_args).aggregate(
_order__max=Coalesce(
ExpressionWrapper(Max('_order') + Value(1), output_field=IntegerField()),
Value(0),
),
)['_order__max']
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if f is not meta.auto_field]
returning_fields = meta.db_returning_fields
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
if results:
for value, field in zip(results[0], returning_fields):
setattr(self, field.attname, value)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
Try to update the model. Return True if the model was updated (if an
update query was done and a matching row was found in the DB).
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
            # We can end up here when saving a model in an inheritance chain where
# update_fields doesn't target any field in current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
return (
filtered.exists() and
# It may happen that the object is deleted from the DB right after
# this check, causing the subsequent UPDATE to return zero matching
# rows. The same result can occur in some rare cases when the
# database returns zero despite the UPDATE being executed
# successfully (a row is matched and updated). In order to
# distinguish these two cases, the object's existence in the
# database is again checked for if the UPDATE query returns 0.
(filtered._update(values) > 0 or filtered.exists())
)
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, returning_fields, raw):
"""
Do an INSERT. If returning_fields is defined then this method should
return the newly created data for the model.
"""
return manager._insert(
[self], fields=fields, returning_fields=returning_fields,
using=using, raw=raw,
)
def delete(self, using=None, keep_parents=False):
using = using or router.db_for_write(self.__class__, instance=self)
assert self.pk is not None, (
"%s object can't be deleted because its %s attribute is set to None." %
(self._meta.object_name, self._meta.pk.attname)
)
collector = Collector(using=using)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
choices_dict = dict(make_hashable(field.flatchoices))
# force_str() to coerce lazy strings.
return force_str(choices_dict.get(make_hashable(value), value), strings_only=True)
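    # Illustrative sketch: for a field declared with choices, Django exposes
    # this method as get_<field>_display(). Hypothetical model:
    #
    #     class Ticket(models.Model):
    #         STATUS_CHOICES = [(1, 'Open'), (2, 'Closed')]
    #         status = models.IntegerField(choices=STATUS_CHOICES)
    #
    #     Ticket(status=2).get_status_display()   # -> 'Closed'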
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = 'gt' if is_next else 'lt'
order = '' if is_next else '-'
param = getattr(self, field.attname)
q = Q(**{'%s__%s' % (field.name, op): param})
q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by(
'%s%s' % (order, field.name), '%spk' % order
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name)
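    # Illustrative sketch: this powers the get_next_by_<field>() and
    # get_previous_by_<field>() methods added for date-based fields. Assuming
    # a hypothetical Article model with a `pub_date` DateTimeField:
    #
    #     article = Article.objects.get(pk=1)
    #     article.get_next_by_pub_date()                    # next article by pub_date
    #     article.get_previous_by_pub_date(status='live')   # extra filter kwargs allowed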
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = 'gt' if is_next else 'lt'
order = '_order' if is_next else '-_order'
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = self.__class__._default_manager.filter(**filter_args).filter(**{
'_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{
self._meta.pk.name: self.pk
})
}).order_by(order)[:1].get()
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Check unique constraints on the model and raise ValidationError if any
failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
"""
Return a list of checks to perform. Since validate_unique() could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check. Fields that did not validate should also be excluded,
but they need to be passed in via the exclude argument.
"""
if exclude is None:
exclude = []
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
constraints = [(self.__class__, self._meta.total_unique_constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append((parent_class, parent_class._meta.unique_together))
if parent_class._meta.total_unique_constraints:
constraints.append(
(parent_class, parent_class._meta.total_unique_constraints)
)
for model_class, unique_together in unique_togethers:
for check in unique_together:
if not any(name in exclude for name in check):
# Add the check if the field isn't excluded.
unique_checks.append((model_class, tuple(check)))
for model_class, model_constraints in constraints:
for constraint in model_constraints:
if not any(name in exclude for name in constraint.fields):
unique_checks.append((model_class, constraint.fields))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, 'date', name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, 'year', name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, 'month', name, f.unique_for_month))
return unique_checks, date_checks
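    # Illustrative sketch of where these checks come from: unique=True fields,
    # Meta.unique_together, and UniqueConstraint entries without a condition
    # all feed into _get_unique_checks(). Hypothetical declaration:
    #
    #     class Booking(models.Model):
    #         room = models.CharField(max_length=20)
    #         date = models.DateField()
    #
    #         class Meta:
    #             constraints = [
    #                 models.UniqueConstraint(fields=['room', 'date'],
    #                                         name='unique_room_per_date'),
    #             ]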
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
            # object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
# TODO: Handle multiple backends with different feature flags.
if (lookup_value is None or
(lookup_value == '' and connection.features.interprets_empty_strings_as_nulls)):
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
            # allows a single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
            # There's a ticket to add a date lookup; we can remove this special
            # case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == 'date':
lookup_kwargs['%s__day' % unique_for] = date.day
lookup_kwargs['%s__month' % unique_for] = date.month
lookup_kwargs['%s__year' % unique_for] = date.year
else:
lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages['unique_for_date'],
code='unique_for_date',
params={
'model': self,
'model_name': capfirst(opts.verbose_name),
'lookup_type': lookup_type,
'field': field_name,
'field_label': capfirst(field.verbose_name),
'date_field': unique_for,
'date_field_label': capfirst(opts.get_field(unique_for).verbose_name),
}
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
'model': self,
'model_class': model_class,
'model_name': capfirst(opts.verbose_name),
'unique_check': unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params['field_label'] = capfirst(field.verbose_name)
return ValidationError(
message=field.error_messages['unique'],
code='unique',
params=params,
)
# unique_together
else:
field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check]
params['field_labels'] = get_text_list(field_labels, _('and'))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code='unique_together',
params=params,
)
def full_clean(self, exclude=None, validate_unique=True):
"""
Call clean_fields(), clean(), and validate_unique() on the model.
Raise a ValidationError for any errors that occur.
"""
errors = {}
if exclude is None:
exclude = []
else:
exclude = list(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.append(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
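    # Illustrative sketch of full_clean(), which ModelForm validation relies
    # on but which save() never calls implicitly. The Article model and slug
    # field are hypothetical:
    #
    #     obj = Article(headline='')
    #     try:
    #         obj.full_clean(exclude=['slug'])   # skip fields handled elsewhere
    #     except ValidationError as e:
    #         print(e.message_dict)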
def clean_fields(self, exclude=None):
"""
Clean all fields and raise a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = []
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = [*cls._check_swappable(), *cls._check_model(), *cls._check_managers(**kwargs)]
if not cls._meta.swapped:
databases = kwargs.get('databases') or []
errors += [
*cls._check_fields(**kwargs),
*cls._check_m2m_through_same_relationship(),
*cls._check_long_column_names(databases),
]
clash_errors = (
*cls._check_id_field(),
*cls._check_field_name_clashes(),
*cls._check_model_name_db_lookup_clashes(),
*cls._check_property_name_related_field_accessor_clashes(),
*cls._check_single_primary_key(),
)
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors += [
*cls._check_index_together(),
*cls._check_unique_together(),
*cls._check_indexes(databases),
*cls._check_ordering(),
*cls._check_constraints(databases),
]
return errors
@classmethod
def _check_swappable(cls):
"""Check if the swapped model exists."""
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable,
id='models.E001',
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split('.')
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract." % (
cls._meta.swappable, app_label, model_name
),
id='models.E002',
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id='models.E017',
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
"""Perform all manager checks."""
errors = []
for manager in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
"""Perform all field checks."""
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
""" Check if no relationship model is used by more than one m2m field.
"""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (f.remote_field.model, cls, f.remote_field.through, f.remote_field.through_fields)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two identical many-to-many relations "
"through the intermediate model '%s'." %
f.remote_field.through._meta.label,
obj=cls,
id='models.E003',
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
"""Check if `id` field is a primary key."""
fields = [f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk]
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == 'id':
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id='models.E004',
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
"""Forbid field shadowing in multi-table inheritance."""
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'." % (
clash.name, clash.model._meta,
f.name, f.model._meta
),
obj=cls,
id='models.E005',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents, including auto-generated fields like multi-table inheritance
# child accessors.
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect a clash between a user-defined non-unique
            # field "id" and the automatically added unique field "id", both
            # defined on the same model. This special case is considered in
            # _check_id_field and here we ignore it.
id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (
f.name, clash.name, clash.model._meta
),
obj=f,
id='models.E006',
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id='models.E007'
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_model_name_db_lookup_clashes(cls):
errors = []
model_name = cls.__name__
if model_name.startswith('_') or model_name.endswith('_'):
errors.append(
checks.Error(
"The model name '%s' cannot start or end with an underscore "
"as it collides with the query lookup syntax." % model_name,
obj=cls,
id='models.E023'
)
)
elif LOOKUP_SEP in model_name:
errors.append(
checks.Error(
"The model name '%s' cannot contain double underscores as "
"it collides with the query lookup syntax." % model_name,
obj=cls,
id='models.E024'
)
)
return errors
@classmethod
def _check_property_name_related_field_accessor_clashes(cls):
errors = []
property_names = cls._meta._property_names
related_field_accessors = (
f.get_attname() for f in cls._meta._get_fields(reverse=False)
if f.is_relation and f.related_model is not None
)
for accessor in related_field_accessors:
if accessor in property_names:
errors.append(
checks.Error(
"The property '%s' clashes with a related field "
"accessor." % accessor,
obj=cls,
id='models.E025',
)
)
return errors
@classmethod
def _check_single_primary_key(cls):
errors = []
if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1:
errors.append(
checks.Error(
"The model cannot have more than one field with "
"'primary_key=True'.",
obj=cls,
id='models.E026',
)
)
return errors
@classmethod
def _check_index_together(cls):
"""Check the value of "index_together" option."""
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id='models.E008',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id='models.E009',
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
"""Check the value of "unique_together" option."""
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id='models.E010',
)
]
elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id='models.E011',
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_indexes(cls, databases):
"""Check fields, names, and conditions of indexes."""
errors = []
for index in cls._meta.indexes:
# Index name can't start with an underscore or a number, restricted
# for cross-database compatibility with Oracle.
if index.name[0] == '_' or index.name[0].isdigit():
errors.append(
checks.Error(
"The index name '%s' cannot start with an underscore "
"or a number." % index.name,
obj=cls,
id='models.E033',
),
)
if len(index.name) > index.max_name_length:
errors.append(
checks.Error(
"The index name '%s' cannot be longer than %d "
"characters." % (index.name, index.max_name_length),
obj=cls,
id='models.E034',
),
)
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if (
connection.features.supports_partial_indexes or
'supports_partial_indexes' in cls._meta.required_db_features
):
continue
if any(index.condition is not None for index in cls._meta.indexes):
errors.append(
checks.Warning(
'%s does not support indexes with conditions.'
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning "
"if you don't care about it."
),
obj=cls,
id='models.W037',
)
)
fields = [field for index in cls._meta.indexes for field, _ in index.fields_orders]
errors.extend(cls._check_local_fields(fields, 'indexes'))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
# In order to avoid hitting the relation tree prematurely, we use our
# own fields_map instead of using get_field()
forward_fields_map = {}
for field in cls._meta._get_fields(reverse=False):
forward_fields_map[field.name] = field
if hasattr(field, 'attname'):
forward_fields_map[field.attname] = field
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the nonexistent field '%s'." % (
option, field_name,
),
obj=cls,
id='models.E012',
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'." % (
option, field_name, option,
),
obj=cls,
id='models.E013',
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model '%s'."
% (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id='models.E016',
)
)
return errors
@classmethod
def _check_ordering(cls):
"""
Check "ordering" option -- is it a list of strings and do all fields
exist?
"""
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id='models.E021',
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by only one field).",
obj=cls,
id='models.E014',
)
]
errors = []
fields = cls._meta.ordering
# Skip expressions and '?' fields.
fields = (f for f in fields if isinstance(f, str) and f != '?')
# Convert "-field" to "field".
fields = ((f[1:] if f.startswith('-') else f) for f in fields)
# Separate related fields and non-related fields.
_fields = []
related_fields = []
for f in fields:
if LOOKUP_SEP in f:
related_fields.append(f)
else:
_fields.append(f)
fields = _fields
# Check related fields.
for field in related_fields:
_cls = cls
fld = None
for part in field.split(LOOKUP_SEP):
try:
# pk is an alias that won't be found by opts.get_field.
if part == 'pk':
fld = _cls._meta.pk
else:
fld = _cls._meta.get_field(part)
if fld.is_relation:
_cls = fld.get_path_info()[-1].to_opts.model
else:
_cls = None
except (FieldDoesNotExist, AttributeError):
if fld is None or (
fld.get_transform(part) is None and fld.get_lookup(part) is None
):
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, "
"related field, or lookup '%s'." % field,
obj=cls,
id='models.E015',
)
)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != 'pk'}
# Check for invalid or nonexistent fields in ordering.
invalid_fields = []
# Any field name that is not present in field_names does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(chain.from_iterable(
(f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
))
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, related "
"field, or lookup '%s'." % invalid_field,
obj=cls,
id='models.E015',
)
)
return errors
@classmethod
def _check_long_column_names(cls, databases):
"""
Check that any auto-generated column names are shorter than the limits
for each database in which the model will be created.
"""
if not databases:
return []
errors = []
allowed_len = None
db_alias = None
# Find the minimum max allowed length among all specified db_aliases.
for db in databases:
# skip databases where the model won't be created
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is None or connection.features.truncates_names:
continue
else:
if allowed_len is None:
allowed_len = max_name_length
db_alias = db
elif max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if f.db_column is None and column_name is not None and len(column_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id='models.E018',
)
)
for f in cls._meta.local_many_to_many:
# Skip nonexistent models.
if isinstance(f.remote_field.through, str):
continue
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len:
errors.append(
checks.Error(
'Autogenerated column name too long for M2M field '
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id='models.E019',
)
)
return errors
@classmethod
def _check_constraints(cls, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_table_check_constraints or
'supports_table_check_constraints' in cls._meta.required_db_features
) and any(
isinstance(constraint, CheckConstraint)
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
'%s does not support check constraints.' % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id='models.W027',
)
)
if not (
connection.features.supports_partial_indexes or
'supports_partial_indexes' in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint) and constraint.condition is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
'%s does not support unique constraints with '
'conditions.' % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id='models.W036',
)
)
if not (
connection.features.supports_deferrable_unique_constraints or
'supports_deferrable_unique_constraints' in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint) and constraint.deferrable is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
'%s does not support deferrable unique constraints.'
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id='models.W038',
)
)
fields = (
field
for constraint in cls._meta.constraints if isinstance(constraint, UniqueConstraint)
for field in constraint.fields
)
errors.extend(cls._check_local_fields(fields, 'constraints'))
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(self, ordered_obj, id_list, using=None):
if using is None:
using = DEFAULT_DB_ALIAS
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update([
ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)
], ['_order'])
def method_get_order(self, ordered_obj):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
'get_%s_order' % model.__name__.lower(),
partialmethod(method_get_order, model)
)
setattr(
related_model,
'set_%s_order' % model.__name__.lower(),
partialmethod(method_set_order, model)
)
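# Illustrative sketch: with `order_with_respect_to = 'question'` on a
# hypothetical Answer model, the accessors above are attached to Question as
# get_answer_order() and set_answer_order():
#
#     question = Question.objects.get(pk=1)
#     question.get_answer_order()           # related Answer pks in stored order
#     question.set_answer_order([3, 1, 2])  # rewrite the _order column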
########
# MISC #
########
def model_unpickle(model_id):
"""Used to unpickle Model subclasses with deferred fields."""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
return model.__new__(model)
model_unpickle.__safe_for_unpickle__ = True
|
9a3e3cd1f9c359373b966c91a2a7aa2d2b396a52c0abc59cc1923a43c2a4cefc | import copy
import datetime
import inspect
from decimal import Decimal
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import NotSupportedError, connection
from django.db.models import fields
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import Q
from django.utils.deconstruct import deconstructible
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
class SQLiteNumericMixin:
"""
Some expressions with output_field=DecimalField() must be cast to
numeric to be properly filtered.
"""
def as_sqlite(self, compiler, connection, **extra_context):
sql, params = self.as_sql(compiler, connection, **extra_context)
try:
if self.output_field.get_internal_type() == 'DecimalField':
sql = 'CAST(%s AS NUMERIC)' % sql
except FieldError:
pass
return sql, params
class Combinable:
"""
Provide the ability to combine one or two objects with
some connector. For example F('foo') + F('bar').
"""
# Arithmetic connectors
ADD = '+'
SUB = '-'
MUL = '*'
DIV = '/'
POW = '^'
# The following is a quoted % operator - it is quoted because it can be
# used in strings that also have parameter substitution.
MOD = '%%'
# Bitwise operators - note that these are generated by .bitand()
    # and .bitor(); the '&' and '|' are reserved for boolean operator
# usage.
BITAND = '&'
BITOR = '|'
BITLEFTSHIFT = '<<'
BITRIGHTSHIFT = '>>'
BITXOR = '#'
def _combine(self, other, connector, reversed):
if not hasattr(other, 'resolve_expression'):
# everything must be resolvable to an expression
if isinstance(other, datetime.timedelta):
other = DurationValue(other, output_field=fields.DurationField())
else:
other = Value(other)
if reversed:
return CombinedExpression(other, connector, self)
return CombinedExpression(self, connector, other)
#############
# OPERATORS #
#############
def __neg__(self):
return self._combine(-1, self.MUL, False)
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __truediv__(self, other):
return self._combine(other, self.DIV, False)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __pow__(self, other):
return self._combine(other, self.POW, False)
def __and__(self, other):
if getattr(self, 'conditional', False) and getattr(other, 'conditional', False):
return Q(self) & Q(other)
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def bitand(self, other):
return self._combine(other, self.BITAND, False)
def bitleftshift(self, other):
return self._combine(other, self.BITLEFTSHIFT, False)
def bitrightshift(self, other):
return self._combine(other, self.BITRIGHTSHIFT, False)
def bitxor(self, other):
return self._combine(other, self.BITXOR, False)
def __or__(self, other):
if getattr(self, 'conditional', False) and getattr(other, 'conditional', False):
return Q(self) | Q(other)
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def bitor(self, other):
return self._combine(other, self.BITOR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rtruediv__(self, other):
return self._combine(other, self.DIV, True)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rpow__(self, other):
return self._combine(other, self.POW, True)
def __rand__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
def __ror__(self, other):
raise NotImplementedError(
"Use .bitand() and .bitor() for bitwise logical operations."
)
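    # Illustrative sketch of Combinable in practice: F() expressions support
    # the arithmetic operators defined above, while & and | are reserved for
    # Q-style boolean combination of conditional expressions. The Article
    # model and its fields are hypothetical:
    #
    #     from django.db.models import F
    #     Article.objects.update(rating=F('rating') + 1)      # uses __add__
    #     Article.objects.update(flags=F('flags').bitor(4))   # explicit bitor()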
@deconstructible
class BaseExpression:
"""Base class for all query expressions."""
# aggregate specific fields
is_summary = False
_output_field_resolved_to_none = False
# Can the expression be used in a WHERE clause?
filterable = True
    # Can the expression be used as a source expression in Window?
window_compatible = False
def __init__(self, output_field=None):
if output_field is not None:
self.output_field = output_field
def __getstate__(self):
state = self.__dict__.copy()
state.pop('convert_value', None)
return state
def get_db_converters(self, connection):
return (
[]
if self.convert_value is self._convert_value_noop else
[self.convert_value]
) + self.output_field.get_db_converters(connection)
def get_source_expressions(self):
return []
def set_source_expressions(self, exprs):
assert not exprs
def _parse_expressions(self, *expressions):
return [
arg if hasattr(arg, 'resolve_expression') else (
F(arg) if isinstance(arg, str) else Value(arg)
) for arg in expressions
]
def as_sql(self, compiler, connection):
"""
Responsible for returning a (sql, [params]) tuple to be included
in the current query.
Different backends can provide their own implementation, by
providing an `as_{vendor}` method and patching the Expression:
```
def override_as_sql(self, compiler, connection):
# custom logic
return super().as_sql(compiler, connection)
setattr(Expression, 'as_' + connection.vendor, override_as_sql)
```
Arguments:
* compiler: the query compiler responsible for generating the query.
Must have a compile method, returning a (sql, [params]) tuple.
Calling compiler(value) will return a quoted `value`.
* connection: the database connection used for the current query.
Return: (sql, params)
        Where `sql` is a string containing ordered SQL parameter placeholders to be
replaced with the elements of the list `params`.
"""
raise NotImplementedError("Subclasses must implement as_sql()")
@cached_property
def contains_aggregate(self):
return any(expr and expr.contains_aggregate for expr in self.get_source_expressions())
@cached_property
def contains_over_clause(self):
return any(expr and expr.contains_over_clause for expr in self.get_source_expressions())
@cached_property
def contains_column_references(self):
return any(expr and expr.contains_column_references for expr in self.get_source_expressions())
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
"""
Provide the chance to do any preprocessing or validation before being
added to the query.
Arguments:
* query: the backend query implementation
* allow_joins: boolean allowing or denying use of joins
in this query
* reuse: a set of reusable joins for multijoins
* summarize: a terminal aggregate clause
        * for_save: whether this expression is about to be used in a save or update
Return: an Expression to be added to the query.
"""
c = self.copy()
c.is_summary = summarize
c.set_source_expressions([
expr.resolve_expression(query, allow_joins, reuse, summarize)
if expr else None
for expr in c.get_source_expressions()
])
return c
@property
def conditional(self):
return isinstance(self.output_field, fields.BooleanField)
@property
def field(self):
return self.output_field
@cached_property
def output_field(self):
"""Return the output type of this expressions."""
output_field = self._resolve_output_field()
if output_field is None:
self._output_field_resolved_to_none = True
raise FieldError('Cannot resolve expression type, unknown output_field')
return output_field
@cached_property
def _output_field_or_none(self):
"""
Return the output field of this expression, or None if
_resolve_output_field() didn't return an output type.
"""
try:
return self.output_field
except FieldError:
if not self._output_field_resolved_to_none:
raise
def _resolve_output_field(self):
"""
Attempt to infer the output type of the expression. If the output
        fields of all source fields match, then simply infer the same type
here. This isn't always correct, but it makes sense most of the time.
Consider the difference between `2 + 2` and `2 / 3`. Inferring
the type here is a convenience for the common case. The user should
supply their own output_field with more complex computations.
If a source's output field resolves to None, exclude it from this check.
If all sources are None, then an error is raised higher up the stack in
the output_field property.
"""
sources_iter = (source for source in self.get_source_fields() if source is not None)
for output_field in sources_iter:
for source in sources_iter:
if not isinstance(output_field, source.__class__):
raise FieldError(
'Expression contains mixed types: %s, %s. You must '
'set output_field.' % (
output_field.__class__.__name__,
source.__class__.__name__,
)
)
return output_field
@staticmethod
def _convert_value_noop(value, expression, connection):
return value
@cached_property
def convert_value(self):
"""
Expressions provide their own converters because users have the option
of manually specifying the output_field which may be a different type
from the one the database returns.
"""
field = self.output_field
internal_type = field.get_internal_type()
if internal_type == 'FloatField':
return lambda value, expression, connection: None if value is None else float(value)
elif internal_type.endswith('IntegerField'):
return lambda value, expression, connection: None if value is None else int(value)
elif internal_type == 'DecimalField':
return lambda value, expression, connection: None if value is None else Decimal(value)
return self._convert_value_noop
def get_lookup(self, lookup):
return self.output_field.get_lookup(lookup)
def get_transform(self, name):
return self.output_field.get_transform(name)
def relabeled_clone(self, change_map):
clone = self.copy()
clone.set_source_expressions([
e.relabeled_clone(change_map) if e is not None else None
for e in self.get_source_expressions()
])
return clone
def copy(self):
return copy.copy(self)
def get_group_by_cols(self, alias=None):
if not self.contains_aggregate:
return [self]
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def get_source_fields(self):
"""Return the underlying field types used by this aggregate."""
return [e._output_field_or_none for e in self.get_source_expressions()]
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def reverse_ordering(self):
return self
def flatten(self):
"""
Recursively yield this expression and all subexpressions, in
depth-first order.
"""
yield self
for expr in self.get_source_expressions():
if expr:
yield from expr.flatten()
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, EXISTS expressions need
to be wrapped in CASE WHEN on Oracle.
"""
return self.output_field.select_format(compiler, sql, params)
@cached_property
def identity(self):
constructor_signature = inspect.signature(self.__init__)
args, kwargs = self._constructor_args
signature = constructor_signature.bind_partial(*args, **kwargs)
signature.apply_defaults()
arguments = signature.arguments.items()
identity = [self.__class__]
for arg, value in arguments:
if isinstance(value, fields.Field):
if value.name and value.model:
value = (value.model._meta.label, value.name)
else:
value = type(value)
else:
value = make_hashable(value)
identity.append((arg, value))
return tuple(identity)
def __eq__(self, other):
if not isinstance(other, BaseExpression):
return NotImplemented
return other.identity == self.identity
def __hash__(self):
return hash(self.identity)
class Expression(BaseExpression, Combinable):
"""An expression that can be combined with other expressions."""
pass
class CombinedExpression(SQLiteNumericMixin, Expression):
def __init__(self, lhs, connector, rhs, output_field=None):
super().__init__(output_field=output_field)
self.connector = connector
self.lhs = lhs
self.rhs = rhs
def __repr__(self):
return "<{}: {}>".format(self.__class__.__name__, self)
def __str__(self):
return "{} {} {}".format(self.lhs, self.connector, self.rhs)
def get_source_expressions(self):
return [self.lhs, self.rhs]
def set_source_expressions(self, exprs):
self.lhs, self.rhs = exprs
def as_sql(self, compiler, connection):
try:
lhs_output = self.lhs.output_field
except FieldError:
lhs_output = None
try:
rhs_output = self.rhs.output_field
except FieldError:
rhs_output = None
if (not connection.features.has_native_duration_field and
((lhs_output and lhs_output.get_internal_type() == 'DurationField') or
(rhs_output and rhs_output.get_internal_type() == 'DurationField'))):
return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection)
if (lhs_output and rhs_output and self.connector == self.SUB and
lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and
lhs_output.get_internal_type() == rhs_output.get_internal_type()):
return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection)
expressions = []
expression_params = []
sql, params = compiler.compile(self.lhs)
expressions.append(sql)
expression_params.extend(params)
sql, params = compiler.compile(self.rhs)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = '(%s)'
sql = connection.ops.combine_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = self.copy()
c.is_summary = summarize
c.lhs = c.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save)
c.rhs = c.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save)
return c
class DurationExpression(CombinedExpression):
def compile(self, side, compiler, connection):
if not isinstance(side, DurationValue):
try:
output = side.output_field
except FieldError:
pass
else:
if output.get_internal_type() == 'DurationField':
sql, params = compiler.compile(side)
return connection.ops.format_for_duration_arithmetic(sql), params
return compiler.compile(side)
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
expressions = []
expression_params = []
sql, params = self.compile(self.lhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
sql, params = self.compile(self.rhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = '(%s)'
sql = connection.ops.combine_duration_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
class TemporalSubtraction(CombinedExpression):
output_field = fields.DurationField()
def __init__(self, lhs, rhs):
super().__init__(lhs, self.SUB, rhs)
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
lhs = compiler.compile(self.lhs)
rhs = compiler.compile(self.rhs)
return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs)
@deconstructible
class F(Combinable):
"""An object capable of resolving references to existing query objects."""
def __init__(self, name):
"""
Arguments:
* name: the name of the field this expression references
"""
self.name = name
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.name)
def resolve_expression(self, query=None, allow_joins=True, reuse=None,
summarize=False, for_save=False):
return query.resolve_ref(self.name, allow_joins, reuse, summarize)
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.name == other.name
def __hash__(self):
return hash(self.name)
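# Illustrative usage, not part of this module: F() lets a query refer to a
# field's database value without loading it into Python. Assuming an Entry
# model with integer fields ``views`` and ``likes``:
#
#     Entry.objects.filter(likes__gt=F('views'))    # compare two columns
#     Entry.objects.update(views=F('views') + 1)    # atomic increment in SQL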
class ResolvedOuterRef(F):
"""
An object that contains a reference to an outer query.
In this case, the reference to the outer query has been resolved because
the inner query has been used as a subquery.
"""
contains_aggregate = False
def as_sql(self, *args, **kwargs):
raise ValueError(
'This queryset contains a reference to an outer query and may '
'only be used in a subquery.'
)
def resolve_expression(self, *args, **kwargs):
col = super().resolve_expression(*args, **kwargs)
# FIXME: Rename possibly_multivalued to multivalued and fix detection
# for non-multivalued JOINs (e.g. foreign key fields). This should take
# into account only many-to-many and one-to-many relationships.
col.possibly_multivalued = LOOKUP_SEP in self.name
return col
def relabeled_clone(self, relabels):
return self
def get_group_by_cols(self, alias=None):
return []
class OuterRef(F):
def resolve_expression(self, *args, **kwargs):
if isinstance(self.name, self.__class__):
return self.name
return ResolvedOuterRef(self.name)
def relabeled_clone(self, relabels):
return self
class Func(SQLiteNumericMixin, Expression):
"""An SQL function call."""
function = None
template = '%(function)s(%(expressions)s)'
arg_joiner = ', '
arity = None # The number of arguments the function accepts.
def __init__(self, *expressions, output_field=None, **extra):
if self.arity is not None and len(expressions) != self.arity:
raise TypeError(
"'%s' takes exactly %s %s (%s given)" % (
self.__class__.__name__,
self.arity,
"argument" if self.arity == 1 else "arguments",
len(expressions),
)
)
super().__init__(output_field=output_field)
self.source_expressions = self._parse_expressions(*expressions)
self.extra = extra
def __repr__(self):
args = self.arg_joiner.join(str(arg) for arg in self.source_expressions)
extra = {**self.extra, **self._get_repr_options()}
if extra:
extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items()))
return "{}({}, {})".format(self.__class__.__name__, args, extra)
return "{}({})".format(self.__class__.__name__, args)
def _get_repr_options(self):
"""Return a dict of extra __init__() options to include in the repr."""
return {}
def get_source_expressions(self):
return self.source_expressions
def set_source_expressions(self, exprs):
self.source_expressions = exprs
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = self.copy()
c.is_summary = summarize
for pos, arg in enumerate(c.source_expressions):
c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save)
return c
def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context):
connection.ops.check_expression_support(self)
sql_parts = []
params = []
for arg in self.source_expressions:
arg_sql, arg_params = compiler.compile(arg)
sql_parts.append(arg_sql)
params.extend(arg_params)
data = {**self.extra, **extra_context}
# Use the first supplied value in this order: the parameter to this
# method, a value supplied in __init__()'s **extra (the value in
# `data`), or the value defined on the class.
if function is not None:
data['function'] = function
else:
data.setdefault('function', self.function)
template = template or data.get('template', self.template)
arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner)
data['expressions'] = data['field'] = arg_joiner.join(sql_parts)
return template % data, params
def copy(self):
copy = super().copy()
copy.source_expressions = self.source_expressions[:]
copy.extra = self.extra.copy()
return copy
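# Illustrative sketch, not part of this module: concrete database functions
# are usually thin Func subclasses that only set ``function`` (and sometimes
# ``template``, ``arg_joiner``, or ``arity``). A hypothetical wrapper for
# SQL's LOWER():
#
#     class Lower(Func):
#         function = 'LOWER'
#         arity = 1
#
#     Author.objects.annotate(name_lower=Lower('name'))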
class Value(Expression):
"""Represent a wrapped value as a node within an expression."""
def __init__(self, value, output_field=None):
"""
Arguments:
* value: the value this expression represents. The value will be
added into the sql parameter list and properly quoted.
* output_field: an instance of the model field type that this
expression will return, such as IntegerField() or CharField().
"""
super().__init__(output_field=output_field)
self.value = value
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.value)
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
val = self.value
output_field = self._output_field_or_none
if output_field is not None:
if self.for_save:
val = output_field.get_db_prep_save(val, connection=connection)
else:
val = output_field.get_db_prep_value(val, connection=connection)
if hasattr(output_field, 'get_placeholder'):
return output_field.get_placeholder(val, compiler, connection), [val]
if val is None:
# cx_Oracle does not always convert None to the appropriate
# NULL type (like in case expressions using numbers), so we
# use a literal SQL NULL
return 'NULL', []
return '%s', [val]
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
c.for_save = for_save
return c
def get_group_by_cols(self, alias=None):
return []
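# Illustrative usage, not part of this module: Value() wraps a plain Python
# value so it can take part in an expression tree; it is passed as a query
# parameter rather than interpolated into the SQL string. Assuming an Author
# model:
#
#     from django.db.models import CharField
#     Author.objects.annotate(source=Value('db', output_field=CharField()))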
class DurationValue(Value):
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
if connection.features.has_native_duration_field:
return super().as_sql(compiler, connection)
return connection.ops.date_interval_sql(self.value), []
class RawSQL(Expression):
def __init__(self, sql, params, output_field=None):
if output_field is None:
output_field = fields.Field()
self.sql, self.params = sql, params
super().__init__(output_field=output_field)
def __repr__(self):
return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params)
def as_sql(self, compiler, connection):
return '(%s)' % self.sql, self.params
def get_group_by_cols(self, alias=None):
return [self]
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
        # Resolve parent fields used in raw SQL.
for parent in query.model._meta.get_parent_list():
for parent_field in parent._meta.local_fields:
_, column_name = parent_field.get_attname_column()
if column_name.lower() in self.sql.lower():
query.resolve_ref(parent_field.name, allow_joins, reuse, summarize)
break
return super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
class Star(Expression):
def __repr__(self):
return "'*'"
def as_sql(self, compiler, connection):
return '*', []
class Random(Expression):
output_field = fields.FloatField()
def __repr__(self):
return "Random()"
def as_sql(self, compiler, connection):
return connection.ops.random_function_sql(), []
class Col(Expression):
contains_column_references = True
possibly_multivalued = False
def __init__(self, alias, target, output_field=None):
if output_field is None:
output_field = target
super().__init__(output_field=output_field)
self.alias, self.target = alias, target
def __repr__(self):
alias, target = self.alias, self.target
identifiers = (alias, str(target)) if alias else (str(target),)
return '{}({})'.format(self.__class__.__name__, ', '.join(identifiers))
def as_sql(self, compiler, connection):
alias, column = self.alias, self.target.column
identifiers = (alias, column) if alias else (column,)
sql = '.'.join(map(compiler.quote_name_unless_alias, identifiers))
return sql, []
def relabeled_clone(self, relabels):
if self.alias is None:
return self
return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field)
def get_group_by_cols(self, alias=None):
return [self]
def get_db_converters(self, connection):
if self.target == self.output_field:
return self.output_field.get_db_converters(connection)
return (self.output_field.get_db_converters(connection) +
self.target.get_db_converters(connection))
class Ref(Expression):
"""
Reference to column alias of the query. For example, Ref('sum_cost') in
qs.annotate(sum_cost=Sum('cost')) query.
"""
def __init__(self, refs, source):
super().__init__()
self.refs, self.source = refs, source
def __repr__(self):
return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source)
def get_source_expressions(self):
return [self.source]
def set_source_expressions(self, exprs):
self.source, = exprs
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
# The sub-expression `source` has already been resolved, as this is
# just a reference to the name of `source`.
return self
def relabeled_clone(self, relabels):
return self
def as_sql(self, compiler, connection):
return connection.ops.quote_name(self.refs), []
def get_group_by_cols(self, alias=None):
return [self]
class ExpressionList(Func):
"""
An expression containing multiple expressions. Can be used to provide a
list of expressions as an argument to another expression, like an
ordering clause.
"""
template = '%(expressions)s'
def __init__(self, *expressions, **extra):
if not expressions:
raise ValueError('%s requires at least one expression.' % self.__class__.__name__)
super().__init__(*expressions, **extra)
def __str__(self):
return self.arg_joiner.join(str(arg) for arg in self.source_expressions)
class ExpressionWrapper(Expression):
"""
An expression that can wrap another expression so that it can provide
extra context to the inner expression, such as the output_field.
"""
def __init__(self, expression, output_field):
super().__init__(output_field=output_field)
self.expression = expression
def set_source_expressions(self, exprs):
self.expression = exprs[0]
def get_source_expressions(self):
return [self.expression]
def as_sql(self, compiler, connection):
return self.expression.as_sql(compiler, connection)
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.expression)
class When(Expression):
template = 'WHEN %(condition)s THEN %(result)s'
    # This isn't a complete conditional expression; it must be used in Case().
conditional = False
def __init__(self, condition=None, then=None, **lookups):
if lookups and condition is None:
condition, lookups = Q(**lookups), None
if condition is None or not getattr(condition, 'conditional', False) or lookups:
raise TypeError(
'When() supports a Q object, a boolean expression, or lookups '
'as a condition.'
)
if isinstance(condition, Q) and not condition:
raise ValueError("An empty Q() can't be used as a When() condition.")
super().__init__(output_field=None)
self.condition = condition
self.result = self._parse_expressions(then)[0]
def __str__(self):
return "WHEN %r THEN %r" % (self.condition, self.result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_source_expressions(self):
return [self.condition, self.result]
def set_source_expressions(self, exprs):
self.condition, self.result = exprs
def get_source_fields(self):
# We're only interested in the fields of the result expressions.
return [self.result._output_field_or_none]
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = self.copy()
c.is_summary = summarize
if hasattr(c.condition, 'resolve_expression'):
c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False)
c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save)
return c
def as_sql(self, compiler, connection, template=None, **extra_context):
connection.ops.check_expression_support(self)
template_params = extra_context
sql_params = []
condition_sql, condition_params = compiler.compile(self.condition)
template_params['condition'] = condition_sql
sql_params.extend(condition_params)
result_sql, result_params = compiler.compile(self.result)
template_params['result'] = result_sql
sql_params.extend(result_params)
template = template or self.template
return template % template_params, sql_params
def get_group_by_cols(self, alias=None):
# This is not a complete expression and cannot be used in GROUP BY.
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
class Case(Expression):
"""
An SQL searched CASE expression:
CASE
WHEN n > 0
THEN 'positive'
WHEN n < 0
THEN 'negative'
ELSE 'zero'
END
"""
template = 'CASE %(cases)s ELSE %(default)s END'
case_joiner = ' '
def __init__(self, *cases, default=None, output_field=None, **extra):
if not all(isinstance(case, When) for case in cases):
raise TypeError("Positional arguments must all be When objects.")
super().__init__(output_field)
self.cases = list(cases)
self.default = self._parse_expressions(default)[0]
self.extra = extra
def __str__(self):
return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_source_expressions(self):
return self.cases + [self.default]
def set_source_expressions(self, exprs):
*self.cases, self.default = exprs
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = self.copy()
c.is_summary = summarize
for pos, case in enumerate(c.cases):
c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save)
c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save)
return c
def copy(self):
c = super().copy()
c.cases = c.cases[:]
return c
def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context):
connection.ops.check_expression_support(self)
if not self.cases:
return compiler.compile(self.default)
template_params = {**self.extra, **extra_context}
case_parts = []
sql_params = []
for case in self.cases:
try:
case_sql, case_params = compiler.compile(case)
except EmptyResultSet:
continue
case_parts.append(case_sql)
sql_params.extend(case_params)
default_sql, default_params = compiler.compile(self.default)
if not case_parts:
return default_sql, default_params
case_joiner = case_joiner or self.case_joiner
template_params['cases'] = case_joiner.join(case_parts)
template_params['default'] = default_sql
sql_params.extend(default_params)
template = template or template_params.get('template', self.template)
sql = template % template_params
if self._output_field_or_none is not None:
sql = connection.ops.unification_cast_sql(self.output_field) % sql
return sql, sql_params
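# Illustrative usage, not part of this module: Case() chains several When()
# clauses, mirroring the SQL in the class docstring above. Assuming a Client
# model with an ``account_type`` field:
#
#     from django.db.models import Case, IntegerField, Value, When
#     Client.objects.annotate(
#         discount=Case(
#             When(account_type='gold', then=Value(10)),
#             When(account_type='platinum', then=Value(20)),
#             default=Value(0),
#             output_field=IntegerField(),
#         )
#     )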
class Subquery(Expression):
"""
An explicit subquery. It may contain OuterRef() references to the outer
query which will be resolved when it is applied to that query.
"""
template = '(%(subquery)s)'
contains_aggregate = False
def __init__(self, queryset, output_field=None, **extra):
self.query = queryset.query
self.extra = extra
# Prevent the QuerySet from being evaluated.
self.queryset = queryset._chain(_result_cache=[], prefetch_done=True)
super().__init__(output_field)
def __getstate__(self):
state = super().__getstate__()
args, kwargs = state['_constructor_args']
if args:
args = (self.queryset, *args[1:])
else:
kwargs['queryset'] = self.queryset
state['_constructor_args'] = args, kwargs
return state
def get_source_expressions(self):
return [self.query]
def set_source_expressions(self, exprs):
self.query = exprs[0]
def _resolve_output_field(self):
return self.query.output_field
def copy(self):
clone = super().copy()
clone.query = clone.query.clone()
return clone
@property
def external_aliases(self):
return self.query.external_aliases
def as_sql(self, compiler, connection, template=None, **extra_context):
connection.ops.check_expression_support(self)
template_params = {**self.extra, **extra_context}
subquery_sql, sql_params = self.query.as_sql(compiler, connection)
template_params['subquery'] = subquery_sql[1:-1]
template = template or template_params.get('template', self.template)
sql = template % template_params
return sql, sql_params
def get_group_by_cols(self, alias=None):
if alias:
return [Ref(alias, self)]
external_cols = self.query.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [self]
return external_cols
class Exists(Subquery):
template = 'EXISTS(%(subquery)s)'
output_field = fields.BooleanField()
def __init__(self, queryset, negated=False, **kwargs):
# As a performance optimization, remove ordering since EXISTS doesn't
# care about it, just whether or not a row matches.
queryset = queryset.order_by()
self.negated = negated
super().__init__(queryset, **kwargs)
def __invert__(self):
clone = self.copy()
clone.negated = not self.negated
return clone
def as_sql(self, compiler, connection, template=None, **extra_context):
sql, params = super().as_sql(compiler, connection, template, **extra_context)
if self.negated:
sql = 'NOT {}'.format(sql)
return sql, params
def select_format(self, compiler, sql, params):
# Wrap EXISTS() with a CASE WHEN expression if a database backend
# (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP
# BY list.
if not compiler.connection.features.supports_boolean_expr_in_select_clause:
sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql)
return sql, params
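# Illustrative usage, not part of this module: Subquery, OuterRef, and Exists
# work together; OuterRef defers resolution until the inner queryset is
# embedded in an outer query. Assuming Post and Comment models:
#
#     newest = Comment.objects.filter(post=OuterRef('pk')).order_by('-created')
#     Post.objects.annotate(newest_email=Subquery(newest.values('email')[:1]))
#     Post.objects.filter(Exists(Comment.objects.filter(post=OuterRef('pk'))))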
class OrderBy(BaseExpression):
template = '%(expression)s %(ordering)s'
conditional = False
def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False):
if nulls_first and nulls_last:
raise ValueError('nulls_first and nulls_last are mutually exclusive')
self.nulls_first = nulls_first
self.nulls_last = nulls_last
self.descending = descending
if not hasattr(expression, 'resolve_expression'):
raise ValueError('expression must be an expression type')
self.expression = expression
def __repr__(self):
return "{}({}, descending={})".format(
self.__class__.__name__, self.expression, self.descending)
def set_source_expressions(self, exprs):
self.expression = exprs[0]
def get_source_expressions(self):
return [self.expression]
def as_sql(self, compiler, connection, template=None, **extra_context):
template = template or self.template
if connection.features.supports_order_by_nulls_modifier:
if self.nulls_last:
template = '%s NULLS LAST' % template
elif self.nulls_first:
template = '%s NULLS FIRST' % template
else:
if self.nulls_last and not (
self.descending and connection.features.order_by_nulls_first
):
template = '%%(expression)s IS NULL, %s' % template
elif self.nulls_first and not (
not self.descending and connection.features.order_by_nulls_first
):
template = '%%(expression)s IS NOT NULL, %s' % template
connection.ops.check_expression_support(self)
expression_sql, params = compiler.compile(self.expression)
placeholders = {
'expression': expression_sql,
'ordering': 'DESC' if self.descending else 'ASC',
**extra_context,
}
params *= template.count('%(expression)s')
return (template % placeholders).rstrip(), params
def as_oracle(self, compiler, connection):
# Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in
# a CASE WHEN.
if isinstance(self.expression, Exists):
copy = self.copy()
copy.expression = Case(
When(self.expression, then=True),
default=False,
output_field=fields.BooleanField(),
)
return copy.as_sql(compiler, connection)
return self.as_sql(compiler, connection)
def get_group_by_cols(self, alias=None):
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def reverse_ordering(self):
self.descending = not self.descending
if self.nulls_first or self.nulls_last:
self.nulls_first = not self.nulls_first
self.nulls_last = not self.nulls_last
return self
def asc(self):
self.descending = False
def desc(self):
self.descending = True
class Window(Expression):
template = '%(expression)s OVER (%(window)s)'
# Although the main expression may either be an aggregate or an
# expression with an aggregate function, the GROUP BY that will
# be introduced in the query as a result is not desired.
contains_aggregate = False
contains_over_clause = True
filterable = False
def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None):
self.partition_by = partition_by
self.order_by = order_by
self.frame = frame
if not getattr(expression, 'window_compatible', False):
raise ValueError(
"Expression '%s' isn't compatible with OVER clauses." %
expression.__class__.__name__
)
if self.partition_by is not None:
if not isinstance(self.partition_by, (tuple, list)):
self.partition_by = (self.partition_by,)
self.partition_by = ExpressionList(*self.partition_by)
if self.order_by is not None:
if isinstance(self.order_by, (list, tuple)):
self.order_by = ExpressionList(*self.order_by)
elif not isinstance(self.order_by, BaseExpression):
raise ValueError(
'order_by must be either an Expression or a sequence of '
'expressions.'
)
super().__init__(output_field=output_field)
self.source_expression = self._parse_expressions(expression)[0]
def _resolve_output_field(self):
return self.source_expression.output_field
def get_source_expressions(self):
return [self.source_expression, self.partition_by, self.order_by, self.frame]
def set_source_expressions(self, exprs):
self.source_expression, self.partition_by, self.order_by, self.frame = exprs
def as_sql(self, compiler, connection, template=None):
connection.ops.check_expression_support(self)
if not connection.features.supports_over_clause:
raise NotSupportedError('This backend does not support window expressions.')
expr_sql, params = compiler.compile(self.source_expression)
window_sql, window_params = [], []
if self.partition_by is not None:
sql_expr, sql_params = self.partition_by.as_sql(
compiler=compiler, connection=connection,
template='PARTITION BY %(expressions)s',
)
window_sql.extend(sql_expr)
window_params.extend(sql_params)
if self.order_by is not None:
window_sql.append(' ORDER BY ')
order_sql, order_params = compiler.compile(self.order_by)
window_sql.extend(order_sql)
window_params.extend(order_params)
if self.frame:
frame_sql, frame_params = compiler.compile(self.frame)
window_sql.append(' ' + frame_sql)
window_params.extend(frame_params)
params.extend(window_params)
template = template or self.template
return template % {
'expression': expr_sql,
'window': ''.join(window_sql).strip()
}, params
def __str__(self):
return '{} OVER ({}{}{})'.format(
str(self.source_expression),
'PARTITION BY ' + str(self.partition_by) if self.partition_by else '',
'ORDER BY ' + str(self.order_by) if self.order_by else '',
str(self.frame or ''),
)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self)
def get_group_by_cols(self, alias=None):
return []
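# Illustrative usage, not part of this module: a Window pairs a
# window-compatible source expression with optional partitioning, ordering,
# and a frame (see WindowFrame below). Assuming a Movie model:
#
#     from django.db.models import Avg
#     Movie.objects.annotate(
#         avg_rating=Window(
#             expression=Avg('rating'),
#             partition_by=[F('studio')],
#             order_by=F('released').asc(),
#         )
#     )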
class WindowFrame(Expression):
"""
    Model the frame clause in window expressions. There are two types of frame
    clauses, implemented as subclasses; however, all processing and validation
    (by no means intended to be complete) is done here. Thus, providing an end
    for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the
    last row in the frame).
"""
template = '%(frame_type)s BETWEEN %(start)s AND %(end)s'
def __init__(self, start=None, end=None):
self.start = Value(start)
self.end = Value(end)
def set_source_expressions(self, exprs):
self.start, self.end = exprs
def get_source_expressions(self):
return [self.start, self.end]
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
start, end = self.window_frame_start_end(connection, self.start.value, self.end.value)
return self.template % {
'frame_type': self.frame_type,
'start': start,
'end': end,
}, []
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self)
def get_group_by_cols(self, alias=None):
return []
def __str__(self):
if self.start.value is not None and self.start.value < 0:
start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING)
elif self.start.value is not None and self.start.value == 0:
start = connection.ops.CURRENT_ROW
else:
start = connection.ops.UNBOUNDED_PRECEDING
if self.end.value is not None and self.end.value > 0:
end = '%d %s' % (self.end.value, connection.ops.FOLLOWING)
elif self.end.value is not None and self.end.value == 0:
end = connection.ops.CURRENT_ROW
else:
end = connection.ops.UNBOUNDED_FOLLOWING
return self.template % {
'frame_type': self.frame_type,
'start': start,
'end': end,
}
def window_frame_start_end(self, connection, start, end):
raise NotImplementedError('Subclasses must implement window_frame_start_end().')
class RowRange(WindowFrame):
frame_type = 'ROWS'
def window_frame_start_end(self, connection, start, end):
return connection.ops.window_frame_rows_start_end(start, end)
class ValueRange(WindowFrame):
frame_type = 'RANGE'
def window_frame_start_end(self, connection, start, end):
return connection.ops.window_frame_range_start_end(start, end)
|
d5827e0de182b692778e44aa89b6d2e56f2ed2d59b90ff2982bd4a3b46168edd | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import inspect
import sys
import warnings
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import (
EmptyResultSet, FieldDoesNotExist, FieldError,
)
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import BaseExpression, Col, F, OuterRef, Ref
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q, check_rel_lookup_compatibility, refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import (
BaseTable, Empty, Join, MultiJoin,
)
from django.db.models.sql.where import (
AND, OR, ExtraWhere, NothingNode, WhereNode,
)
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import cached_property
from django.utils.tree import Node
__all__ = ['Query', 'RawQuery']
def get_field_names_from_opts(opts):
return set(chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,)
for f in opts.get_fields()
))
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
JoinInfo = namedtuple(
'JoinInfo',
('final_field', 'targets', 'opts', 'joins', 'path', 'transform_function')
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=None):
self.params = params or ()
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0])
for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
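# Illustrative note, not part of this module: RawQuery is normally reached
# through Manager.raw() rather than constructed by hand. Assuming a Person
# model:
#
#     for person in Person.objects.raw(
#             'SELECT * FROM myapp_person WHERE id = %s', [1]):
#         ...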
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = 'T'
subq_aliases = frozenset([alias_prefix])
compiler = 'SQLCompiler'
def __init__(self, model, where=WhereNode, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
        # Whether to provide aliases to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.default_cols = True
self.default_ordering = True
self.standard_ordering = True
self.used_aliases = set()
self.filter_is_sticky = False
self.subquery = False
# SQL-related attributes
# Select and related select clauses are expressions to use in the
# SELECT clause of the query.
# The select is used for cases where we want to set up the select
# clause to contain other than default fields (values(), subqueries...)
# Note that annotations go to annotations dictionary.
self.select = ()
self.where = where()
self.where_class = where
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
self.group_by = None
self.order_by = ()
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.distinct = False
self.distinct_fields = ()
self.select_for_update = False
self.select_for_update_nowait = False
self.select_for_update_skip_locked = False
self.select_for_update_of = ()
self.select_for_no_key_update = False
self.select_related = False
        # Arbitrary limit for select_related to prevent infinite recursion.
self.max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
self.values_select = ()
# SQL annotation-related attributes
self.annotations = {} # Maps alias -> Annotation Expression
self.annotation_select_mask = None
self._annotation_select_cache = None
# Set combination attributes
self.combinator = None
self.combinator_all = False
self.combined_queries = ()
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self.extra_select_mask = None
self._extra_select_cache = None
self.extra_tables = ()
self.extra_order_by = ()
# A tuple that is a set of model field names and either True, if these
# are the fields to defer, or False if these are the only fields to
# load.
self.deferred_loading = (frozenset(), True)
self._filtered_relations = {}
self.explain_query = False
self.explain_format = None
self.explain_options = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, 'target', None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@property
def has_select_fields(self):
return bool(self.select or self.annotation_select_mask or self.extra_select_mask)
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
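    # Illustrative usage, not part of this module: both entry points above are
    # normally reached through a queryset's ``query`` attribute:
    #
    #     qs = Author.objects.filter(name='Terry')
    #     print(qs.query)                           # __str__, params filled in
    #     sql, params = qs.query.sql_with_params()  # raw template + params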
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(self, connection, using)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
return self.model._meta
def clone(self):
"""
        Return a copy of the current Query. A lightweight alternative to
        deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is None:
obj.annotation_select_mask = None
else:
obj.annotation_select_mask = self.annotation_select_mask.copy()
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is None:
obj.extra_select_mask = None
else:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is None:
obj._extra_select_cache = None
else:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if 'subq_aliases' in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property
try:
del obj.base_table
except AttributeError:
pass
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, '_setup_query'):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def rewrite_cols(self, annotation, col_cnt):
# We must make sure the inner query has the referred columns in it.
# If we are aggregating over an annotation, then Django uses Ref()
# instances to note this. However, if we are annotating over a column
# of a related model, then it might be that column isn't part of the
# SELECT clause of the inner query, and we must manually make sure
# the column is selected. An example case is:
# .aggregate(Sum('author__awards'))
# Resolving this expression results in a join to author, but there
# is no guarantee the awards column of author is in the select clause
# of the query. Thus we must manually add the column to the inner
# query.
orig_exprs = annotation.get_source_expressions()
new_exprs = []
for expr in orig_exprs:
# FIXME: These conditions are fairly arbitrary. Identify a better
# method of having expressions decide which code path they should
# take.
if isinstance(expr, Ref):
                # It's already a Ref to the subquery (see resolve_ref() for
                # details).
new_exprs.append(expr)
elif isinstance(expr, (WhereNode, Lookup)):
# Decompose the subexpressions further. The code here is
# copied from the else clause, but this condition must appear
# before the contains_aggregate/is_summary condition below.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
else:
# Reuse aliases of expressions already selected in subquery.
for col_alias, selected_annotation in self.annotation_select.items():
if selected_annotation is expr:
new_expr = Ref(col_alias, expr)
break
else:
                    # An expression that is not selected in the subquery.
if isinstance(expr, Col) or (expr.contains_aggregate and not expr.is_summary):
# Reference column or another aggregate. Select it
# under a non-conflicting alias.
col_cnt += 1
col_alias = '__col%d' % col_cnt
self.annotations[col_alias] = expr
self.append_annotation_mask([col_alias])
new_expr = Ref(col_alias, expr)
else:
# Some other expression not referencing database values
# directly. Its subexpression might contain Cols.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
annotation.set_source_expressions(new_exprs)
return annotation, col_cnt
def get_aggregation(self, using, added_aggregate_names):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not self.annotation_select:
return {}
existing_annotations = [
annotation for alias, annotation
in self.annotations.items()
if alias not in added_aggregate_names
]
# Decide if we need to use a subquery.
#
# Existing annotations would cause incorrect results as get_aggregation()
# must produce just one result and thus must not use GROUP BY. But we
# aren't smart enough to remove the existing annotations from the
# query, so those would force us to use GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (isinstance(self.group_by, tuple) or self.is_sliced or existing_annotations or
self.distinct or self.combinator):
from django.db.models.sql.subqueries import AggregateQuery
outer_query = AggregateQuery(self.model)
inner_query = self.clone()
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
if not self.is_sliced and not self.distinct_fields:
# Queries with distinct_fields need ordering and when a limit
# is applied we must take the slice from the ordered query.
# Otherwise no need for ordering.
inner_query.clear_ordering(True)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
has_existing_aggregate_annotations = any(
annotation for annotation in existing_annotations
if getattr(annotation, 'contains_aggregate', True)
)
if inner_query.default_cols and has_existing_aggregate_annotations:
inner_query.group_by = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),)
inner_query.default_cols = False
relabels = {t: 'subquery' for t in inner_query.alias_map}
relabels[None] = 'subquery'
# Remove any aggregates marked for reduction from the subquery
# and move them to the outer AggregateQuery.
col_cnt = 0
for alias, expression in list(inner_query.annotation_select.items()):
annotation_select_mask = inner_query.annotation_select_mask
if expression.is_summary:
expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt)
outer_query.annotations[alias] = expression.relabeled_clone(relabels)
del inner_query.annotations[alias]
annotation_select_mask.remove(alias)
            # Make sure the annotation_select won't use cached results.
inner_query.set_annotation_mask(inner_query.annotation_select_mask)
if inner_query.select == () and not inner_query.default_cols and not inner_query.annotation_select_mask:
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (self.model._meta.pk.get_col(inner_query.get_initial_alias()),)
try:
outer_query.add_subquery(inner_query, using)
except EmptyResultSet:
return {
alias: None
for alias in outer_query.annotation_select
}
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
outer_query.clear_ordering(True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using)
result = compiler.execute_sql(SINGLE)
if result is None:
result = [None] * len(outer_query.annotation_select)
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
obj.add_annotation(Count('*'), alias='__count', is_summary=True)
number = obj.get_aggregation(using, ['__count'])['__count']
if number is None:
number = 0
return number
def has_filters(self):
return self.where
def has_results(self, using):
q = self.clone()
if not q.distinct:
if q.group_by is True:
q.add_fields((f.attname for f in self.model._meta.concrete_fields), False)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
q.clear_ordering(True)
q.set_limits(high=1)
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
q.explain_query = True
q.explain_format = format
q.explain_options = options
compiler = q.get_compiler(using=using)
return '\n'.join(compiler.explain_query())
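    # Illustrative usage, not part of this module: explain() is exposed on
    # querysets and forwards format/options to the backend's EXPLAIN support
    # (``verbose`` here is a PostgreSQL-specific option):
    #
    #     print(Author.objects.filter(name='Terry').explain(verbose=True))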
def combine(self, rhs, connector):
"""
Merge the 'rhs' query into the current one (with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
        current query). 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
assert self.model == rhs.model, \
"Cannot combine queries on two different base models."
assert not self.is_sliced, \
"Cannot combine queries once a slice has been taken."
assert self.distinct == rhs.distinct, \
"Cannot combine a unique query with a non-unique query."
assert self.distinct_fields == rhs.distinct_fields, \
"Cannot combine queries with different distinct fields."
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = (connector == AND)
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
# combining with OR we can reuse joins. The reason is that in AND
# case a single row can't fulfill a condition like:
# revrel__col=1 & revrel__col=2
# But, there might be two different related rows matching this
# condition. In OR case a single True is enough, so single row is
# enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
# Base table must be present in the query - this is the same
# table on both sides.
self.get_initial_alias()
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError("When merging querysets using 'or', you cannot have extra(select=...) on both sides.")
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def deferred_to_data(self, target, callback):
"""
Convert the self.deferred_loading data structure to an alternate data
        structure, describing the fields that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
The "target" parameter is the instance that is populated (in place).
The "callback" is a function that is called whenever a (model, field)
        pair needs to be added to "target". It accepts three parameters:
"target", and the model and list of fields being added for that model.
"""
field_names, defer = self.deferred_loading
if not field_names:
return
orig_opts = self.get_meta()
seen = {}
must_include = {orig_opts.concrete_model: {orig_opts.pk}}
for field_name in field_names:
parts = field_name.split(LOOKUP_SEP)
cur_model = self.model._meta.concrete_model
opts = orig_opts
for name in parts[:-1]:
old_model = cur_model
if name in self._filtered_relations:
name = self._filtered_relations[name].relation_name
source = opts.get_field(name)
if is_reverse_o2o(source):
cur_model = source.related_model
else:
cur_model = source.remote_field.model
opts = cur_model._meta
# Even if we're "just passing through" this model, we must add
# both the current model's pk and the related reference field
# (if it's not a reverse relation) to the things we select.
if not is_reverse_o2o(source):
must_include[old_model].add(source)
add_to_dict(must_include, cur_model, opts.pk)
field = opts.get_field(parts[-1])
is_reverse_object = field.auto_created and not field.concrete
model = field.related_model if is_reverse_object else field.model
model = model._meta.concrete_model
if model == opts.model:
model = cur_model
if not is_reverse_o2o(field):
add_to_dict(seen, model, field)
if defer:
# We need to load all fields for each model, except those that
# appear in "seen" (for all models that appear in "seen"). The only
# slight complexity here is handling fields that exist on parent
# models.
workset = {}
for model, values in seen.items():
for field in model._meta.local_fields:
if field not in values:
m = field.model._meta.concrete_model
add_to_dict(workset, m, field)
for model, values in must_include.items():
# If we haven't included a model in workset, we don't add the
# corresponding must_include fields for that model, since an
# empty set means "include all fields". That's why there's no
# "else" branch here.
if model in workset:
workset[model].update(values)
for model, values in workset.items():
callback(target, model, values)
else:
for model, values in must_include.items():
if model in seen:
seen[model].update(values)
else:
# As we've passed through this model, but not explicitly
# included any fields, we have to make sure it's mentioned
# so that only the "must include" fields are pulled in.
seen[model] = values
# Now ensure that every model in the inheritance chain is mentioned
# in the parent list. Again, it must be mentioned to ensure that
# only "must include" fields are pulled in.
for model in orig_opts.get_parent_list():
seen.setdefault(model, set())
for model, values in seen.items():
callback(target, model, values)
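    # Illustrative sketch (editor's note, hypothetical model with a 'name'
    # field): for qs.only('name'), deferred_loading is ({'name'}, False) and
    # the callback receives the pk and name fields; for qs.defer('name') it
    # receives every local field except name.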
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
If 'create' is true, a new alias is always created. Otherwise, the
most recently created alias for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = filtered_relation.alias if filtered_relation is not None else table_name
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
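    # Illustrative sketch (editor's note, hypothetical table name): with the
    # default alias_prefix 'T', the first occurrence of "myapp_book" is
    # aliased by its own name, while a second occurrence gets a generated
    # alias based on the current size of alias_map:
    #
    #   alias, created = query.table_alias('myapp_book', create=True)
    #   # e.g. alias == 'T5' when alias_map already holds four entries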
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Promote recursively the join type of the given aliases and their
        children to an outer join. Only promote a join if it is nullable or
        the parent join is an outer join.
The children promotion is done to avoid join chains that contain a LOUTER
b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted,
then we must also promote b->c automatically, or otherwise the promotion
of a->b doesn't actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = parent_alias and self.alias_map[parent_alias].join_type == LOUTER
already_louter = self.alias_map[alias].join_type == LOUTER
if ((self.alias_map[alias].nullable or parent_louter) and
not already_louter):
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join for join in self.alias_map
if self.alias_map[join].parent_alias == alias and join not in aliases
)
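    # Illustrative sketch (editor's note): given the chain a INNER b INNER c,
    # promote_joins({'b'}) turns a->b into LOUTER and then re-examines c,
    # whose parent is now an outer join, so b->c is promoted as well,
    # yielding a LOUTER b LOUTER c (assuming the joins are promotable).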
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
are demoting b->c join in chain a LOUTER b LOUTER c then we must
        demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple([col.relabeled_clone(change_map) for col in self.group_by])
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map) for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
def bump_prefix(self, outer_query):
"""
Change the alias prefix to the next letter in the alphabet in a way
that the outer query's aliases and this query's aliases will not
conflict. Even tables that previously had no alias will get an alias
after this call.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix):] if prefix else alphabet
for s in product(seq, repeat=n):
yield ''.join(s)
prefix = None
if self.alias_prefix != outer_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
'Maximum recursion depth exceeded: too many subqueries.'
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
outer_query.subq_aliases = outer_query.subq_aliases.union(self.subq_aliases)
self.change_aliases({
alias: '%s%d' % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
})
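    # Illustrative sketch (editor's note): if the current alias_prefix is
    # 'T', prefix_gen() yields 'U', 'V', ..., 'Z' and then the two-letter
    # products 'AA', 'AB', ...; the first prefix not already claimed by a
    # subquery wins, and all existing aliases are renamed to e.g. 'U0', 'U1'.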
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
else:
alias = self.join(BaseTable(self.get_meta().db_table, None))
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
count. After execution, the reference counts are zeroed, so tables
        added by the compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None, reuse_with_filtered_relation=False):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a
sql.datastructures.BaseTable or Join.
The 'reuse' parameter can be either None which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
The 'reuse_with_filtered_relation' parameter is used when computing
FilteredRelation instances.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
if reuse_with_filtered_relation and reuse:
reuse_aliases = [
a for a, j in self.alias_map.items()
if a in reuse and j.equals(join, with_filtered_relation=False)
]
else:
reuse_aliases = [
a for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(join.table_name, create=True, filtered_relation=join.filtered_relation)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
return alias
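    # Illustrative sketch (editor's note, hypothetical Blog/Entry models):
    # the 'reuse' parameter is what makes
    #   Blog.objects.filter(entry__headline='a').filter(entry__body='b')
    # produce two separate joins to the entry table, while
    #   Blog.objects.filter(entry__headline='a', entry__body='b')
    # reuses a single join for both conditions.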
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
a parent of 'opts' or if it is None this method is a no-op.
The 'alias' is the root alias for starting the join, 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in their base chain with no parents;
            # assign the new options object and skip to the next base in
            # that case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def add_annotation(self, annotation, alias, is_summary=False):
"""Add a single annotation expression to the Query."""
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None,
summarize=is_summary)
self.append_annotation_mask([alias])
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
# It's safe to drop ordering if the queryset isn't using slicing,
# distinct(*fields) or select_for_update().
if (self.low_mark == 0 and self.high_mark is None and
not self.distinct_fields and
not self.select_for_update):
clone.clear_ordering(True)
clone.where.resolve_expression(query, *args, **kwargs)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, 'external_aliases'):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
(isinstance(table, Join) and table.join_field.related_model._meta.db_table != alias) or
(isinstance(table, BaseTable) and table.table_name != table.table_alias)
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col for col in self._gen_cols(exprs)
if col.alias in self.external_aliases
]
def as_sql(self, compiler, connection):
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = '(%s)' % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, 'resolve_expression'):
value = value.resolve_expression(
self, reuse=can_reuse, allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
return type(value)(
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
return value
def solve_lookup_type(self, lookup):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
expression, expression_lookups = refs_expression(lookup_splitted, self.annotations)
if expression:
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0:len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
'Invalid lookup "%s" for model %s".' %
(lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, '_meta'):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.' %
(value, opts.object_name))
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (isinstance(value, Query) and not value.has_select_fields and
not check_rel_lookup_compatibility(value.model, opts, field)):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".' %
(value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, '_meta'):
self.check_query_object_type(value, opts, field)
elif hasattr(value, '__iter__'):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if not getattr(expression, 'filterable', True):
raise NotSupportedError(
expression.__class__.__name__ + ' is disallowed in the filter '
'clause.'
)
if hasattr(expression, 'get_source_expressions'):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
        'lookups' is a list of names to extract using get_lookup()
and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ['exact']
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
if lhs.field.is_relation:
raise FieldError('Related Field got invalid lookup: {}'.format(lookup_name))
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = 'exact'
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
# Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
# uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ('exact', 'iexact'):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup('isnull')(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
lookup_name == 'exact' and lookup.rhs == ''):
return lhs.get_lookup('isnull')(lhs, True)
return lookup
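    # Illustrative sketch (editor's note, hypothetical 'pub_date' field): the
    # None handling above means .filter(pub_date=None) compiles to
    # "pub_date" IS NULL rather than "pub_date" = NULL, while
    # .filter(pub_date__gt=None) raises ValueError.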
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
a transform for name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(name, output_field.get_lookups())
if suggested_lookups:
suggestion = ', perhaps you meant %s?' % ' or '.join(suggested_lookups)
else:
suggestion = '.'
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
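    # Illustrative sketch (editor's note): a typo such as
    # .filter(name__icontain='x') ends up here and raises a FieldError along
    # the lines of "Unsupported lookup 'icontain' for CharField ..., perhaps
    # you meant icontains or contains?".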
def build_filter(self, filter_expr, branch_negated=False, current_negated=False,
can_reuse=None, allow_joins=True, split_subq=True,
reuse_with_filtered_relation=False, check_filterable=True):
"""
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
        Note that add_filter will not do any negating itself; that is done
        higher up in the code by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
If 'reuse_with_filtered_relation' is True, then only joins in can_reuse
will be reused.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
)
if hasattr(filter_expr, 'resolve_expression'):
if not getattr(filter_expr, 'conditional', False):
raise TypeError('Cannot filter against a non-conditional expression.')
condition = self.build_lookup(
['exact'], filter_expr.resolve_expression(self, allow_joins=allow_joins), True
)
clause = self.where_class()
clause.add(condition, AND)
return clause, []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)}
if check_filterable:
self.check_filterable(value)
clause = self.where_class()
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
clause.add(condition, AND)
return clause, []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts, opts, alias, can_reuse=can_reuse, allow_many=allow_many,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
# which joins could be later promoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
# No support for transforms for relational fields
num_lookups = len(lookups)
if num_lookups > 1:
raise FieldError('Related Field got invalid lookup: {}'.format(lookups[0]))
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(alias, targets, join_info.targets, join_info.final_field)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause.add(condition, AND)
require_outer = lookup_type == 'isnull' and condition.rhs is True and not current_negated
if current_negated and (lookup_type != 'isnull' or condition.rhs is False) and condition.rhs is not None:
require_outer = True
if (lookup_type != 'isnull' and (
self.is_nullable(targets[0]) or
self.alias_map[join_list[-1]].join_type == LOUTER)):
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
# upper layers of code. The reason for addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
lookup_class = targets[0].get_lookup('isnull')
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
return clause, used_joins if not require_outer else ()
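    # Illustrative sketch (editor's note, hypothetical nullable join): for
    # .exclude(author__name='bob'), the extra isnull clause added above makes
    # the final SQL read roughly
    #   NOT ("name" = 'bob' AND "name" IS NOT NULL)
    # so rows whose name is NULL survive the exclude, matching Python's
    # None semantics.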
def add_filter(self, filter_clause):
self.add_q(Q(**{filter_clause[0]: filter_clause[1]}))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
        # (Consider the case where rel_a is LOUTER and rel_a__col=1 is added -
        # if rel_a doesn't produce any rows, then the whole condition must
        # fail. So, demotion is OK.)
existing_inner = {a for a in self.alias_map if self.alias_map[a].join_type == INNER}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def _add_q(self, q_object, used_aliases, branch_negated=False,
current_negated=False, allow_joins=True, split_subq=True,
check_filterable=True):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated = current_negated ^ q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = self.where_class(connector=connector,
negated=q_object.negated)
joinpromoter = JoinPromoter(q_object.connector, len(q_object.children), current_negated)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child, can_reuse=used_aliases, branch_negated=branch_negated,
current_negated=current_negated, allow_joins=allow_joins,
split_subq=split_subq, check_filterable=check_filterable,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
needed_inner = joinpromoter.update_join_types(self)
return target_clause, needed_inner
def build_filtered_relation_q(self, q_object, reuse, branch_negated=False, current_negated=False):
"""Add a FilteredRelation object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = self.where_class(connector=connector, negated=q_object.negated)
for child in q_object.children:
if isinstance(child, Node):
child_clause = self.build_filtered_relation_q(
child, reuse=reuse, branch_negated=branch_negated,
current_negated=current_negated,
)
else:
child_clause, _ = self.build_filter(
child, can_reuse=reuse, branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=True, split_subq=False,
reuse_with_filtered_relation=True,
)
target_clause.add(child_clause, connector)
return target_clause
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
for lookup in chain((filtered_relation.relation_name,), lookups):
lookup_parts, field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
if len(field_parts) > (shift + len(lookup_parts)):
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations (got %r)." % lookup
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == 'pk':
name = opts.pk.name
field = None
filtered_relation = None
try:
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted([
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
])
raise FieldError("Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available)))
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, 'get_path_info'):
pathinfos = field.get_path_info(filtered_relation)
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0:inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name))
break
return path, final_field, targets, names[pos + 1:]
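    # Illustrative sketch (editor's note, hypothetical models): called with
    # fail_on_missing=False, names like ['author', 'name', 'icontains']
    # resolve 'author' into relational PathInfos and 'name' into the final
    # local field, and return ['icontains'] as the unresolved trailing names.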
def setup_joins(self, names, opts, alias, can_reuse=None, allow_many=True,
reuse_with_filtered_relation=False):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
The 'can_reuse' defines the reverse foreign key joins we can reuse. It
can be None in which case all joins are reusable or a set of aliases
that can be reused. Note that non-reverse foreign keys are always
reusable when using setup_joins().
        If 'reuse_with_filtered_relation' is True, joins are reused only from
        'can_reuse', and they are matched ignoring their filtered relations.
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
The target field is the field containing the concrete value. Final
field can be something different, for example foreign key pointing to
that value. Final field is needed for example in some value
conversions (convert 'obj' in fk__id=obj to pk val using the foreign
key field for example).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot], opts, allow_many, fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(transform, name=name, previous=final_transformer)
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = Join(
opts.db_table, alias, table_alias, INNER, join.join_field,
nullable, filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m or reuse_with_filtered_relation else None
alias = self.join(
connection, reuse=reuse,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
joins.append(alias)
if filtered_relation:
filtered_relation.path = joins[:]
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
The 'target' parameter is the final field being joined to, 'joins'
is the full list of join aliases. The 'path' contain the PathInfos
used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {r[1].column: r[0] for r in info.join_field.related_fields if r[1].column in cur_targets}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
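    # Illustrative sketch (editor's note, hypothetical FK): a filter such as
    # .filter(author__id=3) needs no join at all, because the target column
    # already exists on the base table as "author_id"; the loop above pops
    # the join and rewrites the target to the local foreign key column.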
@classmethod
def _gen_cols(cls, exprs):
for expr in exprs:
if isinstance(expr, Col):
yield expr
else:
yield from cls._gen_cols(expr.get_source_expressions())
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
if not allow_joins and LOOKUP_SEP in name:
raise FieldError("Joined field references are not permitted in this query")
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
'Joined field references are not permitted in '
'this query'
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
join_info = self.setup_joins(field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse)
targets, final_alias, join_list = self.trim_joins(join_info.targets, join_info.joins, join_info.path)
if not allow_joins and len(join_list) > 1:
raise FieldError('Joined field references are not permitted in this query')
if len(targets) > 1:
raise FieldError("Referencing multicolumn fields with F() objects "
"isn't supported")
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return self._get_col(targets[0], join_info.targets[0], join_list[-1])
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
        For example, if the original filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
We will turn this into equivalent of:
WHERE NOT (pk IN (SELECT parent_id FROM thetable
WHERE name = 'foo' AND parent_id IS NOT NULL))
It might be worth it to consider using WHERE NOT EXISTS as that has
saner null handling, and is easier for the backend's optimizer to
handle.
"""
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_expr = (filter_lhs, OuterRef(filter_rhs))
elif isinstance(filter_rhs, F):
filter_expr = (filter_lhs, OuterRef(filter_rhs.name))
# Generate the inner query.
query = Query(self.model)
query._filtered_relations = self._filtered_relations
query.add_filter(filter_expr)
query.clear_ordering(True)
        # Keep the subquery as simple as possible: trim leading joins from
        # the subquery.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
# Add extra check to make sure the selected field will not be null
# since we are adding an IN <subquery> clause. This prevents the
# database from tripping over IN (...,NULL,...) selects and returning
# nothing
col = query.select[0]
select_field = col.target
alias = col.alias
if self.is_nullable(select_field):
lookup_class = select_field.get_lookup('isnull')
lookup = lookup_class(select_field.get_col(alias), False)
query.where.add(lookup, AND)
if alias in can_reuse:
pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup('exact')
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias),
pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
condition, needed_inner = self.build_filter(
('%s__in' % trimmed_prefix, query),
current_negated=True, branch_negated=True, can_reuse=can_reuse)
if contains_louter:
or_null_condition, _ = self.build_filter(
('%s__isnull' % trimmed_prefix, True),
current_negated=True, branch_negated=True, can_reuse=can_reuse)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
def set_empty(self):
self.where.add(NothingNode(), AND)
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
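    # Illustrative sketch (editor's note): slices compose, so qs[5:10][1:3]
    # first sets (low_mark, high_mark) = (5, 10) and the second slice is then
    # applied relative to that window, ending at (6, 8).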
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col):
self.select += col,
self.values_select += col.output_field.name,
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
                # Join promotion note - we must not remove any rows here, so
                # if there are no existing joins, use outer join.
join_info = self.setup_joins(name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
else:
names = sorted([
*get_field_names_from_opts(opts), *self.extra,
*self.annotation_select, *self._filtered_relations
])
raise FieldError("Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names)))
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or OrderBy
expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if '.' in item:
warnings.warn(
                        'Passing raw column aliases to order_by() is '
'deprecated. Wrap %r in a RawSQL expression before '
'passing it to order_by().' % item,
category=RemovedInDjango40Warning,
stacklevel=3,
)
continue
if item == '?':
continue
if item.startswith('-'):
item = item[1:]
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
                # names_to_path() validates the lookup. A descriptive
                # FieldError will be raised if it isn't.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, 'resolve_expression'):
errors.append(item)
if getattr(item, 'contains_aggregate', False):
raise FieldError(
'Using an aggregate in order_by() without also including '
'it in annotate() is not allowed: %s' % item
)
if errors:
raise FieldError('Invalid order_by arguments: %s' % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
def clear_ordering(self, force_empty):
"""
Remove any ordering settings. If 'force_empty' is True, there will be
no ordering in the resulting query (not even the model's default).
"""
self.order_by = ()
self.extra_order_by = ()
if force_empty:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
# Column names from JOINs to check collisions with aliases.
if allow_aliases:
column_names = set()
seen_models = set()
for join in list(self.alias_map.values())[1:]: # Skip base table.
model = join.join_field.related_model
if model not in seen_models:
column_names.update({
field.column
for field in model._meta.local_concrete_fields
})
seen_models.add(model)
group_by = list(self.select)
if self.annotation_select:
for alias, annotation in self.annotation_select.items():
signature = inspect.signature(annotation.get_group_by_cols)
if 'alias' not in signature.parameters:
annotation_class = annotation.__class__
msg = (
'`alias=None` must be added to the signature of '
'%s.%s.get_group_by_cols().'
) % (annotation_class.__module__, annotation_class.__qualname__)
warnings.warn(msg, category=RemovedInDjango40Warning)
group_by_cols = annotation.get_group_by_cols()
else:
if not allow_aliases or alias in column_names:
alias = None
group_by_cols = annotation.get_group_by_cols(alias=alias)
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != '%':
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
        # Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set data structure. We do the foo__bar
        # splitting and handling when computing the SQL column names.
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
self.deferred_loading = existing.difference(field_names), False
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if 'pk' in field_names:
field_names.remove('pk')
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
def get_loaded_field_names(self):
"""
If any fields are marked to be deferred, return a dictionary mapping
models to a set of names in those fields that will be loaded. If a
model is not in the returned dictionary, none of its fields are
deferred.
If no fields are marked for deferral, return an empty dictionary.
"""
# We cache this because we call this function multiple times
# (compiler.fill_related_selections, query.iterator)
try:
return self._loaded_field_names_cache
except AttributeError:
collection = {}
self.deferred_to_data(collection, self.get_loaded_field_names_cb)
self._loaded_field_names_cache = collection
return collection
def get_loaded_field_names_cb(self, target, model, fields):
"""Callback used by get_deferred_field_names()."""
target[model] = {f.attname for f in fields}
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = set(names)
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask(self.annotation_select_mask.union(names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
else:
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields((f.attname for f in self.model._meta.concrete_fields), False)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in field_names:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: v for k, v in self.annotations.items()
if k in self.annotation_select_mask
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items()
if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map
if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
        # The path.join_field is a Rel; let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(
join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
        # Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
self.where_class, None, lookup_tables[trimmed_paths + 1])
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
            # values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a Join instead of a BaseTable reference.
# But the first entry in the query's FROM clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = BaseTable(self.alias_map[table].table_name, table)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
return (
connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and
field.empty_strings_allowed
) or field.null
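    # Illustrative note (not part of the upstream module): on Oracle,
    # interprets_empty_strings_as_nulls is True, so a CharField(null=False) is
    # still reported as nullable here because that backend stores '' as NULL.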
def get_order_dir(field, default='ASC'):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
"""
dirn = ORDER_DIR[default]
if field[0] == '-':
return field[1:], dirn[1]
return field, dirn[0]
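# Illustrative examples (not part of the upstream module), assuming the default
# ORDER_DIR mapping:
#   get_order_dir('pk')           -> ('pk', 'ASC')
#   get_order_dir('-created')     -> ('created', 'DESC')
#   get_order_dir('pk', 'DESC')   -> ('pk', 'DESC')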
def add_to_dict(data, key, value):
"""
Add "value" to the set of values for "key", whether or not "key" already
exists.
"""
if key in data:
data[key].add(value)
else:
data[key] = {value}
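# Illustrative example (not part of the upstream module):
#   data = {}
#   add_to_dict(data, 'a', 1)
#   add_to_dict(data, 'a', 2)
#   # data == {'a': {1, 2}}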
def is_reverse_o2o(field):
"""
Check if the given field is reverse-o2o. The field is expected to be some
sort of relation field or related object.
"""
return field.is_relation and field.one_to_one and not field.concrete
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def add_votes(self, votes):
"""
        Add a single vote per item to self.votes. The parameter can be any
        iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
# if rel_a join doesn't produce any results is null (for example
# reverse foreign key or null value in direct foreign key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == 'OR' and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
# then we can use INNER for the join. For example:
# (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell)
# then if rel_a doesn't produce any rows, the whole condition
# can't match. Hence we can safely use INNER join.
if self.effective_connector == 'AND' or (
self.effective_connector == 'OR' and votes == self.num_children):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
# vote is enough). The demotion is OK, if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
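    # Illustrative note (not part of the upstream module): for
    # filter(Q(rel_a__col=1) | Q(rel_b__col=2)), num_children is 2 and each
    # join receives a single vote, so under the OR connector both rel_a and
    # rel_b are promoted to LEFT OUTER joins. A later filter(rel_a__col__gte=0)
    # votes for rel_a again under AND, demoting that join back to INNER.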
|
7d89d842b042b0d553bfd1f6f8385cd8685cf82b6a9a2ef8532d49c02c086175 | import collections
import re
from functools import partial
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DatabaseError, NotSupportedError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import OrderBy, Random, RawSQL, Ref, Value
from django.db.models.functions import Cast
from django.db.models.query_utils import Q, select_related_descend
from django.db.models.sql.constants import (
CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.transaction import TransactionManagementError
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
from django.utils.regex_helper import _lazy_re_compile
class SQLCompiler:
# Multiline ordering SQL clause may appear from RawSQL.
ordering_parts = _lazy_re_compile(
r'^(.*)\s(?:ASC|DESC).*',
re.MULTILINE | re.DOTALL,
)
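    # Illustrative note (not part of the upstream module): ordering_parts
    # strips the direction from a compiled ORDER BY term, e.g.
    # '"table"."col" DESC' -> '"table"."col"', so get_order_by() can detect
    # duplicate ordering expressions regardless of direction.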
def __init__(self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {'*': '*'}
        # The select, klass_info, and annotations are needed by QuerySet.iterator();
        # these are set as a side-effect of executing the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self._meta_ordering = None
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
self.where, self.having = self.query.where.split_having()
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
# refer some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, 'as_sql'):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
ref_sources = {
expr.source for expr in expressions if isinstance(expr, Ref)
}
for expr, _, _ in select:
# Skip members of the select clause that are already included
# by reference.
if expr in ref_sources:
continue
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
for expr, (sql, params, is_ref) in order_by:
# Skip References to the select clause, as all expressions in the
# select clause are already part of the group by.
if not is_ref:
expressions.extend(expr.get_group_by_cols())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
sql, params = self.compile(expr)
sql, params = expr.select_format(self, sql, params)
params_hash = make_hashable(params)
if (sql, params_hash) not in seen:
result.append((sql, params))
seen.add((sql, params_hash))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key.
if self.connection.features.allows_group_by_pk:
# Determine if the main model's primary key is in the query.
pk = None
for expr in expressions:
# Is this a reference to query's base table primary key? If the
# expression isn't a Col-like, then skip the expression.
if (getattr(expr, 'target', None) == self.query.model._meta.pk and
getattr(expr, 'alias', None) == self.query.base_table):
pk = expr
break
# If the main model's primary key is in the query, group by that
# field, HAVING expressions, and expressions associated with tables
# that don't have a primary key included in the grouped columns.
if pk:
pk_aliases = {
expr.alias for expr in expressions
if hasattr(expr, 'target') and expr.target.primary_key
}
expressions = [pk] + [
expr for expr in expressions
if expr in having or (
getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases
)
]
elif self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
pks = {
expr for expr in expressions
if (
hasattr(expr, 'target') and
expr.target.primary_key and
self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model)
)
}
aliases = {expr.alias for expr in pks}
expressions = [
expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases
]
return expressions
def get_select(self):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position of the select clause).
        - related_klass_infos: [f, klass_info] to descend into
The annotations is a dictionary of {'attname': column position} values.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
cols = self.get_default_columns()
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
'model': self.query.model,
'select_fields': select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info['related_klass_infos'] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info['related_klass_infos']:
if ki['from_parent']:
ki['select_fields'] = (klass_info['select_fields'] +
ki['select_fields'])
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
try:
sql, params = self.compile(col)
except EmptyResultSet:
# Select a predicate that's always False.
sql, params = '0', ()
else:
sql, params = col.select_format(self, sql, params)
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
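    # Illustrative note (not part of the upstream module), using hypothetical
    # Book/Author models: for Book.objects.select_related('author') the
    # klass_info is roughly {'model': Book, 'select_fields': [0, 1, ...],
    # 'related_klass_infos': [{'model': Author, 'field': <author FK>,
    # 'reverse': False, 'select_fields': [...], ...}]}.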
def get_order_by(self):
"""
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the
ORDER BY clause.
The order_by clause can alter the select clause (for example it
can add aliases to clauses that do not yet have one, or it can
add totally new select clauses).
"""
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
elif self.query.order_by:
ordering = self.query.order_by
elif self.query.get_meta().ordering:
ordering = self.query.get_meta().ordering
self._meta_ordering = ordering
else:
ordering = []
if self.query.standard_ordering:
asc, desc = ORDER_DIR['ASC']
else:
asc, desc = ORDER_DIR['DESC']
order_by = []
for field in ordering:
if hasattr(field, 'resolve_expression'):
if isinstance(field, Value):
# output_field must be resolved for constants.
field = Cast(field, field.output_field)
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field = field.copy()
field.reverse_ordering()
order_by.append((field, False))
continue
if field == '?': # random
order_by.append((OrderBy(Random()), False))
continue
col, order = get_order_dir(field, asc)
descending = order == 'DESC'
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
order_by.append((
OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending),
True))
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT
# clause.
expr = self.query.annotations[col]
if isinstance(expr, Value):
# output_field must be resolved for constants.
expr = Cast(expr, expr.output_field)
order_by.append((OrderBy(expr, descending=descending), False))
continue
if '.' in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split('.', 1)
order_by.append((
OrderBy(
RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []),
descending=descending
), False))
continue
if not self.query.extra or col not in self.query.extra:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
order_by.extend(self.find_ordering_name(
field, self.query.get_meta(), default_order=asc))
else:
if col not in self.query.extra_select:
order_by.append((
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False))
else:
order_by.append((
OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending),
True))
result = []
seen = set()
for expr, is_ref in order_by:
resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
if self.query.combinator:
src = resolved.get_source_expressions()[0]
# Relabel order by columns to raw numbers if this is a combined
# query; necessary since the columns can't be referenced by the
# fully qualified name and the simple column names may collide.
for idx, (sel_expr, _, col_alias) in enumerate(self.select):
if is_ref and col_alias == src.refs:
src = src.source
elif col_alias:
continue
if src == sel_expr:
resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())])
break
else:
if col_alias:
raise DatabaseError('ORDER BY term does not match any column in the result set.')
# Add column used in ORDER BY clause without an alias to
# the selected columns.
self.query.add_select_col(src)
resolved.set_source_expressions([RawSQL('%d' % len(self.query.select), ())])
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql)[1]
params_hash = make_hashable(params)
if (without_ordering, params_hash) in seen:
continue
seen.add((without_ordering, params_hash))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql)[1]
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select or (
self.query.external_aliases.get(name) and name not in self.query.table_map)):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
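    # Illustrative note (not part of the upstream module): on PostgreSQL,
    # quote_name_unless_alias('auth_user') returns '"auth_user"', while a
    # generated join alias such as 'T3' (present in alias_map but not in
    # table_map) is returned unquoted so it matches the FROM clause alias.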
def compile(self, node):
vendor_impl = getattr(node, 'as_' + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
return sql, params
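    # Illustrative note (not part of the upstream module): compile() prefers a
    # vendor-specific hook when one exists, e.g. a node defining
    # as_postgresql(compiler, connection) is used on PostgreSQL instead of its
    # generic as_sql().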
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection)
for query in self.query.combined_queries if not query.is_empty()
]
if not features.supports_slicing_ordering_in_compound:
for query, compiler in zip(self.query.combined_queries, compilers):
if query.low_mark or query.high_mark:
raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.')
if compiler.get_order_by():
raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.')
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query = compiler.query.clone()
compiler.query.set_values((
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
))
part_sql, part_args = compiler.as_sql()
if compiler.query.combinator:
# Wrap in a subquery if wrapping in parentheses isn't
# supported.
if not features.supports_parentheses_in_compound:
part_sql = 'SELECT * FROM ({})'.format(part_sql)
# Add parentheses when combining with compound query if not
# already added for all compound queries.
elif not features.supports_slicing_ordering_in_compound:
part_sql = '({})'.format(part_sql)
parts += ((part_sql, part_args),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == 'union' or (combinator == 'difference' and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == 'union':
combinator_sql += ' ALL'
braces = '({})' if features.supports_slicing_ordering_in_compound else '{}'
sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts))
result = [' {} '.format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, 'supports_select_{}'.format(combinator)):
raise NotSupportedError('{} is not supported on this database backend.'.format(combinator))
result, params = self.get_combinator_sql(combinator, self.query.combinator_all)
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
where, w_params = self.compile(self.where) if self.where is not None else ("", [])
having, h_params = self.compile(self.having) if self.having is not None else ("", [])
result = ['SELECT']
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias))
elif with_col_aliases:
s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result += [', '.join(out_cols), 'FROM', *from_]
params.extend(f_params)
if self.query.select_for_update and self.connection.features.has_select_for_update:
if self.connection.get_autocommit():
raise TransactionManagementError('select_for_update cannot be used outside of a transaction.')
if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit:
raise NotSupportedError(
'LIMIT/OFFSET is not supported with '
'select_for_update on this database backend.'
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
no_key = self.query.select_for_no_key_update
# If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
# backend doesn't support it, raise NotSupportedError to
# prevent a possible deadlock.
if nowait and not self.connection.features.has_select_for_update_nowait:
raise NotSupportedError('NOWAIT is not supported on this database backend.')
elif skip_locked and not self.connection.features.has_select_for_update_skip_locked:
raise NotSupportedError('SKIP LOCKED is not supported on this database backend.')
elif of and not self.connection.features.has_select_for_update_of:
raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.')
elif no_key and not self.connection.features.has_select_for_no_key_update:
raise NotSupportedError(
'FOR NO KEY UPDATE is not supported on this '
'database backend.'
)
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
no_key=no_key,
)
if for_update_part and self.connection.features.for_update_after_from:
result.append(for_update_part)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
order_by = order_by or self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
if self._meta_ordering:
order_by = None
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if self.query.explain_query:
result.insert(0, self.connection.ops.explain_query_prefix(
self.query.explain_format,
**self.query.explain_options
))
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limit_offset:
result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark))
if for_update_part and not self.connection.features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if not alias and with_col_aliases:
alias = 'col%d' % index
if alias:
sub_selects.append("%s.%s" % (
self.connection.ops.quote_name('subquery'),
self.connection.ops.quote_name(alias),
))
else:
select_clone = select.relabeled_clone({select.alias: 'subquery'})
subselect, subparams = select_clone.as_sql(self, self.connection)
sub_selects.append(subselect)
sub_params.extend(subparams)
return 'SELECT %s FROM (%s) subquery' % (
', '.join(sub_selects),
' '.join(result),
), tuple(sub_params + params)
return ' '.join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
"""
Compute the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
        Return a list of column expressions, one for each selected concrete
        field, suitable for use in the SELECT clause.
"""
result = []
if opts is None:
opts = self.query.get_meta()
only_load = self.deferred_to_columns()
start_alias = start_alias or self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if from_parent and model is not None and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if field.model in only_load and field.attname not in only_load[field.model]:
continue
alias = self.query.join_parent_model(opts, model, start_alias,
seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Return a quoted list of fields to use in DISTINCT ON part of the query.
This method can alter the tables in the query, and thus it must be
called before get_from_clause().
"""
result = []
params = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
if name in self.query.annotation_select:
result.append(name)
else:
r, p = self.compile(transform_function(target, alias))
result.append(r)
params.append(p)
return result, params
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
already_seen=None):
"""
Return the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = order == 'DESC'
pieces = name.split(LOOKUP_SEP)
field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless it is the pk
# shortcut or the attribute name of the field that is specified.
if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name and name != 'pk':
# Firstly, avoid infinite loops.
already_seen = already_seen or set()
join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins)
if join_tuple in already_seen:
raise FieldError('Infinite loop caused by ordering.')
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy):
item = item.desc() if descending else item.asc()
if isinstance(item, OrderBy):
results.append((item, False))
continue
results.extend(self.find_ordering_name(item, opts, alias,
order, already_seen))
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets]
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
        get_order_by() and get_distinct() must produce same target columns on
        same input, as the prefixes of get_order_by() and get_distinct() must
match. Executing SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
def get_from_clause(self):
"""
Return a list of strings that are joined together to go after the
"FROM" part of the query, as well as a list any extra parameters that
need to be included. Subclasses, can override this to create a
from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables that are needed. This means the select columns,
ordering, and distinct must be done first.
"""
result = []
params = []
for alias in tuple(self.query.alias_map):
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
result.append(', %s' % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1,
requested=None, restricted=None):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects if f.field.unique
)
return chain(direct_choices, reverse_choices, self.query._filtered_relations)
related_klass_infos = []
if not restricted and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
only_load = self.query.get_loaded_field_names()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
restricted = isinstance(self.query.select_related, dict)
if restricted:
requested = self.query.select_related
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info['related_klass_infos'] = related_klass_infos
for f in opts.fields:
field_model = f.model._meta.concrete_model
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or f.name in requested:
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s" % (
f.name,
", ".join(_get_field_choices()) or '(none)',
)
)
else:
next = False
if not select_related_descend(f, restricted, requested,
only_load.get(field_model)):
continue
klass_info = {
'model': f.remote_field.model,
'field': f,
'reverse': False,
'local_setter': f.set_cached_value,
'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None,
'from_parent': False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins(
[f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next_klass_infos = self.get_related_selections(
select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for f, model in related_fields:
if not select_related_descend(f, restricted, requested,
only_load.get(model), reverse=True):
continue
related_field_name = f.related_query_name()
fields_found.add(related_field_name)
join_info = self.query.setup_joins([related_field_name], opts, root_alias)
alias = join_info.joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
'model': model,
'field': f,
'reverse': True,
'local_setter': f.remote_field.set_cached_value,
'remote_setter': f.set_cached_value,
'from_parent': from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta, from_parent=opts.model)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next = requested.get(f.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select, model._meta, alias, cur_depth + 1,
next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
def local_setter(obj, from_obj):
# Set a reverse fk object when relation is non-empty.
if from_obj:
f.remote_field.set_cached_value(from_obj, obj)
def remote_setter(name, obj, from_obj):
setattr(from_obj, name, obj)
for name in list(requested):
# Filtered relations work only on the topmost level.
if cur_depth > 1:
break
if name in self.query._filtered_relations:
fields_found.add(name)
f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias)
model = join_opts.model
alias = joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
'model': model,
'field': f,
'reverse': True,
'local_setter': local_setter,
'remote_setter': partial(remote_setter, name),
'from_parent': from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next_requested = requested.get(name, {})
next_klass_infos = self.get_related_selections(
select, opts=model._meta, root_alias=alias,
cur_depth=cur_depth + 1, requested=next_requested,
restricted=restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
'Invalid field name(s) given in select_related: %s. '
'Choices are: %s' % (
', '.join(invalid_fields),
', '.join(_get_field_choices()) or '(none)',
)
)
return related_klass_infos
def get_select_for_update_of_arguments(self):
"""
Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
the query.
"""
def _get_parent_klass_info(klass_info):
for parent_model, parent_link in klass_info['model']._meta.parents.items():
parent_list = parent_model._meta.get_parent_list()
yield {
'model': parent_model,
'field': parent_link,
'reverse': False,
'select_fields': [
select_index
for select_index in klass_info['select_fields']
# Selected columns from a model or its parents.
if (
self.select[select_index][0].target.model == parent_model or
self.select[select_index][0].target.model in parent_list
)
],
}
def _get_first_selected_col_from_model(klass_info):
"""
Find the first selected column from a model. If it doesn't exist,
don't lock a model.
select_fields is filled recursively, so it also contains fields
from the parent models.
"""
for select_index in klass_info['select_fields']:
if self.select[select_index][0].target.model == klass_info['model']:
return self.select[select_index][0]
def _get_field_choices():
"""Yield all allowed field paths in breadth-first search order."""
queue = collections.deque([(None, self.klass_info)])
while queue:
parent_path, klass_info = queue.popleft()
if parent_path is None:
path = []
yield 'self'
else:
field = klass_info['field']
if klass_info['reverse']:
field = field.remote_field
path = parent_path + [field.name]
yield LOOKUP_SEP.join(path)
queue.extend(
(path, klass_info)
for klass_info in _get_parent_klass_info(klass_info)
)
queue.extend(
(path, klass_info)
for klass_info in klass_info.get('related_klass_infos', [])
)
result = []
invalid_names = []
for name in self.query.select_for_update_of:
klass_info = self.klass_info
if name == 'self':
col = _get_first_selected_col_from_model(klass_info)
else:
for part in name.split(LOOKUP_SEP):
klass_infos = (
*klass_info.get('related_klass_infos', []),
*_get_parent_klass_info(klass_info),
)
for related_klass_info in klass_infos:
field = related_klass_info['field']
if related_klass_info['reverse']:
field = field.remote_field
if field.name == part:
klass_info = related_klass_info
break
else:
klass_info = None
break
if klass_info is None:
invalid_names.append(name)
continue
col = _get_first_selected_col_from_model(klass_info)
if col is not None:
if self.connection.features.select_for_update_of_column:
result.append(self.compile(col)[0])
else:
result.append(self.quote_name_unless_alias(col.alias))
if invalid_names:
raise FieldError(
'Invalid field name(s) given in select_for_update(of=(...)): %s. '
'Only relational fields followed in the query are allowed. '
'Choices are: %s.' % (
', '.join(invalid_names),
', '.join(_get_field_choices()),
)
)
return result
def deferred_to_columns(self):
"""
Convert the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Return the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb)
return columns
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
converters[i] = (backend_converters + field_converters, expression)
return converters
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
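    # Illustrative note (not part of the upstream module): converters map a
    # column index to the conversion chain for that expression; on SQLite, for
    # example, a DateTimeField column typically gets a converter that turns the
    # stored string back into a datetime before rows are yielded.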
def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False,
chunk_size=GET_ITERATOR_CHUNK_SIZE):
"""Return an iterator over the results from executing this query."""
if results is None:
results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size)
fields = [s[0] for s in self.select[0:self.col_count]]
converters = self.get_converters(fields)
rows = chain.from_iterable(results)
if converters:
rows = self.apply_converters(rows, converters)
if tuple_expected:
rows = map(tuple, rows)
return rows
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
# This is always executed on a query clone, so we can modify self.query
self.query.add_extra({'a': 1}, None, None, None, None, None)
self.query.set_extra_mask(['a'])
return bool(self.execute_sql(SINGLE))
def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
"""
Run the query against the database and return the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
result_type = result_type or NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
if chunked_fetch:
cursor = self.connection.chunked_cursor()
else:
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
# Might fail for server-side cursors (e.g. connection closed)
cursor.close()
raise
if result_type == CURSOR:
# Give the caller the cursor to process and close.
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0:self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor, self.connection.features.empty_fetchmany_value,
self.col_count if self.has_extra_select else None,
chunk_size,
)
if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
try:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further. Use chunked_fetch if requested,
# unless the database doesn't support it.
return list(result)
finally:
# done with the cursor
cursor.close()
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = '%s.%s' % (qn(alias), qn2(columns[index]))
self.query.where.add(
RawSQL('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND')
sql, params = self.as_sql()
return 'EXISTS (%s)' % sql, params
def explain_query(self):
result = list(self.execute_sql())
# Some backends return 1 item tuples with strings, and others return
# tuples with integers and strings. Flatten them out into strings.
for row in result[0]:
if not isinstance(row, str):
yield ' '.join(str(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
returning_fields = None
returning_params = tuple()
def field_as_sql(self, field, val):
"""
Take a field and a value intended to be saved on that field, and
return placeholder SQL and accompanying params. Check for raw values,
expressions, and fields with get_placeholder() defined in that order.
When field is None, consider the value raw and use it as the
placeholder, with no corresponding parameters returned.
"""
if field is None:
# A field value of None means the value is raw.
sql, params = val, []
elif hasattr(val, 'as_sql'):
# This is an expression, let's compile it.
sql, params = self.compile(val)
elif hasattr(field, 'get_placeholder'):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
sql, params = field.get_placeholder(val, self, self.connection), [val]
else:
# Return the common case for the placeholder
sql, params = '%s', [val]
# The following hook is only used by Oracle Spatial, which sometimes
# needs to yield 'NULL' and [] as its placeholder and params instead
# of '%s' and [None]. The 'NULL' placeholder is produced earlier by
# OracleOperations.get_geom_placeholder(). The following line removes
# the corresponding None parameter. See ticket #10888.
params = self.connection.ops.modify_insert_params(sql, params)
return sql, params
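    # Illustrative examples (not part of the upstream module), where char_field
    # is any ordinary field instance without get_placeholder():
    #   field_as_sql(None, 'NOW()')     -> ('NOW()', [])   # raw value
    #   field_as_sql(char_field, 'x')   -> ('%s', ['x'])   # common case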
def prepare_value(self, field, value):
"""
Prepare a value to be used in a query by resolving it if it is an
expression and otherwise calling the field's get_db_prep_save().
"""
if hasattr(value, 'resolve_expression'):
value = value.resolve_expression(self.query, allow_joins=False, for_save=True)
# Don't allow values containing Col expressions. They refer to
# existing columns on a row, but in the case of insert the row
# doesn't exist yet.
if value.contains_column_references:
raise ValueError(
'Failed to insert expression "%s" on %s. F() expressions '
'can only be used to update, not to insert.' % (value, field)
)
if value.contains_aggregate:
raise FieldError(
'Aggregate functions are not allowed in this query '
'(%s=%r).' % (field.name, value)
)
if value.contains_over_clause:
raise FieldError(
'Window expressions are not allowed in this query (%s=%r).'
% (field.name, value)
)
else:
value = field.get_db_prep_save(value, connection=self.connection)
return value
def pre_save_val(self, field, obj):
"""
Get the given field's value off the given obj. pre_save() is used for
things like auto_now on DateTimeField. Skip it if this is a raw query.
"""
if self.query.raw:
return getattr(obj, field.attname)
return field.pre_save(obj, add=True)
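    # Illustrative note (not part of the upstream module): for a
    # DateTimeField(auto_now=True), pre_save() returns the current timestamp at
    # insert time rather than whatever value is currently set on obj.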
def assemble_as_sql(self, fields, value_rows):
"""
Take a sequence of N fields and a sequence of M rows of values, and
generate placeholder SQL and parameters for each field and value.
Return a pair containing:
* a sequence of M rows of N SQL placeholder strings, and
* a sequence of M rows of corresponding parameter values.
Each placeholder string may contain any number of '%s' interpolation
strings, and each parameter row will contain exactly as many params
as the total number of '%s's in the corresponding placeholder row.
"""
if not value_rows:
return [], []
# list of (sql, [params]) tuples for each object to be saved
# Shape: [n_objs][n_fields][2]
rows_of_fields_as_sql = (
(self.field_as_sql(field, v) for field, v in zip(fields, row))
for row in value_rows
)
# tuple like ([sqls], [[params]s]) for each object to be saved
# Shape: [n_objs][2][n_fields]
sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
# Extract separate lists for placeholders and params.
# Each of these has shape [n_objs][n_fields]
placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
# Params for each field are still lists, and need to be flattened.
param_rows = [[p for ps in row for p in ps] for row in param_rows]
return placeholder_rows, param_rows
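    # Illustrative example (not part of the upstream module): with two ordinary
    # fields and value_rows == [[1, 2], [3, 4]] this returns roughly
    #   ((('%s', '%s'), ('%s', '%s')), [[1, 2], [3, 4]])
    # i.e. one placeholder row and one flattened parameter row per object.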
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts)
result = ['%s %s' % (insert_statement, qn(opts.db_table))]
fields = self.query.fields or [opts.pk]
result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
if self.query.fields:
value_rows = [
[self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
for obj in self.query.objs
]
else:
# An empty object.
value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs]
fields = [None]
# Currently the backends just accept values when generating bulk
# queries and generate their own placeholders. Doing that isn't
# necessary and it should be possible to use placeholders and
# expressions in bulk inserts too.
can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert)
placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql(
ignore_conflicts=self.query.ignore_conflicts
)
if self.returning_fields and self.connection.features.can_return_columns_from_insert:
if self.connection.features.can_return_rows_from_bulk_insert:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
params = param_rows
else:
result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
params = [param_rows[0]]
if ignore_conflicts_suffix_sql:
result.append(ignore_conflicts_suffix_sql)
# Skip empty r_sql to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields)
if r_sql:
result.append(r_sql)
params += [self.returning_params]
return [(" ".join(result), tuple(chain.from_iterable(params)))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
if ignore_conflicts_suffix_sql:
result.append(ignore_conflicts_suffix_sql)
return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
else:
if ignore_conflicts_suffix_sql:
result.append(ignore_conflicts_suffix_sql)
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholder_rows, param_rows)
]
def execute_sql(self, returning_fields=None):
assert not (
returning_fields and len(self.query.objs) != 1 and
not self.connection.features.can_return_rows_from_bulk_insert
)
self.returning_fields = returning_fields
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not self.returning_fields:
return []
if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1:
return self.connection.ops.fetch_returned_insert_rows(cursor)
if self.connection.features.can_return_columns_from_insert:
assert len(self.query.objs) == 1
return [self.connection.ops.fetch_returned_insert_columns(cursor, self.returning_params)]
return [(self.connection.ops.last_insert_id(
cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column
),)]
class SQLDeleteCompiler(SQLCompiler):
@cached_property
def single_alias(self):
# Ensure base table is in aliases.
self.query.get_initial_alias()
return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1
def _as_sql(self, query):
result = [
'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table)
]
where, params = self.compile(query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(params)
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
if self.single_alias:
return self._as_sql(self.query)
innerq = self.query.clone()
innerq.__class__ = Query
innerq.clear_select_clause()
pk = self.query.model._meta.pk
innerq.select = [
pk.get_col(self.query.get_initial_alias())
]
outerq = Query(self.query.model)
outerq.where = self.query.where_class()
outerq.add_q(Q(pk__in=innerq))
return self._as_sql(outerq)
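    # Illustrative note (not part of the upstream module): when the delete
    # references more than one table, the query is rewritten roughly as
    # DELETE FROM "app_model" WHERE "app_model"."id" IN (SELECT ...), so the
    # outer DELETE no longer depends on the joined tables.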
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return '', ()
qn = self.quote_name_unless_alias
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, 'resolve_expression'):
val = val.resolve_expression(self.query, allow_joins=False, for_save=True)
if val.contains_aggregate:
raise FieldError(
'Aggregate functions are not allowed in this query '
'(%s=%r).' % (field.name, val)
)
if val.contains_over_clause:
raise FieldError(
'Window expressions are not allowed in this query '
'(%s=%r).' % (field.name, val)
)
elif hasattr(val, 'prepare_database_save'):
if field.remote_field:
val = field.get_db_prep_save(
val.prepare_database_save(field),
connection=self.connection,
)
else:
raise TypeError(
"Tried to update field %s with a model instance, %r. "
"Use a value compatible with %s."
% (field, val, field.__class__.__name__)
)
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, 'get_placeholder'):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = '%s'
name = field.column
if hasattr(val, 'as_sql'):
sql, params = self.compile(val)
values.append('%s = %s' % (qn(name), placeholder % sql))
update_params.extend(params)
elif val is not None:
values.append('%s = %s' % (qn(name), placeholder))
update_params.append(val)
else:
values.append('%s = NULL' % qn(name))
table = self.query.base_table
result = [
'UPDATE %s SET' % qn(table),
', '.join(values),
]
where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Return the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, munge the "where"
conditions to match the format required for (portable) SQL updates.
If multiple updates are required, pull out the id values to update at
this point so that they don't change as a result of the progressive
updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.chain(klass=Query)
query.select_related = False
query.clear_ordering(True)
query.extra = {}
query.select = []
query.add_fields([query.get_meta().pk.name])
super().pre_sql_setup()
must_pre_select = count > 1 and not self.connection.features.update_can_self_select
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.where = self.query.where_class()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
self.query.add_filter(('pk__in', idents))
self.query.related_ids = idents
else:
# The fast path. Filters and updates in one query.
self.query.add_filter(('pk__in', query))
self.query.reset_refcounts(refcounts_before)
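# Illustrative sketch, not part of Django: one way the two-step UPDATE described in
# pre_sql_setup() above is reached. The model and field names are hypothetical. When
# the filter spans a join (or the backend's update_can_self_select is False), the pks
# are resolved first and the UPDATE is then issued as WHERE pk IN (...).
def _example_cross_table_update(Entry):
    # Roughly: SELECT pk FROM entry JOIN author ... WHERE author.name = 'arthur'
    # followed by: UPDATE entry SET status = 'archived' WHERE pk IN (<pks or subquery>)
    return Entry.objects.filter(author__name='arthur').update(status='archived')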
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
sql, params = [], []
for annotation in self.query.annotation_select.values():
ann_sql, ann_params = self.compile(annotation)
ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
sql.append(ann_sql)
params.extend(ann_params)
self.col_count = len(self.query.annotation_select)
sql = ', '.join(sql)
params = tuple(params)
sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery)
params = params + self.query.sub_params
return sql, params
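# Illustrative sketch, not part of Django: one way SQLAggregateCompiler's wrapped form
# is reached. Aggregating over a sliced queryset (model name hypothetical) compiles to
# roughly: SELECT COUNT(...) FROM (SELECT ... LIMIT 10) subquery.
def _example_aggregate_over_subquery(Entry):
    from django.db.models import Count
    return Entry.objects.all()[:10].aggregate(n=Count('pk'))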
def cursor_iter(cursor, sentinel, col_count, itersize):
"""
Yield blocks of rows from a cursor and ensure the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
yield rows if col_count is None else [r[:col_count] for r in rows]
finally:
cursor.close()
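# Illustrative sketch, not part of Django: cursor_iter() in isolation. It calls
# cursor.fetchmany(itersize) until the sentinel (an empty sequence) is returned,
# optionally trims each row to col_count columns, and closes the cursor when done.
def _example_cursor_iter(cursor):
    for chunk in cursor_iter(cursor, sentinel=(), col_count=None, itersize=100):
        for row in chunk:
            pass  # process each row; the cursor is closed once iteration finishes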
|
13ae27e129838354d5cefab0887a9ed6457a1c4cb708936a4825326c8be565a5 | from django.db import ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
gis_enabled = False
allows_group_by_pk = False
allows_group_by_selected_pks = False
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate NULL rows in a nullable
# unique field? All core backends implement this correctly, but other
# databases such as SQL Server do not.
supports_nullable_unique_constraints = True
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists and some fields are nullable but not all of them?
supports_partially_nullable_unique_constraints = True
# Does the backend support initially deferrable unique constraints?
supports_deferrable_unique_constraints = False
can_use_chunked_reads = True
can_return_columns_from_insert = False
can_return_rows_from_bulk_insert = False
has_bulk_insert = True
uses_savepoints = True
can_release_savepoints = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries_with_in = True
has_select_for_update = False
has_select_for_update_nowait = False
has_select_for_update_skip_locked = False
has_select_for_update_of = False
has_select_for_no_key_update = False
# Does the database's SELECT FOR UPDATE OF syntax require a column rather
# than a table?
select_for_update_of_column = False
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does the backend truncate names properly when they are too long?
truncates_names = False
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
    # Does the database driver support same-type temporal data subtraction
    # by returning the type used to store the duration field?
supports_temporal_subtraction = False
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# Does the database have a copy of the zoneinfo database?
has_zoneinfo_database = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Does the backend order NULL values as largest or smallest?
nulls_order_largest = False
# Does the backend support NULLS FIRST and NULLS LAST in ORDER BY?
supports_order_by_nulls_modifier = True
    # Does the backend order NULLS FIRST by default?
order_by_nulls_first = False
# The database's limit on the number of query parameters.
max_query_params = None
# Can an object have an autoincrement primary key of 0? MySQL says No.
allows_auto_pk_0 = True
# Do we need to NULL a ForeignKey out, or can the constraint check be
# deferred
can_defer_constraint_checks = False
# date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
supports_mixed_date_datetime_comparisons = True
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Can the backend introspect the default value of a column?
can_introspect_default = True
# Confirm support for introspected foreign keys
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
can_introspect_foreign_keys = True
# Can the backend introspect an AutoField, instead of an IntegerField?
can_introspect_autofield = False
# Can the backend introspect a BigIntegerField, instead of an IntegerField?
can_introspect_big_integer_field = True
    # Can the backend introspect a BinaryField, instead of a TextField?
    can_introspect_binary_field = True
    # Can the backend introspect a DecimalField, instead of a FloatField?
    can_introspect_decimal_field = True
    # Can the backend introspect a DurationField, instead of a BigIntegerField?
    can_introspect_duration_field = True
    # Can the backend introspect an IPAddressField, instead of a CharField?
    can_introspect_ip_address_field = False
# Can the backend introspect a PositiveIntegerField, instead of an IntegerField?
can_introspect_positive_integer_field = False
# Can the backend introspect a SmallIntegerField, instead of an IntegerField?
can_introspect_small_integer_field = False
# Can the backend introspect a TimeField, instead of a DateTimeField?
can_introspect_time_field = True
# Some backends may not be able to differentiate BigAutoField or
# SmallAutoField from other fields such as AutoField.
introspected_big_auto_field_type = 'BigAutoField'
introspected_small_auto_field_type = 'SmallAutoField'
# Some backends may not be able to differentiate BooleanField from other
# fields such as IntegerField.
introspected_boolean_field_type = 'BooleanField'
# Can the backend introspect the column order (ASC/DESC) for indexes?
supports_index_column_ordering = True
# Does the backend support introspection of materialized views?
can_introspect_materialized_views = False
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
# Does the backend prevent running SQL queries in broken transactions?
atomic_transactions = True
# Can we roll back DDL in a transaction?
can_rollback_ddl = False
# Does it support operations requiring references rename in a transaction?
supports_atomic_references_rename = True
# Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
supports_combined_alters = False
# Does it support foreign keys?
supports_foreign_keys = True
# Can it create foreign key constraints inline when adding columns?
can_create_inline_fk = True
# Does it support CHECK constraints?
supports_column_check_constraints = True
supports_table_check_constraints = True
# Does the backend support introspection of CHECK constraints?
can_introspect_check_constraints = True
# Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
# parameter passing? Note this can be provided by the backend even if not
# supported by the Python driver
supports_paramstyle_pyformat = True
# Does the backend require literal defaults, rather than parameterized ones?
requires_literal_defaults = False
# Does the backend require a connection reset after each material schema change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
closed_cursor_error_class = ProgrammingError
# Does 'a' LIKE 'A' match?
has_case_insensitive_like = True
# Suffix for backends that don't support "SELECT xxx;" queries.
bare_select_suffix = ''
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True
# Does the backend ignore null expressions in GREATEST and LEAST queries unless
# every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
# Defaults to False to allow third-party backends to opt-in.
can_clone_databases = False
# Does the backend consider table names with different casing to
# be equal?
ignores_table_name_case = False
# Place FOR UPDATE right after FROM clause. Used on MSSQL.
for_update_after_from = False
# Combinatorial flags
supports_select_union = True
supports_select_intersection = True
supports_select_difference = True
supports_slicing_ordering_in_compound = False
supports_parentheses_in_compound = True
# Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
# expressions?
supports_aggregate_filter_clause = False
# Does the backend support indexing a TextField?
supports_index_on_text_field = True
# Does the backend support window expressions (expression OVER (...))?
supports_over_clause = False
supports_frame_range_fixed_distance = False
only_supports_unbounded_with_preceding_and_following = False
# Does the backend support CAST with precision?
supports_cast_with_precision = True
# How many second decimals does the database return when casting a value to
# a type with time?
time_cast_precision = 6
# SQL to create a procedure for use by the Django test suite. The
# functionality of the procedure isn't important.
create_test_procedure_without_params_sql = None
create_test_procedure_with_int_param_sql = None
# Does the backend support keyword parameters for cursor.callproc()?
supports_callproc_kwargs = False
# Convert CharField results from bytes to str in database functions.
db_functions_convert_bytes_to_str = False
# What formats does the backend EXPLAIN syntax support?
supported_explain_formats = set()
# Does DatabaseOperations.explain_query_prefix() raise ValueError if
# unknown kwargs are passed to QuerySet.explain()?
validates_explain_options = True
# Does the backend support the default parameter in lead() and lag()?
supports_default_in_lead_lag = True
# Does the backend support ignoring constraint or uniqueness errors during
# INSERT?
supports_ignore_conflicts = True
# Does this backend require casting the results of CASE expressions used
# in UPDATE statements to ensure the expression has the correct type?
requires_casted_case_in_updates = False
# Does the backend support partial indexes (CREATE INDEX ... WHERE ...)?
supports_partial_indexes = True
supports_functions_in_partial_indexes = True
# Does the database allow more than one constraint or index on the same
# field(s)?
allows_multiple_constraints_on_same_fields = True
# Does the backend support boolean expressions in SELECT and GROUP BY
# clauses?
supports_boolean_expr_in_select_clause = True
# Does the backend support JSONField?
supports_json_field = True
# Can the backend introspect a JSONField?
can_introspect_json_field = True
# Does the backend support primitives in JSONField?
supports_primitives_in_json_field = True
# Is there a true datatype for JSON?
has_native_json_field = False
# Does the backend use PostgreSQL-style JSON operators like '->'?
has_json_operators = False
def __init__(self, connection):
self.connection = connection
@cached_property
def supports_explaining_query_execution(self):
"""Does this backend support explaining query execution?"""
return self.connection.ops.explain_prefix is not None
@cached_property
def supports_transactions(self):
"""Confirm support for transactions."""
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
self.connection.set_autocommit(False)
cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
self.connection.rollback()
self.connection.set_autocommit(True)
cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
count, = cursor.fetchone()
cursor.execute('DROP TABLE ROLLBACK_TEST')
return count == 0
def allows_group_by_selected_pks_on_model(self, model):
if not self.allows_group_by_selected_pks:
return False
return model._meta.managed
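# Illustrative sketch, not part of Django: feature flags are read through the active
# connection's `features` instance, so calling code can branch on backend capabilities.
def _example_check_features():
    from django.db import connection
    if connection.features.has_select_for_update:
        pass  # QuerySet.select_for_update() won't raise NotSupportedError here
    return connection.features.max_query_params  # None means "no limit"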
|
e980919c4bf71e1108633b2fce5770d04ceab25265dab4f4a7153b9001ed0758 | import datetime
import decimal
from importlib import import_module
import sqlparse
from django.conf import settings
from django.db import NotSupportedError, transaction
from django.db.backends import utils
from django.utils import timezone
from django.utils.encoding import force_str
class BaseDatabaseOperations:
"""
Encapsulate backend-specific differences, such as the way a backend
performs ordering or calculates the ID of a recently-inserted row.
"""
compiler_module = "django.db.models.sql.compiler"
# Integer field safe ranges by `internal_type` as documented
# in docs/ref/models/fields.txt.
integer_field_ranges = {
'SmallIntegerField': (-32768, 32767),
'IntegerField': (-2147483648, 2147483647),
'BigIntegerField': (-9223372036854775808, 9223372036854775807),
'PositiveBigIntegerField': (0, 9223372036854775807),
'PositiveSmallIntegerField': (0, 32767),
'PositiveIntegerField': (0, 2147483647),
'SmallAutoField': (-32768, 32767),
'AutoField': (-2147483648, 2147483647),
'BigAutoField': (-9223372036854775808, 9223372036854775807),
}
set_operators = {
'union': 'UNION',
'intersection': 'INTERSECT',
'difference': 'EXCEPT',
}
# Mapping of Field.get_internal_type() (typically the model field's class
# name) to the data type to use for the Cast() function, if different from
# DatabaseWrapper.data_types.
cast_data_types = {}
# CharField data type if the max_length argument isn't provided.
cast_char_field_without_max_length = None
# Start and end points for window expressions.
PRECEDING = 'PRECEDING'
FOLLOWING = 'FOLLOWING'
UNBOUNDED_PRECEDING = 'UNBOUNDED ' + PRECEDING
UNBOUNDED_FOLLOWING = 'UNBOUNDED ' + FOLLOWING
CURRENT_ROW = 'CURRENT ROW'
    # Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
explain_prefix = None
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
        Return the maximum allowed batch size for the backend. The fields
        are the fields to be inserted in the batch; objs contains all the
        objects to be inserted.
"""
return len(objs)
def cache_key_culling_sql(self):
"""
Return an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
def unification_cast_sql(self, output_field):
"""
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
"""
return '%s'
def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')
def date_interval_sql(self, timedelta):
"""
Implement the date interval functionality for expressions.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')
def date_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date field field_name to a date object with only
the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method.')
def datetime_cast_date_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to date value.
"""
raise NotImplementedError(
'subclasses of BaseDatabaseOperations may require a '
'datetime_cast_date_sql() method.'
)
def datetime_cast_time_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to time value.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method')
def datetime_extract_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')
def time_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time field field_name to a time object with
only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a time_trunc_sql() method')
def time_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
"""
return self.date_extract_sql(lookup_type, field_name)
def json_cast_text_sql(self, field_name):
"""Return the SQL to cast a JSON value to text value."""
raise NotImplementedError(
'subclasses of BaseDatabaseOperations may require a '
'json_cast_text_sql() method'
)
def deferrable_sql(self):
"""
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
"""
return ''
def distinct_sql(self, fields, params):
"""
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
"""
if fields:
raise NotSupportedError('DISTINCT ON fields is not supported by this database backend')
else:
return ['DISTINCT'], []
def fetch_returned_insert_columns(self, cursor, returning_params):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
"""
return cursor.fetchone()
def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return '%s'
def force_no_ordering(self):
"""
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
"""
return []
def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return the FOR UPDATE SQL clause to lock rows for an update operation.
"""
return 'FOR%s UPDATE%s%s%s' % (
' NO KEY' if no_key else '',
' OF %s' % ', '.join(of) if of else '',
' NOWAIT' if nowait else '',
' SKIP LOCKED' if skip_locked else '',
)
def _get_limit_offset_params(self, low_mark, high_mark):
offset = low_mark or 0
if high_mark is not None:
return (high_mark - offset), offset
elif offset:
return self.connection.ops.no_limit_value(), offset
return None, offset
def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return ' '.join(sql for sql in (
('LIMIT %d' % limit) if limit else None,
('OFFSET %d' % offset) if offset else None,
) if sql)
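    # Example of the two methods above: for a queryset slice [5:15], low_mark=5
    # and high_mark=15 give (limit, offset) == (10, 5), i.e. "LIMIT 10 OFFSET 5".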
def last_executed_query(self, cursor, sql, params):
"""
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain string values.
def to_string(s):
return force_str(s, strings_only=True, errors='replace')
if isinstance(params, (list, tuple)):
u_params = tuple(to_string(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_string(k): to_string(v) for k, v in params.items()}
return "QUERY = %r - PARAMS = %r" % (sql, u_params)
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type, internal_type=None):
"""
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Return the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
        Return the value to use for the LIMIT when we want "LIMIT infinity".
        Return None if the limit clause can be omitted in this case.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')
def pk_default_value(self):
"""
Return the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return 'DEFAULT'
def prepare_sql_script(self, sql):
"""
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
return [
sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql) if statement
]
def process_clob(self, value):
"""
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_columns(self, fields):
"""
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')
def random_function_sql(self):
"""Return an SQL expression that returns a random value."""
return 'RANDOM()'
def regex_lookup(self, lookup_type):
"""
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')
def savepoint_create_sql(self, sid):
"""
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid)
def savepoint_commit_sql(self, sid):
"""
Return the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
"""
Return the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
def set_time_zone_sql(self):
"""
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
"""
return ''
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
"""
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing to the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations must provide an sql_flush() method')
def execute_sql_flush(self, sql_list):
"""Execute a list of SQL statements to flush the database."""
with transaction.atomic(
using=self.connection.alias,
savepoint=self.connection.features.can_rollback_ddl,
):
with self.connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def sequence_reset_by_name_sql(self, style, sequences):
"""
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""Return the SQL statement required to start a transaction."""
return "BEGIN;"
def end_transaction_sql(self, success=True):
"""Return the SQL statement required to end a transaction."""
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
"""
return ''
def prep_for_like_query(self, x):
"""Prepare a value for use in a LIKE query."""
return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
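    # For example, prep_for_like_query("100%_done") returns "100\%\_done", and a
    # literal backslash is doubled, so the value can be embedded safely in a LIKE
    # pattern.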
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
"""
return value
def adapt_unknown_value(self, value):
"""
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return str(value)
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
return str(value)
def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return str(value)
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places)
def adapt_ipaddressfield_value(self, value):
"""
Transform a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None
def year_lookup_bounds_for_date_field(self, value):
"""
        Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second]
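    # The method above, given 2020, returns the pair ['2020-01-01', '2020-12-31']
    # once the base adapt_datefield_value() has turned the dates into strings.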
def year_lookup_bounds_for_datetime_field(self, value):
"""
        Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second]
def get_db_converters(self, expression):
"""
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
        format; this is the hook for converter functions.
"""
return []
def convert_durationfield_value(self, value, expression, connection):
if value is not None:
return datetime.timedelta(0, 0, value)
def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
"""
pass
def conditional_expression_supported_in_where_clause(self, expression):
"""
Return True, if the conditional expression is supported in the WHERE
clause.
"""
return True
def combine_expression(self, connector, sub_expressions):
"""
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
"""
conn = ' %s ' % connector
return conn.join(sub_expressions)
def combine_duration_expression(self, connector, sub_expressions):
return self.combine_expression(connector, sub_expressions)
def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return '%s'
def modify_insert_params(self, placeholder, params):
"""
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type]
def subtract_temporals(self, internal_type, lhs, rhs):
if self.connection.features.supports_temporal_subtraction:
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return '(%s - %s)' % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params)
raise NotSupportedError("This backend does not support %s subtraction." % internal_type)
def window_frame_start(self, start):
if isinstance(start, int):
if start < 0:
return '%d %s' % (abs(start), self.PRECEDING)
elif start == 0:
return self.CURRENT_ROW
elif start is None:
return self.UNBOUNDED_PRECEDING
raise ValueError("start argument must be a negative integer, zero, or None, but got '%s'." % start)
def window_frame_end(self, end):
if isinstance(end, int):
if end == 0:
return self.CURRENT_ROW
elif end > 0:
return '%d %s' % (end, self.FOLLOWING)
elif end is None:
return self.UNBOUNDED_FOLLOWING
raise ValueError("end argument must be a positive integer, zero, or None, but got '%s'." % end)
def window_frame_rows_start_end(self, start=None, end=None):
"""
Return SQL for start and end points in an OVER clause window frame.
"""
if not self.connection.features.supports_over_clause:
raise NotSupportedError('This backend does not support window expressions.')
return self.window_frame_start(start), self.window_frame_end(end)
def window_frame_range_start_end(self, start=None, end=None):
start_, end_ = self.window_frame_rows_start_end(start, end)
if (
self.connection.features.only_supports_unbounded_with_preceding_and_following and
((start and start < 0) or (end and end > 0))
):
raise NotSupportedError(
'%s only supports UNBOUNDED together with PRECEDING and '
'FOLLOWING.' % self.connection.display_name
)
return start_, end_
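    # Example of the frame helpers above: window_frame_rows_start_end(start=-3, end=0)
    # yields ('3 PRECEDING', 'CURRENT ROW'), while (None, None) yields the
    # UNBOUNDED PRECEDING / UNBOUNDED FOLLOWING pair.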
def explain_query_prefix(self, format=None, **options):
if not self.connection.features.supports_explaining_query_execution:
raise NotSupportedError('This backend does not support explaining query execution.')
if format:
supported_formats = self.connection.features.supported_explain_formats
normalized_format = format.upper()
if normalized_format not in supported_formats:
msg = '%s is not a recognized format.' % normalized_format
if supported_formats:
msg += ' Allowed formats: %s' % ', '.join(sorted(supported_formats))
raise ValueError(msg)
if options:
raise ValueError('Unknown options: %s' % ', '.join(sorted(options.keys())))
return self.explain_prefix
def insert_statement(self, ignore_conflicts=False):
return 'INSERT INTO'
def ignore_conflicts_suffix_sql(self, ignore_conflicts=None):
return ''
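# Illustrative sketch, not part of Django: operations are reached through
# connection.ops; quote_name() and max_name_length() are defined above.
def _example_use_operations():
    from django.db import connection
    name = connection.ops.quote_name('my table')  # backend-specific quoting
    return name, connection.ops.max_name_length()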
|
ead194780716b71aea0f48bf3a508a330b441baa3e72d479b0fe33f00107e15c | import operator
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
allows_group_by_pk = True
related_fields_match_type = True
# MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME.
allow_sliced_subqueries_with_in = False
has_select_for_update = True
supports_forward_references = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
can_introspect_autofield = True
can_introspect_binary_field = False
can_introspect_duration_field = False
can_introspect_small_integer_field = True
can_introspect_positive_integer_field = True
introspected_boolean_field_type = 'IntegerField'
supports_index_column_ordering = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
allows_auto_pk_0 = False
can_release_savepoints = True
atomic_transactions = False
can_clone_databases = True
supports_temporal_subtraction = True
supports_select_intersection = False
supports_select_difference = False
supports_slicing_ordering_in_compound = True
supports_index_on_text_field = False
has_case_insensitive_like = False
create_test_procedure_without_params_sql = """
CREATE PROCEDURE test_procedure ()
BEGIN
DECLARE V_I INTEGER;
SET V_I = 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE test_procedure (P_I INTEGER)
BEGIN
DECLARE V_I INTEGER;
SET V_I = P_I;
END;
"""
db_functions_convert_bytes_to_str = True
# Neither MySQL nor MariaDB support partial indexes.
supports_partial_indexes = False
supports_order_by_nulls_modifier = False
order_by_nulls_first = True
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
with self.connection.cursor() as cursor:
cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
result = cursor.fetchone()
return result[0]
@cached_property
def update_can_self_select(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 3, 2)
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != 'MyISAM'
@cached_property
def can_return_columns_from_insert(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 5, 0)
can_return_rows_from_bulk_insert = property(operator.attrgetter('can_return_columns_from_insert'))
@cached_property
def has_zoneinfo_database(self):
# Test if the time zone definitions are installed. CONVERT_TZ returns
        # NULL if the 'UTC' timezone isn't loaded into the mysql.time_zone table.
with self.connection.cursor() as cursor:
cursor.execute("SELECT CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC')")
return cursor.fetchone()[0] is not None
@cached_property
def is_sql_auto_is_null_enabled(self):
with self.connection.cursor() as cursor:
cursor.execute('SELECT @@SQL_AUTO_IS_NULL')
result = cursor.fetchone()
return result and result[0] == 1
@cached_property
def supports_over_clause(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 2)
supports_frame_range_fixed_distance = property(operator.attrgetter('supports_over_clause'))
@cached_property
def supports_column_check_constraints(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 2, 1)
return self.connection.mysql_version >= (8, 0, 16)
supports_table_check_constraints = property(operator.attrgetter('supports_column_check_constraints'))
@cached_property
def can_introspect_check_constraints(self):
if self.connection.mysql_is_mariadb:
version = self.connection.mysql_version
return (version >= (10, 2, 22) and version < (10, 3)) or version >= (10, 3, 10)
return self.connection.mysql_version >= (8, 0, 16)
@cached_property
def has_select_for_update_skip_locked(self):
return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_nowait(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 3, 0)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def supports_explain_analyze(self):
return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (8, 0, 18)
@cached_property
def supported_explain_formats(self):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other
# backends.
formats = {'JSON', 'TEXT', 'TRADITIONAL'}
if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 16):
formats.add('TREE')
return formats
@cached_property
def supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
return self._mysql_storage_engine != 'MyISAM'
@cached_property
def ignores_table_name_case(self):
with self.connection.cursor() as cursor:
cursor.execute('SELECT @@LOWER_CASE_TABLE_NAMES')
result = cursor.fetchone()
return result and result[0] != 0
@cached_property
def supports_default_in_lead_lag(self):
# To be added in https://jira.mariadb.org/browse/MDEV-12981.
return not self.connection.mysql_is_mariadb
@cached_property
def supports_json_field(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 2, 7)
return self.connection.mysql_version >= (5, 7, 8)
@cached_property
def can_introspect_json_field(self):
if self.connection.mysql_is_mariadb:
return self.supports_json_field and self.can_introspect_check_constraints
return self.supports_json_field
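# Illustrative sketch, not part of Django: supports_over_clause above gates window
# expressions, so portable code checks the flag before building one.
def _example_guard_window_expression():
    from django.db import connection
    from django.db.models import F, Window
    from django.db.models.functions import Rank
    if not connection.features.supports_over_clause:
        return None  # MySQL < 8.0.2 would raise NotSupportedError
    return Window(expression=Rank(), order_by=F('pk').desc())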
|
75f6b3aff74878145ae7bb112a24f6e53c8b682e07b8d6e1de6e39392fae72c4 | import uuid
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import timezone
from django.utils.duration import duration_microseconds
from django.utils.encoding import force_str
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = "django.db.backends.mysql.compiler"
# MySQL stores positive fields as UNSIGNED ints.
integer_field_ranges = {
**BaseDatabaseOperations.integer_field_ranges,
'PositiveSmallIntegerField': (0, 65535),
'PositiveIntegerField': (0, 4294967295),
'PositiveBigIntegerField': (0, 18446744073709551615),
}
cast_data_types = {
'AutoField': 'signed integer',
'BigAutoField': 'signed integer',
'SmallAutoField': 'signed integer',
'CharField': 'char(%(max_length)s)',
'DecimalField': 'decimal(%(max_digits)s, %(decimal_places)s)',
'TextField': 'char',
'IntegerField': 'signed integer',
'BigIntegerField': 'signed integer',
'SmallIntegerField': 'signed integer',
'PositiveBigIntegerField': 'unsigned integer',
'PositiveIntegerField': 'unsigned integer',
'PositiveSmallIntegerField': 'unsigned integer',
}
cast_char_field_without_max_length = 'char'
explain_prefix = 'EXPLAIN'
def date_extract_sql(self, lookup_type, field_name):
# https://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
if lookup_type == 'week_day':
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
return "DAYOFWEEK(%s)" % field_name
elif lookup_type == 'iso_week_day':
# WEEKDAY() returns an integer, 0-6, Monday=0.
return "WEEKDAY(%s) + 1" % field_name
elif lookup_type == 'week':
# Override the value of default_week_format for consistency with
# other database backends.
# Mode 3: Monday, 1-53, with 4 or more days this year.
return "WEEK(%s, 3)" % field_name
elif lookup_type == 'iso_year':
# Get the year part from the YEARWEEK function, which returns a
# number as year * 100 + week.
return "TRUNCATE(YEARWEEK(%s, 3), -2) / 100" % field_name
else:
# EXTRACT returns 1-53 based on ISO-8601 for the week number.
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
def date_trunc_sql(self, lookup_type, field_name):
fields = {
'year': '%%Y-01-01',
'month': '%%Y-%%m-01',
} # Use double percents to escape.
if lookup_type in fields:
format_str = fields[lookup_type]
return "CAST(DATE_FORMAT(%s, '%s') AS DATE)" % (field_name, format_str)
elif lookup_type == 'quarter':
return "MAKEDATE(YEAR(%s), 1) + INTERVAL QUARTER(%s) QUARTER - INTERVAL 1 QUARTER" % (
field_name, field_name
)
elif lookup_type == 'week':
return "DATE_SUB(%s, INTERVAL WEEKDAY(%s) DAY)" % (
field_name, field_name
)
else:
return "DATE(%s)" % (field_name)
def _prepare_tzname_delta(self, tzname):
if '+' in tzname:
return tzname[tzname.find('+'):]
elif '-' in tzname:
return tzname[tzname.find('-'):]
return tzname
def _convert_field_to_tz(self, field_name, tzname):
if settings.USE_TZ and self.connection.timezone_name != tzname:
field_name = "CONVERT_TZ(%s, '%s', '%s')" % (
field_name,
self.connection.timezone_name,
self._prepare_tzname_delta(tzname),
)
return field_name
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "DATE(%s)" % field_name
def datetime_cast_time_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "TIME(%s)" % field_name
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return self.date_extract_sql(lookup_type, field_name)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
if lookup_type == 'quarter':
            return (
                "CAST(DATE_FORMAT(MAKEDATE(YEAR({field_name}), 1) + "
                "INTERVAL QUARTER({field_name}) QUARTER - "
                "INTERVAL 1 QUARTER, '%%Y-%%m-01 00:00:00') AS DATETIME)"
            ).format(field_name=field_name)
if lookup_type == 'week':
return (
"CAST(DATE_FORMAT(DATE_SUB({field_name}, "
"INTERVAL WEEKDAY({field_name}) DAY), "
"'%%Y-%%m-%%d 00:00:00') AS DATETIME)"
).format(field_name=field_name)
try:
i = fields.index(lookup_type) + 1
except ValueError:
sql = field_name
else:
format_str = ''.join(format[:i] + format_def[i:])
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
return sql
def time_trunc_sql(self, lookup_type, field_name):
fields = {
'hour': '%%H:00:00',
'minute': '%%H:%%i:00',
'second': '%%H:%%i:%%s',
} # Use double percents to escape.
if lookup_type in fields:
format_str = fields[lookup_type]
return "CAST(DATE_FORMAT(%s, '%s') AS TIME)" % (field_name, format_str)
else:
return "TIME(%s)" % (field_name)
def date_interval_sql(self, timedelta):
return 'INTERVAL %s MICROSECOND' % duration_microseconds(timedelta)
def fetch_returned_insert_rows(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the tuple of returned data.
"""
return cursor.fetchall()
def format_for_duration_arithmetic(self, sql):
return 'INTERVAL %s MICROSECOND' % sql
def force_no_ordering(self):
"""
"ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
columns. If no ordering would otherwise be applied, we don't want any
implicit sorting going on.
"""
return [(None, ("NULL", [], False))]
def last_executed_query(self, cursor, sql, params):
# With MySQLdb, cursor objects have an (undocumented) "_executed"
# attribute where the exact query sent to the database is saved.
# See MySQLdb/cursors.py in the source distribution.
# MySQLdb returns string, PyMySQL bytes.
return force_str(getattr(cursor, '_executed', None), errors='replace')
def no_limit_value(self):
# 2**64 - 1, as recommended by the MySQL documentation
return 18446744073709551615
def quote_name(self, name):
if name.startswith("`") and name.endswith("`"):
return name # Quoting once is enough.
return "`%s`" % name
def random_function_sql(self):
return 'RAND()'
def return_insert_columns(self, fields):
# MySQL and MariaDB < 10.5.0 don't support an INSERT...RETURNING
# statement.
if not fields:
return '', ()
columns = [
'%s.%s' % (
self.quote_name(field.model._meta.db_table),
self.quote_name(field.column),
) for field in fields
]
return 'RETURNING %s' % ', '.join(columns), ()
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
if not tables:
return []
sql = ['SET FOREIGN_KEY_CHECKS = 0;']
if reset_sequences:
# It's faster to TRUNCATE tables that require a sequence reset
# since ALTER TABLE AUTO_INCREMENT is slower than TRUNCATE.
sql.extend(
'%s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
style.SQL_FIELD(self.quote_name(table_name)),
) for table_name in tables
)
else:
# Otherwise issue a simple DELETE since it's faster than TRUNCATE
# and preserves sequences.
sql.extend(
'%s %s %s;' % (
style.SQL_KEYWORD('DELETE'),
style.SQL_KEYWORD('FROM'),
style.SQL_FIELD(self.quote_name(table_name)),
) for table_name in tables
)
sql.append('SET FOREIGN_KEY_CHECKS = 1;')
return sql
def sequence_reset_by_name_sql(self, style, sequences):
return [
'%s %s %s %s = 1;' % (
style.SQL_KEYWORD('ALTER'),
style.SQL_KEYWORD('TABLE'),
style.SQL_FIELD(self.quote_name(sequence_info['table'])),
style.SQL_FIELD('AUTO_INCREMENT'),
) for sequence_info in sequences
]
def validate_autopk_value(self, value):
# MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
if value == 0:
raise ValueError('The database backend does not accept 0 as a '
'value for AutoField.')
return value
def adapt_datetimefield_value(self, value):
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
# MySQL doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = timezone.make_naive(value, self.connection.timezone)
else:
raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
return str(value)
def adapt_timefield_value(self, value):
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, 'resolve_expression'):
return value
# MySQL doesn't support tz-aware times
if timezone.is_aware(value):
raise ValueError("MySQL backend does not support timezone-aware times.")
return str(value)
def max_name_length(self):
return 64
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
return "VALUES " + values_sql
def combine_expression(self, connector, sub_expressions):
if connector == '^':
return 'POW(%s)' % ','.join(sub_expressions)
# Convert the result to a signed integer since MySQL's binary operators
# return an unsigned integer.
elif connector in ('&', '|', '<<', '#'):
connector = '^' if connector == '#' else connector
return 'CONVERT(%s, SIGNED)' % connector.join(sub_expressions)
elif connector == '>>':
lhs, rhs = sub_expressions
return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
return super().combine_expression(connector, sub_expressions)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
internal_type = expression.output_field.get_internal_type()
if internal_type in ['BooleanField', 'NullBooleanField']:
converters.append(self.convert_booleanfield_value)
elif internal_type == 'DateTimeField':
if settings.USE_TZ:
converters.append(self.convert_datetimefield_value)
elif internal_type == 'UUIDField':
converters.append(self.convert_uuidfield_value)
return converters
def convert_booleanfield_value(self, value, expression, connection):
if value in (0, 1):
value = bool(value)
return value
def convert_datetimefield_value(self, value, expression, connection):
if value is not None:
value = timezone.make_aware(value, self.connection.timezone)
return value
def convert_uuidfield_value(self, value, expression, connection):
if value is not None:
value = uuid.UUID(value)
return value
def binary_placeholder_sql(self, value):
return '_binary %s' if value is not None and not hasattr(value, 'as_sql') else '%s'
def subtract_temporals(self, internal_type, lhs, rhs):
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
if internal_type == 'TimeField':
if self.connection.mysql_is_mariadb:
# MariaDB includes the microsecond component in TIME_TO_SEC as
# a decimal. MySQL returns an integer without microseconds.
return 'CAST((TIME_TO_SEC(%(lhs)s) - TIME_TO_SEC(%(rhs)s)) * 1000000 AS SIGNED)' % {
'lhs': lhs_sql, 'rhs': rhs_sql
}, (*lhs_params, *rhs_params)
return (
"((TIME_TO_SEC(%(lhs)s) * 1000000 + MICROSECOND(%(lhs)s)) -"
" (TIME_TO_SEC(%(rhs)s) * 1000000 + MICROSECOND(%(rhs)s)))"
) % {'lhs': lhs_sql, 'rhs': rhs_sql}, tuple(lhs_params) * 2 + tuple(rhs_params) * 2
params = (*rhs_params, *lhs_params)
return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), params
def explain_query_prefix(self, format=None, **options):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other backends.
if format and format.upper() == 'TEXT':
format = 'TRADITIONAL'
elif not format and 'TREE' in self.connection.features.supported_explain_formats:
# Use TREE by default (if supported) as it's more informative.
format = 'TREE'
analyze = options.pop('analyze', False)
prefix = super().explain_query_prefix(format, **options)
if analyze and self.connection.features.supports_explain_analyze:
# MariaDB uses ANALYZE instead of EXPLAIN ANALYZE.
prefix = 'ANALYZE' if self.connection.mysql_is_mariadb else prefix + ' ANALYZE'
if format and not (analyze and not self.connection.mysql_is_mariadb):
# Only MariaDB supports the analyze option with formats.
prefix += ' FORMAT=%s' % format
return prefix
def regex_lookup(self, lookup_type):
# REGEXP BINARY doesn't work correctly in MySQL 8+ and REGEXP_LIKE
# doesn't exist in MySQL 5.x or in MariaDB.
if self.connection.mysql_version < (8, 0, 0) or self.connection.mysql_is_mariadb:
if lookup_type == 'regex':
return '%s REGEXP BINARY %s'
return '%s REGEXP %s'
match_option = 'c' if lookup_type == 'regex' else 'i'
return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option
def insert_statement(self, ignore_conflicts=False):
return 'INSERT IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts)
def lookup_cast(self, lookup_type, internal_type=None):
lookup = '%s'
if internal_type == 'JSONField':
if self.connection.mysql_is_mariadb or lookup_type in (
'iexact', 'contains', 'icontains', 'startswith', 'istartswith',
'endswith', 'iendswith', 'regex', 'iregex',
):
lookup = 'JSON_UNQUOTE(%s)'
return lookup
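# Illustrative sketch, not part of Django: insert_statement() above turns
# ignore_conflicts=True into INSERT IGNORE INTO. The model name is hypothetical.
def _example_insert_ignore(Entry, rows):
    return Entry.objects.bulk_create(rows, ignore_conflicts=True)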
|
6a115090614d7582c22a774df2ae45867b4722f8b5a3bd16f8698fa0826479f3 | import operator
from django.db import InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
allows_group_by_selected_pks = True
can_return_columns_from_insert = True
can_return_rows_from_bulk_insert = True
has_real_datatype = True
has_native_uuid_field = True
has_native_duration_field = True
has_native_json_field = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_of = True
has_select_for_update_skip_locked = True
has_select_for_no_key_update = True
can_release_savepoints = True
supports_tablespaces = True
supports_transactions = True
can_introspect_autofield = True
can_introspect_ip_address_field = True
can_introspect_materialized_views = True
can_introspect_small_integer_field = True
can_distinct_on_fields = True
can_rollback_ddl = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
has_case_insensitive_like = False
greatest_least_ignores_nulls = True
can_clone_databases = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
create_test_procedure_without_params_sql = """
CREATE FUNCTION test_procedure () RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := 1;
END;
$$ LANGUAGE plpgsql;"""
create_test_procedure_with_int_param_sql = """
CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := P_I;
END;
$$ LANGUAGE plpgsql;"""
requires_casted_case_in_updates = True
supports_over_clause = True
only_supports_unbounded_with_preceding_and_following = True
supports_aggregate_filter_clause = True
supported_explain_formats = {'JSON', 'TEXT', 'XML', 'YAML'}
validates_explain_options = False # A query will error on invalid options.
supports_deferrable_unique_constraints = True
has_json_operators = True
@cached_property
def is_postgresql_10(self):
return self.connection.pg_version >= 100000
@cached_property
def is_postgresql_11(self):
return self.connection.pg_version >= 110000
@cached_property
def is_postgresql_12(self):
return self.connection.pg_version >= 120000
has_brin_autosummarize = property(operator.attrgetter('is_postgresql_10'))
has_websearch_to_tsquery = property(operator.attrgetter('is_postgresql_11'))
supports_table_partitions = property(operator.attrgetter('is_postgresql_10'))
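# Illustrative sketch, not part of Django: can_distinct_on_fields above is what allows
# QuerySet.distinct(*fields) to compile to DISTINCT ON (...). Model name hypothetical.
def _example_distinct_on(Entry):
    return Entry.objects.order_by('author', '-pk').distinct('author')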
|
bf01168b52404d3669faec51a98d0f0e099dfe5e1bf2645796786b18c7315105 | import re
from collections import namedtuple
import sqlparse
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo,
)
from django.db.models import Index
from django.utils.regex_helper import _lazy_re_compile
FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('pk', 'has_json_constraint'))
field_size_re = _lazy_re_compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$')
def get_field_size(name):
""" Extract the size number from a "varchar(11)" type name """
m = field_size_re.search(name)
return int(m[1]) if m else None
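# For example, get_field_size('varchar(30)') returns 30, while
# get_field_size('text') returns None because no length is declared.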
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict:
# Maps SQL types to Django Field types. Some of the SQL types have multiple
# entries here because SQLite allows for anything and doesn't normalize the
# field type; it uses whatever was given.
base_data_types_reverse = {
'bool': 'BooleanField',
'boolean': 'BooleanField',
'smallint': 'SmallIntegerField',
'smallint unsigned': 'PositiveSmallIntegerField',
'smallinteger': 'SmallIntegerField',
'int': 'IntegerField',
'integer': 'IntegerField',
'bigint': 'BigIntegerField',
'integer unsigned': 'PositiveIntegerField',
'bigint unsigned': 'PositiveBigIntegerField',
'decimal': 'DecimalField',
'real': 'FloatField',
'text': 'TextField',
'char': 'CharField',
'varchar': 'CharField',
'blob': 'BinaryField',
'date': 'DateField',
'datetime': 'DateTimeField',
'time': 'TimeField',
}
def __getitem__(self, key):
key = key.lower().split('(', 1)[0].strip()
return self.base_data_types_reverse[key]
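    # e.g. looking up 'varchar(30)' or 'VARCHAR' both yield 'CharField';
    # unknown type names raise KeyError, as with a plain dict lookup.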
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = FlexibleFieldLookupDict()
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if description.pk and field_type in {'BigIntegerField', 'IntegerField', 'SmallIntegerField'}:
# No support for BigAutoField or SmallAutoField as SQLite treats
# all integer primary keys as signed 64-bit integers.
return 'AutoField'
if description.has_json_constraint:
return 'JSONField'
return field_type
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
# Skip the sqlite_sequence system table used for autoincrement key
# generation.
cursor.execute("""
SELECT name, type FROM sqlite_master
WHERE type in ('table', 'view') AND NOT name='sqlite_sequence'
ORDER BY name""")
return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(table_name))
table_info = cursor.fetchall()
json_columns = set()
if self.connection.features.can_introspect_json_field:
for line in table_info:
column = line[1]
json_constraint_sql = '%%json_valid("%s")%%' % column
has_json_constraint = cursor.execute("""
SELECT sql
FROM sqlite_master
WHERE
type = 'table' AND
name = %s AND
sql LIKE %s
""", [table_name, json_constraint_sql]).fetchone()
if has_json_constraint:
json_columns.add(column)
return [
FieldInfo(
name, data_type, None, get_field_size(data_type), None, None,
not notnull, default, pk == 1, name in json_columns
)
for cid, name, data_type, notnull, default, pk in table_info
]
def get_sequences(self, cursor, table_name, table_fields=()):
pk_col = self.get_primary_key_column(cursor, table_name)
return [{'table': table_name, 'column': pk_col}]
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
# Dictionary of relations to return
relations = {}
# Schema for this table
cursor.execute(
"SELECT sql, type FROM sqlite_master "
"WHERE tbl_name = %s AND type IN ('table', 'view')",
[table_name]
)
create_sql, table_type = cursor.fetchone()
if table_type == 'view':
            # It's a view, so there are no relations to return.
return relations
results = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
for field_desc in results.split(','):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search(r'references (\S*) ?\(["|]?(.*)["|]?\)', field_desc, re.I)
if not m:
continue
table, column = [s.strip('"') for s in m.groups()]
if field_desc.startswith("FOREIGN KEY"):
# Find name of the target FK field
m = re.match(r'FOREIGN KEY\s*\(([^\)]*)\).*', field_desc, re.I)
field_name = m[1].strip('"')
else:
field_name = field_desc.split()[0].strip('"')
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
result = cursor.fetchall()[0]
other_table_results = result[0].strip()
li, ri = other_table_results.index('('), other_table_results.rindex(')')
other_table_results = other_table_results[li + 1:ri]
for other_desc in other_table_results.split(','):
other_desc = other_desc.strip()
if other_desc.startswith('UNIQUE'):
continue
other_name = other_desc.split(' ', 1)[0].strip('"')
if other_name == column:
relations[field_name] = (other_name, table)
break
return relations
def get_key_columns(self, cursor, table_name):
"""
Return a list of (column_name, referenced_table_name, referenced_column_name)
for all key columns in given table.
"""
key_columns = []
# Schema for this table
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
results = cursor.fetchone()[0].strip()
results = results[results.index('(') + 1:results.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
for field_index, field_desc in enumerate(results.split(',')):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search(r'"(.*)".*references (.*) \(["|](.*)["|]\)', field_desc, re.I)
if not m:
continue
# This will append (column_name, referenced_table_name, referenced_column_name) to key_columns
key_columns.append(tuple(s.strip('"') for s in m.groups()))
return key_columns
def get_primary_key_column(self, cursor, table_name):
"""Return the column name of the primary key for the given table."""
# Don't use PRAGMA because that causes issues with some transactions
cursor.execute(
"SELECT sql, type FROM sqlite_master "
"WHERE tbl_name = %s AND type IN ('table', 'view')",
[table_name]
)
row = cursor.fetchone()
if row is None:
raise ValueError("Table %s does not exist" % table_name)
create_sql, table_type = row
if table_type == 'view':
# Views don't have a primary key.
return None
fields_sql = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')]
for field_desc in fields_sql.split(','):
field_desc = field_desc.strip()
m = re.match(r'(?:(?:["`\[])(.*)(?:["`\]])|(\w+)).*PRIMARY KEY.*', field_desc)
if m:
return m[1] if m[1] else m[2]
return None
def _get_foreign_key_constraints(self, cursor, table_name):
constraints = {}
cursor.execute('PRAGMA foreign_key_list(%s)' % self.connection.ops.quote_name(table_name))
for row in cursor.fetchall():
# Remaining on_update/on_delete/match values are of no interest.
id_, _, table, from_, to = row[:5]
constraints['fk_%d' % id_] = {
'columns': [from_],
'primary_key': False,
'unique': False,
'foreign_key': (table, to),
'check': False,
'index': False,
}
return constraints
def _parse_column_or_constraint_definition(self, tokens, columns):
token = None
is_constraint_definition = None
field_name = None
constraint_name = None
unique = False
unique_columns = []
check = False
check_columns = []
braces_deep = 0
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, '('):
braces_deep += 1
elif token.match(sqlparse.tokens.Punctuation, ')'):
braces_deep -= 1
if braces_deep < 0:
# End of columns and constraints for table definition.
break
elif braces_deep == 0 and token.match(sqlparse.tokens.Punctuation, ','):
# End of current column or constraint definition.
break
# Detect column or constraint definition by first token.
if is_constraint_definition is None:
is_constraint_definition = token.match(sqlparse.tokens.Keyword, 'CONSTRAINT')
if is_constraint_definition:
continue
if is_constraint_definition:
# Detect constraint name by second token.
if constraint_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
constraint_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
constraint_name = token.value[1:-1]
# Start constraint columns parsing after UNIQUE keyword.
if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
unique = True
unique_braces_deep = braces_deep
elif unique:
if unique_braces_deep == braces_deep:
if unique_columns:
# Stop constraint parsing.
unique = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
unique_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
unique_columns.append(token.value[1:-1])
else:
# Detect field name by first token.
if field_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
field_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
field_name = token.value[1:-1]
if token.match(sqlparse.tokens.Keyword, 'UNIQUE'):
unique_columns = [field_name]
# Start constraint columns parsing after CHECK keyword.
if token.match(sqlparse.tokens.Keyword, 'CHECK'):
check = True
check_braces_deep = braces_deep
elif check:
if check_braces_deep == braces_deep:
if check_columns:
# Stop constraint parsing.
check = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
if token.value in columns:
check_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
if token.value[1:-1] in columns:
check_columns.append(token.value[1:-1])
unique_constraint = {
'unique': True,
'columns': unique_columns,
'primary_key': False,
'foreign_key': None,
'check': False,
'index': False,
} if unique_columns else None
check_constraint = {
'check': True,
'columns': check_columns,
'primary_key': False,
'unique': False,
'foreign_key': None,
'index': False,
} if check_columns else None
return constraint_name, unique_constraint, check_constraint, token
def _parse_table_constraints(self, sql, columns):
        # Check constraint parsing is based on the SQLite syntax diagram.
# https://www.sqlite.org/syntaxdiagrams.html#table-constraint
statement = sqlparse.parse(sql)[0]
constraints = {}
        unnamed_constraints_index = 0
tokens = (token for token in statement.flatten() if not token.is_whitespace)
# Go to columns and constraint definition
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, '('):
break
# Parse columns and constraint definition
while True:
constraint_name, unique, check, end_token = self._parse_column_or_constraint_definition(tokens, columns)
if unique:
if constraint_name:
constraints[constraint_name] = unique
else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = unique
if check:
if constraint_name:
constraints[constraint_name] = check
else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = check
if end_token.match(sqlparse.tokens.Punctuation, ')'):
break
return constraints
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Find inline check constraints.
try:
table_schema = cursor.execute(
"SELECT sql FROM sqlite_master WHERE type='table' and name=%s" % (
self.connection.ops.quote_name(table_name),
)
).fetchone()[0]
except TypeError:
# table_name is a view.
pass
else:
columns = {info.name for info in self.get_table_description(cursor, table_name)}
constraints.update(self._parse_table_constraints(table_schema, columns))
# Get the index info
cursor.execute("PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name))
for row in cursor.fetchall():
# SQLite 3.8.9+ has 5 columns, however older versions only give 3
# columns. Discard last 2 columns if there.
number, index, unique = row[:3]
cursor.execute(
"SELECT sql FROM sqlite_master "
"WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
)
# There's at most one row.
sql, = cursor.fetchone() or (None,)
# Inline constraints are already detected in
# _parse_table_constraints(). The reasons to avoid fetching inline
# constraints from `PRAGMA index_list` are:
# - Inline constraints can have a different name and information
# than what `PRAGMA index_list` gives.
# - Not all inline constraints may appear in `PRAGMA index_list`.
if not sql:
# An inline constraint
continue
# Get the index info for that index
cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
for index_rank, column_rank, column in cursor.fetchall():
if index not in constraints:
constraints[index] = {
"columns": [],
"primary_key": False,
"unique": bool(unique),
"foreign_key": None,
"check": False,
"index": True,
}
constraints[index]['columns'].append(column)
# Add type and column orders for indexes
if constraints[index]['index'] and not constraints[index]['unique']:
# SQLite doesn't support any index type other than b-tree
constraints[index]['type'] = Index.suffix
order_info = sql.split('(')[-1].split(')')[0].split(',')
orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info]
constraints[index]['orders'] = orders
# Get the PK
pk_column = self.get_primary_key_column(cursor, table_name)
if pk_column:
# SQLite doesn't actually give a name to the PK constraint,
# so we invent one. This is fine, as the SQLite backend never
# deletes PK constraints by name, as you can't delete constraints
# in SQLite; we remake the table with a new PK instead.
constraints["__primary__"] = {
"columns": [pk_column],
"primary_key": True,
"unique": False, # It's not actually a unique constraint.
"foreign_key": None,
"check": False,
"index": False,
}
constraints.update(self._get_foreign_key_constraints(cursor, table_name))
return constraints
|
92e6b726d6ccf5895cab19c9b183cfeacda2e0af011b98521a8f15f939e5d082 | from itertools import chain
from django.utils.itercompat import is_iterable
class Tags:
"""
Built-in tags for internal checks.
"""
admin = 'admin'
async_support = 'async_support'
caches = 'caches'
compatibility = 'compatibility'
database = 'database'
models = 'models'
security = 'security'
signals = 'signals'
staticfiles = 'staticfiles'
templates = 'templates'
translation = 'translation'
urls = 'urls'
class CheckRegistry:
def __init__(self):
self.registered_checks = set()
self.deployment_checks = set()
def register(self, check=None, *tags, **kwargs):
"""
        Can be used as a function or a decorator. Register the given function
        `check` labeled with the given `tags`. The function should receive
        **kwargs and return a list of Errors and Warnings.
Example::
registry = CheckRegistry()
@registry.register('mytag', 'anothertag')
def my_check(apps, **kwargs):
# ... perform checks and collect `errors` ...
return errors
# or
registry.register(my_check, 'mytag', 'anothertag')
"""
def inner(check):
check.tags = tags
checks = self.deployment_checks if kwargs.get('deploy') else self.registered_checks
checks.add(check)
return check
if callable(check):
return inner(check)
else:
if check:
tags += (check,)
return inner
def run_checks(self, app_configs=None, tags=None, include_deployment_checks=False, databases=None):
"""
Run all registered checks and return list of Errors and Warnings.
"""
errors = []
checks = self.get_checks(include_deployment_checks)
if tags is not None:
checks = [check for check in checks if not set(check.tags).isdisjoint(tags)]
for check in checks:
new_errors = check(app_configs=app_configs, databases=databases)
assert is_iterable(new_errors), (
"The function %r did not return a list. All functions registered "
"with the checks registry must return a list." % check)
errors.extend(new_errors)
return errors
def tag_exists(self, tag, include_deployment_checks=False):
return tag in self.tags_available(include_deployment_checks)
def tags_available(self, deployment_checks=False):
return set(chain.from_iterable(
check.tags for check in self.get_checks(deployment_checks)
))
def get_checks(self, include_deployment_checks=False):
checks = list(self.registered_checks)
if include_deployment_checks:
checks.extend(self.deployment_checks)
return checks
registry = CheckRegistry()
register = registry.register
run_checks = registry.run_checks
tag_exists = registry.tag_exists
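# For example, callers can limit a run to database-related checks:
#   errors = run_checks(tags=[Tags.database], databases=['default'])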
|
1a7ac0841ae5159e295114e6ba3fc5edacb504bfb3f618de8dfdebd567fac965 | """
Base classes for writing management commands (named commands which can
be executed through ``django-admin`` or ``manage.py``).
"""
import os
import sys
import warnings
from argparse import ArgumentParser, HelpFormatter
from io import TextIOBase
import django
from django.core import checks
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import color_style, no_style
from django.db import DEFAULT_DB_ALIAS, connections
from django.utils.deprecation import RemovedInDjango41Warning
ALL_CHECKS = '__all__'
class CommandError(Exception):
"""
Exception class indicating a problem while executing a management
command.
If this exception is raised during the execution of a management
command, it will be caught and turned into a nicely-printed error
message to the appropriate output stream (i.e., stderr); as a
result, raising this exception (with a sensible description of the
error) is the preferred way to indicate that something has gone
wrong in the execution of a command.
"""
def __init__(self, *args, returncode=1, **kwargs):
self.returncode = returncode
super().__init__(*args, **kwargs)
class SystemCheckError(CommandError):
"""
The system check framework detected unrecoverable errors.
"""
pass
class CommandParser(ArgumentParser):
"""
    Customized ArgumentParser class to improve some error messages and to
    prevent SystemExit in several situations, as SystemExit is unacceptable
    when a command is called programmatically.
"""
def __init__(self, *, missing_args_message=None, called_from_command_line=None, **kwargs):
self.missing_args_message = missing_args_message
self.called_from_command_line = called_from_command_line
super().__init__(**kwargs)
def parse_args(self, args=None, namespace=None):
# Catch missing argument for a better error message
if (self.missing_args_message and
not (args or any(not arg.startswith('-') for arg in args))):
self.error(self.missing_args_message)
return super().parse_args(args, namespace)
def error(self, message):
if self.called_from_command_line:
super().error(message)
else:
raise CommandError("Error: %s" % message)
def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
def no_translations(handle_func):
"""Decorator that forces a command to run with translations deactivated."""
def wrapped(*args, **kwargs):
from django.utils import translation
saved_locale = translation.get_language()
translation.deactivate_all()
try:
res = handle_func(*args, **kwargs)
finally:
if saved_locale is not None:
translation.activate(saved_locale)
return res
return wrapped
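# no_translations is typically applied to a command's handle() method, e.g.:
#   @no_translations
#   def handle(self, *args, **options): ...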
class DjangoHelpFormatter(HelpFormatter):
"""
Customized formatter so that command-specific arguments appear in the
--help output before arguments common to all commands.
"""
show_last = {
'--version', '--verbosity', '--traceback', '--settings', '--pythonpath',
'--no-color', '--force-color', '--skip-checks',
}
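    # sorted() is stable, so command-specific actions keep their relative
    # order while the options in show_last are pushed to the end.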
def _reordered_actions(self, actions):
return sorted(
actions,
key=lambda a: set(a.option_strings) & self.show_last != set()
)
def add_usage(self, usage, actions, *args, **kwargs):
super().add_usage(usage, self._reordered_actions(actions), *args, **kwargs)
def add_arguments(self, actions):
super().add_arguments(self._reordered_actions(actions))
class OutputWrapper(TextIOBase):
"""
Wrapper around stdout/stderr
"""
@property
def style_func(self):
return self._style_func
@style_func.setter
def style_func(self, style_func):
if style_func and self.isatty():
self._style_func = style_func
else:
self._style_func = lambda x: x
def __init__(self, out, ending='\n'):
self._out = out
self.style_func = None
self.ending = ending
def __getattr__(self, name):
return getattr(self._out, name)
def isatty(self):
return hasattr(self._out, 'isatty') and self._out.isatty()
def write(self, msg='', style_func=None, ending=None):
ending = self.ending if ending is None else ending
if ending and not msg.endswith(ending):
msg += ending
style_func = style_func or self.style_func
self._out.write(style_func(msg))
class BaseCommand:
"""
The base class from which all management commands ultimately
derive.
Use this class if you want access to all of the mechanisms which
parse the command-line arguments and work out what code to call in
response; if you don't need to change any of that behavior,
consider using one of the subclasses defined in this file.
If you are interested in overriding/customizing various aspects of
the command-parsing and -execution behavior, the normal flow works
as follows:
1. ``django-admin`` or ``manage.py`` loads the command class
and calls its ``run_from_argv()`` method.
2. The ``run_from_argv()`` method calls ``create_parser()`` to get
an ``ArgumentParser`` for the arguments, parses them, performs
any environment changes requested by options like
``pythonpath``, and then calls the ``execute()`` method,
passing the parsed arguments.
3. The ``execute()`` method attempts to carry out the command by
calling the ``handle()`` method with the parsed arguments; any
output produced by ``handle()`` will be printed to standard
output and, if the command is intended to produce a block of
SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``.
4. If ``handle()`` or ``execute()`` raised any exception (e.g.
``CommandError``), ``run_from_argv()`` will instead print an error
message to ``stderr``.
Thus, the ``handle()`` method is typically the starting point for
subclasses; many built-in commands and command types either place
all of their logic in ``handle()``, or perform some additional
parsing work in ``handle()`` and then delegate from it to more
specialized methods as needed.
Several attributes affect behavior at various steps along the way:
``help``
A short description of the command, which will be printed in
help messages.
``output_transaction``
A boolean indicating whether the command outputs SQL
statements; if ``True``, the output will automatically be
wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
``False``.
``requires_migrations_checks``
A boolean; if ``True``, the command prints a warning if the set of
        migrations on disk doesn't match the migrations in the database.
``requires_system_checks``
A list or tuple of tags, e.g. [Tags.staticfiles, Tags.models]. System
checks registered in the chosen tags will be checked for errors prior
to executing the command. The value '__all__' can be used to specify
that all system checks should be performed. Default value is '__all__'.
To validate an individual application's models
rather than all applications' models, call
``self.check(app_configs)`` from ``handle()``, where ``app_configs``
        is the list of application configurations provided by the
app registry.
``stealth_options``
A tuple of any options the command uses which aren't defined by the
argument parser.
"""
# Metadata about this command.
help = ''
# Configuration shortcuts that alter various logic.
_called_from_command_line = False
output_transaction = False # Whether to wrap the output in a "BEGIN; COMMIT;"
requires_migrations_checks = False
requires_system_checks = '__all__'
# Arguments, common to all commands, which aren't defined by the argument
# parser.
base_stealth_options = ('stderr', 'stdout')
# Command-specific options not defined by the argument parser.
stealth_options = ()
def __init__(self, stdout=None, stderr=None, no_color=False, force_color=False):
self.stdout = OutputWrapper(stdout or sys.stdout)
self.stderr = OutputWrapper(stderr or sys.stderr)
if no_color and force_color:
raise CommandError("'no_color' and 'force_color' can't be used together.")
if no_color:
self.style = no_style()
else:
self.style = color_style(force_color)
self.stderr.style_func = self.style.ERROR
if self.requires_system_checks in [False, True]:
warnings.warn(
"Using a boolean value for requires_system_checks is "
"deprecated. Use '__all__' instead of True, and [] (an empty "
"list) instead of False.",
RemovedInDjango41Warning,
)
self.requires_system_checks = ALL_CHECKS if self.requires_system_checks else []
if (
not isinstance(self.requires_system_checks, (list, tuple)) and
self.requires_system_checks != ALL_CHECKS
):
raise TypeError('requires_system_checks must be a list or tuple.')
def get_version(self):
"""
Return the Django version, which should be correct for all built-in
Django commands. User-supplied commands can override this method to
return their own version.
"""
return django.get_version()
def create_parser(self, prog_name, subcommand, **kwargs):
"""
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
"""
parser = CommandParser(
prog='%s %s' % (os.path.basename(prog_name), subcommand),
description=self.help or None,
formatter_class=DjangoHelpFormatter,
missing_args_message=getattr(self, 'missing_args_message', None),
called_from_command_line=getattr(self, '_called_from_command_line', None),
**kwargs
)
parser.add_argument('--version', action='version', version=self.get_version())
parser.add_argument(
'-v', '--verbosity', default=1,
type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output',
)
parser.add_argument(
'--settings',
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_argument(
'--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".',
)
parser.add_argument('--traceback', action='store_true', help='Raise on CommandError exceptions')
parser.add_argument(
'--no-color', action='store_true',
help="Don't colorize the command output.",
)
parser.add_argument(
'--force-color', action='store_true',
help='Force colorization of the command output.',
)
if self.requires_system_checks:
parser.add_argument(
'--skip-checks', action='store_true',
help='Skip system checks.',
)
self.add_arguments(parser)
return parser
def add_arguments(self, parser):
"""
Entry point for subclassed commands to add custom arguments.
"""
pass
def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help()
def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
"""
self._called_from_command_line = True
parser = self.create_parser(argv[0], argv[1])
options = parser.parse_args(argv[2:])
cmd_options = vars(options)
# Move positional args out of options to mimic legacy optparse
args = cmd_options.pop('args', ())
handle_default_options(options)
try:
self.execute(*args, **cmd_options)
except CommandError as e:
if options.traceback:
raise
# SystemCheckError takes care of its own formatting.
if isinstance(e, SystemCheckError):
self.stderr.write(str(e), lambda x: x)
else:
self.stderr.write('%s: %s' % (e.__class__.__name__, e))
sys.exit(e.returncode)
finally:
try:
connections.close_all()
except ImproperlyConfigured:
                # Ignore if connections aren't set up at this point (e.g. no
# configured settings).
pass
def execute(self, *args, **options):
"""
Try to execute this command, performing system checks if needed (as
controlled by the ``requires_system_checks`` attribute, except if
force-skipped).
"""
if options['force_color'] and options['no_color']:
raise CommandError("The --no-color and --force-color options can't be used together.")
if options['force_color']:
self.style = color_style(force_color=True)
elif options['no_color']:
self.style = no_style()
self.stderr.style_func = None
if options.get('stdout'):
self.stdout = OutputWrapper(options['stdout'])
if options.get('stderr'):
self.stderr = OutputWrapper(options['stderr'])
if self.requires_system_checks and not options['skip_checks']:
if self.requires_system_checks == ALL_CHECKS:
self.check()
else:
self.check(tags=self.requires_system_checks)
if self.requires_migrations_checks:
self.check_migrations()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
output = '%s\n%s\n%s' % (
self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()),
output,
self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()),
)
self.stdout.write(output)
return output
def check(self, app_configs=None, tags=None, display_num_errors=False,
include_deployment_checks=False, fail_level=checks.ERROR,
databases=None):
"""
        Use the system check framework to validate the entire Django project.
        Raise CommandError for any serious message (error or critical). If
        there are only light messages (like warnings), print them to stderr
        and don't raise an exception.
"""
all_issues = checks.run_checks(
app_configs=app_configs,
tags=tags,
include_deployment_checks=include_deployment_checks,
databases=databases,
)
header, body, footer = "", "", ""
visible_issue_count = 0 # excludes silenced warnings
if all_issues:
debugs = [e for e in all_issues if e.level < checks.INFO and not e.is_silenced()]
infos = [e for e in all_issues if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()]
warnings = [e for e in all_issues if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()]
errors = [e for e in all_issues if checks.ERROR <= e.level < checks.CRITICAL and not e.is_silenced()]
criticals = [e for e in all_issues if checks.CRITICAL <= e.level and not e.is_silenced()]
sorted_issues = [
(criticals, 'CRITICALS'),
(errors, 'ERRORS'),
(warnings, 'WARNINGS'),
(infos, 'INFOS'),
(debugs, 'DEBUGS'),
]
for issues, group_name in sorted_issues:
if issues:
visible_issue_count += len(issues)
formatted = (
self.style.ERROR(str(e))
if e.is_serious()
else self.style.WARNING(str(e))
for e in issues)
formatted = "\n".join(sorted(formatted))
body += '\n%s:\n%s\n' % (group_name, formatted)
if visible_issue_count:
header = "System check identified some issues:\n"
if display_num_errors:
if visible_issue_count:
footer += '\n'
footer += "System check identified %s (%s silenced)." % (
"no issues" if visible_issue_count == 0 else
"1 issue" if visible_issue_count == 1 else
"%s issues" % visible_issue_count,
len(all_issues) - visible_issue_count,
)
if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues):
msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
raise SystemCheckError(msg)
else:
msg = header + body + footer
if msg:
if visible_issue_count:
self.stderr.write(msg, lambda x: x)
else:
self.stdout.write(msg)
def check_migrations(self):
"""
        Print a warning if the set of migrations on disk doesn't match the
migrations in the database.
"""
from django.db.migrations.executor import MigrationExecutor
try:
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
except ImproperlyConfigured:
# No databases are configured (or the dummy one)
return
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
if plan:
apps_waiting_migration = sorted({migration.app_label for migration, backwards in plan})
self.stdout.write(
self.style.NOTICE(
"\nYou have %(unapplied_migration_count)s unapplied migration(s). "
"Your project may not work properly until you apply the "
"migrations for app(s): %(apps_waiting_migration)s." % {
"unapplied_migration_count": len(plan),
"apps_waiting_migration": ", ".join(apps_waiting_migration),
}
)
)
self.stdout.write(self.style.NOTICE("Run 'python manage.py migrate' to apply them."))
def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError('subclasses of BaseCommand must provide a handle() method')
class AppCommand(BaseCommand):
"""
A management command which takes one or more installed application labels
as arguments, and does something with each of them.
Rather than implementing ``handle()``, subclasses must implement
``handle_app_config()``, which will be called once for each application.
"""
missing_args_message = "Enter at least one application label."
def add_arguments(self, parser):
        parser.add_argument('args', metavar='app_label', nargs='+', help='One or more application labels.')
def handle(self, *app_labels, **options):
from django.apps import apps
try:
app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
except (LookupError, ImportError) as e:
raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)
output = []
for app_config in app_configs:
app_output = self.handle_app_config(app_config, **options)
if app_output:
output.append(app_output)
return '\n'.join(output)
def handle_app_config(self, app_config, **options):
"""
Perform the command's actions for app_config, an AppConfig instance
corresponding to an application label given on the command line.
"""
raise NotImplementedError(
"Subclasses of AppCommand must provide"
"a handle_app_config() method.")
class LabelCommand(BaseCommand):
"""
A management command which takes one or more arbitrary arguments
(labels) on the command line, and does something with each of
them.
Rather than implementing ``handle()``, subclasses must implement
``handle_label()``, which will be called once for each label.
If the arguments should be names of installed applications, use
``AppCommand`` instead.
"""
label = 'label'
missing_args_message = "Enter at least one %s." % label
def add_arguments(self, parser):
parser.add_argument('args', metavar=self.label, nargs='+')
def handle(self, *labels, **options):
output = []
for label in labels:
label_output = self.handle_label(label, **options)
if label_output:
output.append(label_output)
return '\n'.join(output)
def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method')
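# A minimal command built on these classes looks roughly like the following
# sketch (illustrative only, not part of this module):
#
#     class Command(LabelCommand):
#         help = 'Echo each label passed on the command line.'
#
#         def handle_label(self, label, **options):
#             return 'Saw label: %s' % label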
|
4f5afe8f963c85598feaf166f018090075c928cd9389ca0601bfb647c1260c9e | import cgi
import mimetypes
import os
import posixpath
import shutil
import stat
import tempfile
from importlib import import_module
from urllib.request import urlretrieve
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import handle_extensions
from django.template import Context, Engine
from django.utils import archive
from django.utils.version import get_docs_version
class TemplateCommand(BaseCommand):
"""
Copy either a Django application layout template or a Django project
layout template into the specified directory.
:param style: A color style object (see django.core.management.color).
:param app_or_project: The string 'app' or 'project'.
:param name: The name of the application or project.
:param directory: The directory to which the template should be copied.
:param options: The additional variables passed to project or app templates
"""
requires_system_checks = []
# The supported URL schemes
url_schemes = ['http', 'https', 'ftp']
# Rewrite the following suffixes when determining the target filename.
rewrite_template_suffixes = (
# Allow shipping invalid .py files without byte-compilation.
('.py-tpl', '.py'),
)
def add_arguments(self, parser):
parser.add_argument('name', help='Name of the application or project.')
parser.add_argument('directory', nargs='?', help='Optional destination directory')
parser.add_argument('--template', help='The path or URL to load the template from.')
parser.add_argument(
'--extension', '-e', dest='extensions',
action='append', default=['py'],
help='The file extension(s) to render (default: "py"). '
'Separate multiple extensions with commas, or use '
'-e multiple times.'
)
parser.add_argument(
'--name', '-n', dest='files',
action='append', default=[],
help='The file name(s) to render. Separate multiple file names '
'with commas, or use -n multiple times.'
)
def handle(self, app_or_project, name, target=None, **options):
self.app_or_project = app_or_project
self.a_or_an = 'an' if app_or_project == 'app' else 'a'
self.paths_to_remove = []
self.verbosity = options['verbosity']
self.validate_name(name)
# if some directory is given, make sure it's nicely expanded
if target is None:
top_dir = os.path.join(os.getcwd(), name)
try:
os.makedirs(top_dir)
except FileExistsError:
raise CommandError("'%s' already exists" % top_dir)
except OSError as e:
raise CommandError(e)
else:
if app_or_project == 'app':
self.validate_name(os.path.basename(target), 'directory')
top_dir = os.path.abspath(os.path.expanduser(target))
if not os.path.exists(top_dir):
raise CommandError("Destination directory '%s' does not "
"exist, please create it first." % top_dir)
extensions = tuple(handle_extensions(options['extensions']))
extra_files = []
for file in options['files']:
extra_files.extend(map(lambda x: x.strip(), file.split(',')))
if self.verbosity >= 2:
self.stdout.write(
'Rendering %s template files with extensions: %s'
% (app_or_project, ', '.join(extensions))
)
self.stdout.write(
'Rendering %s template files with filenames: %s'
% (app_or_project, ', '.join(extra_files))
)
base_name = '%s_name' % app_or_project
base_subdir = '%s_template' % app_or_project
base_directory = '%s_directory' % app_or_project
camel_case_name = 'camel_case_%s_name' % app_or_project
camel_case_value = ''.join(x for x in name.title() if x != '_')
context = Context({
**options,
base_name: name,
base_directory: top_dir,
camel_case_name: camel_case_value,
'docs_version': get_docs_version(),
'django_version': django.__version__,
}, autoescape=False)
        # Set up a stub settings environment for template rendering
if not settings.configured:
settings.configure()
django.setup()
template_dir = self.handle_template(options['template'],
base_subdir)
prefix_length = len(template_dir) + 1
for root, dirs, files in os.walk(template_dir):
path_rest = root[prefix_length:]
relative_dir = path_rest.replace(base_name, name)
if relative_dir:
target_dir = os.path.join(top_dir, relative_dir)
os.makedirs(target_dir, exist_ok=True)
for dirname in dirs[:]:
if dirname.startswith('.') or dirname == '__pycache__':
dirs.remove(dirname)
for filename in files:
if filename.endswith(('.pyo', '.pyc', '.py.class')):
# Ignore some files as they cause various breakages.
continue
old_path = os.path.join(root, filename)
new_path = os.path.join(
top_dir, relative_dir, filename.replace(base_name, name)
)
for old_suffix, new_suffix in self.rewrite_template_suffixes:
if new_path.endswith(old_suffix):
new_path = new_path[:-len(old_suffix)] + new_suffix
break # Only rewrite once
if os.path.exists(new_path):
raise CommandError(
"%s already exists. Overlaying %s %s into an existing "
"directory won't replace conflicting files." % (
new_path, self.a_or_an, app_or_project,
)
)
# Only render the Python files, as we don't want to
                # accidentally render Django template files
if new_path.endswith(extensions) or filename in extra_files:
with open(old_path, encoding='utf-8') as template_file:
content = template_file.read()
template = Engine().from_string(content)
content = template.render(context)
with open(new_path, 'w', encoding='utf-8') as new_file:
new_file.write(content)
else:
shutil.copyfile(old_path, new_path)
if self.verbosity >= 2:
self.stdout.write('Creating %s' % new_path)
try:
shutil.copymode(old_path, new_path)
self.make_writeable(new_path)
except OSError:
self.stderr.write(
"Notice: Couldn't set permission bits on %s. You're "
"probably using an uncommon filesystem setup. No "
"problem." % new_path, self.style.NOTICE)
if self.paths_to_remove:
if self.verbosity >= 2:
self.stdout.write('Cleaning up temporary files.')
for path_to_remove in self.paths_to_remove:
if os.path.isfile(path_to_remove):
os.remove(path_to_remove)
else:
shutil.rmtree(path_to_remove)
def handle_template(self, template, subdir):
"""
Determine where the app or project templates are.
Use django.__path__[0] as the default because the Django install
directory isn't known.
"""
if template is None:
return os.path.join(django.__path__[0], 'conf', subdir)
else:
if template.startswith('file://'):
template = template[7:]
expanded_template = os.path.expanduser(template)
expanded_template = os.path.normpath(expanded_template)
if os.path.isdir(expanded_template):
return expanded_template
if self.is_url(template):
# downloads the file and returns the path
absolute_path = self.download(template)
else:
absolute_path = os.path.abspath(expanded_template)
if os.path.exists(absolute_path):
return self.extract(absolute_path)
raise CommandError("couldn't handle %s template %s." %
(self.app_or_project, template))
def validate_name(self, name, name_or_dir='name'):
if name is None:
raise CommandError('you must provide {an} {app} name'.format(
an=self.a_or_an,
app=self.app_or_project,
))
# Check it's a valid directory name.
if not name.isidentifier():
raise CommandError(
"'{name}' is not a valid {app} {type}. Please make sure the "
"{type} is a valid identifier.".format(
name=name,
app=self.app_or_project,
type=name_or_dir,
)
)
# Check it cannot be imported.
try:
import_module(name)
except ImportError:
pass
else:
raise CommandError(
"'{name}' conflicts with the name of an existing Python "
"module and cannot be used as {an} {app} {type}. Please try "
"another {type}.".format(
name=name,
an=self.a_or_an,
app=self.app_or_project,
type=name_or_dir,
)
)
def download(self, url):
"""
Download the given URL and return the file name.
"""
def cleanup_url(url):
tmp = url.rstrip('/')
filename = tmp.split('/')[-1]
if url.endswith('/'):
display_url = tmp + '/'
else:
display_url = url
return filename, display_url
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
self.paths_to_remove.append(tempdir)
filename, display_url = cleanup_url(url)
if self.verbosity >= 2:
self.stdout.write('Downloading %s' % display_url)
try:
the_path, info = urlretrieve(url, os.path.join(tempdir, filename))
except OSError as e:
raise CommandError("couldn't download URL %s to %s: %s" %
(url, filename, e))
used_name = the_path.split('/')[-1]
# Trying to get better name from response headers
content_disposition = info.get('content-disposition')
if content_disposition:
_, params = cgi.parse_header(content_disposition)
guessed_filename = params.get('filename') or used_name
else:
guessed_filename = used_name
# Falling back to content type guessing
ext = self.splitext(guessed_filename)[1]
content_type = info.get('content-type')
if not ext and content_type:
ext = mimetypes.guess_extension(content_type)
if ext:
guessed_filename += ext
# Move the temporary file to a filename that has better
# chances of being recognized by the archive utils
if used_name != guessed_filename:
guessed_path = os.path.join(tempdir, guessed_filename)
shutil.move(the_path, guessed_path)
return guessed_path
# Giving up
return the_path
def splitext(self, the_path):
"""
Like os.path.splitext, but takes off .tar, too
"""
base, ext = posixpath.splitext(the_path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
def extract(self, filename):
"""
        Extract the given file to a temporary directory and return
the path of the directory with the extracted content.
"""
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
self.paths_to_remove.append(tempdir)
if self.verbosity >= 2:
self.stdout.write('Extracting %s' % filename)
try:
archive.extract(filename, tempdir)
return tempdir
except (archive.ArchiveException, OSError) as e:
raise CommandError("couldn't extract file %s to %s: %s" %
(filename, tempdir, e))
def is_url(self, template):
"""Return True if the name looks like a URL."""
if ':' not in template:
return False
scheme = template.split(':', 1)[0].lower()
return scheme in self.url_schemes
def make_writeable(self, filename):
"""
Make sure that the file is writeable.
Useful if our source is read-only.
"""
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
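# For example, startapp builds on this command and can render a downloaded
# archive template, processing only files matched by -e/--extension:
#   python manage.py startapp myapp --template=https://example.com/app_template.tar.gz -e py,rst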
|
4842a53cfbd86aed058103efe3013a583cca2bf00aa065d51449277c1035f3cf | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.apps import apps
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.encoding import is_protected_type
class Serializer(base.Serializer):
"""
Serialize a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = {}
def end_object(self, obj):
self.objects.append(self.get_dump_object(obj))
self._current = None
def get_dump_object(self, obj):
data = {'model': str(obj._meta)}
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
data["pk"] = self._value_from_field(obj, obj._meta.pk)
data['fields'] = self._current
return data
def _value_from_field(self, obj, field):
value = field.value_from_object(obj)
# Protected types (i.e., primitives like None, numbers, dates,
# and Decimals) are passed through as is. All other values are
# converted to string first.
return value if is_protected_type(value) else field.value_to_string(obj)
def handle_field(self, obj, field):
self._current[field.name] = self._value_from_field(obj, field)
def handle_fk_field(self, obj, field):
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = self._value_from_field(obj, field)
self._current[field.name] = value
def handle_m2m_field(self, obj, field):
if field.remote_field.through._meta.auto_created:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
def m2m_value(value):
return value.natural_key()
else:
def m2m_value(value):
return self._value_from_field(value, value._meta.pk)
m2m_iter = getattr(obj, '_prefetched_objects_cache', {}).get(
field.name,
getattr(obj, field.name).iterator(),
)
self._current[field.name] = [m2m_value(related) for related in m2m_iter]
def getvalue(self):
return self.objects
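# Typical use goes through the serializers framework rather than this class
# directly, e.g. (SomeModel stands in for any model class):
#   from django.core import serializers
#   data = serializers.serialize('python', SomeModel.objects.all())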
def Deserializer(object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
"""
handle_forward_references = options.pop('handle_forward_references', False)
field_names_cache = {} # Model: <list of field_names>
for d in object_list:
        # Look up the model and start building a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignorenonexistent:
continue
else:
raise
data = {}
if 'pk' in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
m2m_data = {}
deferred_fields = {}
if Model not in field_names_cache:
field_names_cache[Model] = {f.name for f in Model._meta.get_fields()}
field_names = field_names_cache[Model]
# Handle each field
for (field_name, field_value) in d["fields"].items():
if ignorenonexistent and field_name not in field_names:
# skip fields no longer on model
continue
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
try:
values = base.deserialize_m2m_values(field, field_value, using, handle_forward_references)
except base.M2MDeserializationError as e:
raise base.DeserializationError.WithData(e.original_exc, d['model'], d.get('pk'), e.pk)
if values == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
m2m_data[field.name] = values
# Handle FK fields
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
try:
value = base.deserialize_fk_value(field, field_value, using, handle_forward_references)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
if value == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
data[field.attname] = value
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
obj = base.build_instance(Model, data, using)
yield base.DeserializedObject(obj, m2m_data, deferred_fields)
def _get_model(model_identifier):
"""Look up a model from an "app_label.model_name" string."""
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier)
|
86164690674745fe425e9110f5d995c405f408e6992787ceac6f1bda0049ac29 | """
XML serializer.
"""
from xml.dom import pulldom
from xml.sax import handler
from xml.sax.expatreader import ExpatParser as _ExpatParser
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.xmlutils import (
SimplerXMLGenerator, UnserializableContentError,
)
class Serializer(base.Serializer):
"""Serialize a QuerySet to XML."""
def indent(self, level):
if self.options.get('indent') is not None:
self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent') * level)
def start_serialization(self):
"""
Start serialization -- open the XML document and the root element.
"""
self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
self.xml.startDocument()
self.xml.startElement("django-objects", {"version": "1.0"})
def end_serialization(self):
"""
End serialization -- end the document.
"""
self.indent(0)
self.xml.endElement("django-objects")
self.xml.endDocument()
def start_object(self, obj):
"""
Called as each object is handled.
"""
if not hasattr(obj, "_meta"):
raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
self.indent(1)
attrs = {'model': str(obj._meta)}
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
obj_pk = obj.pk
if obj_pk is not None:
attrs['pk'] = str(obj_pk)
self.xml.startElement("object", attrs)
def end_object(self, obj):
"""
Called after handling all fields for an object.
"""
self.indent(1)
self.xml.endElement("object")
def handle_field(self, obj, field):
"""
Handle each field on an object (except for ForeignKeys and
ManyToManyFields).
"""
self.indent(2)
self.xml.startElement('field', {
'name': field.name,
'type': field.get_internal_type(),
})
# Get a "string version" of the object's data.
if getattr(obj, field.name) is not None:
try:
self.xml.characters(field.value_to_string(obj))
except UnserializableContentError:
raise ValueError("%s.%s (pk:%s) contains unserializable characters" % (
obj.__class__.__name__, field.name, obj.pk))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_fk_field(self, obj, field):
"""
Handle a ForeignKey (they need to be treated slightly
differently from regular fields).
"""
self._start_relational_field(field)
related_att = getattr(obj, field.get_attname())
if related_att is not None:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(str(key_value))
self.xml.endElement("natural")
else:
self.xml.characters(str(related_att))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_m2m_field(self, obj, field):
"""
Handle a ManyToManyField. Related objects are only serialized as
references to the object's PK (i.e. the related *data* is not dumped,
just the relation).
"""
if field.remote_field.through._meta.auto_created:
self._start_relational_field(field)
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
# If the objects in the m2m have a natural key, use it
def handle_m2m(value):
natural = value.natural_key()
# Iterable natural keys are rolled out as subelements
self.xml.startElement("object", {})
for key_value in natural:
self.xml.startElement("natural", {})
self.xml.characters(str(key_value))
self.xml.endElement("natural")
self.xml.endElement("object")
else:
def handle_m2m(value):
self.xml.addQuickElement("object", attrs={
'pk': str(value.pk)
})
m2m_iter = getattr(obj, '_prefetched_objects_cache', {}).get(
field.name,
getattr(obj, field.name).iterator(),
)
for relobj in m2m_iter:
handle_m2m(relobj)
self.xml.endElement("field")
def _start_relational_field(self, field):
"""Output the <field> element for relational fields."""
self.indent(2)
self.xml.startElement('field', {
'name': field.name,
'rel': field.remote_field.__class__.__name__,
'to': str(field.remote_field.model._meta),
})
class Deserializer(base.Deserializer):
"""Deserialize XML."""
def __init__(self, stream_or_string, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options):
super().__init__(stream_or_string, **options)
self.handle_forward_references = options.pop('handle_forward_references', False)
self.event_stream = pulldom.parse(self.stream, self._make_parser())
self.db = using
self.ignore = ignorenonexistent
def _make_parser(self):
"""Create a hardened XML parser (no custom/external entities)."""
return DefusedExpatParser()
def __next__(self):
for event, node in self.event_stream:
if event == "START_ELEMENT" and node.nodeName == "object":
self.event_stream.expandNode(node)
return self._handle_object(node)
raise StopIteration
def _handle_object(self, node):
"""Convert an <object> node to a DeserializedObject."""
# Look up the model using the model loading mechanism. If this fails,
# bail.
Model = self._get_model_from_node(node, "model")
# Start building a data dictionary from the object.
data = {}
if node.hasAttribute('pk'):
data[Model._meta.pk.attname] = Model._meta.pk.to_python(
node.getAttribute('pk'))
# Also start building a dict of m2m data (this is saved as
# {m2m_accessor_attribute : [list_of_related_objects]})
m2m_data = {}
deferred_fields = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Deserialize each field.
for field_node in node.getElementsByTagName("field"):
# If the field is missing the name attribute, bail (are you
# sensing a pattern here?)
field_name = field_node.getAttribute("name")
if not field_name:
raise base.DeserializationError("<field> node is missing the 'name' attribute")
# Get the field from the Model. This will raise a
# FieldDoesNotExist if, well, the field doesn't exist, which will
# be propagated correctly unless ignorenonexistent=True is used.
if self.ignore and field_name not in field_names:
continue
field = Model._meta.get_field(field_name)
# As is usually the case, relation fields get the special treatment.
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
value = self._handle_m2m_field_node(field_node, field)
if value == base.DEFER_FIELD:
deferred_fields[field] = [
[
getInnerText(nat_node).strip()
for nat_node in obj_node.getElementsByTagName('natural')
]
for obj_node in field_node.getElementsByTagName('object')
]
else:
m2m_data[field.name] = value
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
value = self._handle_fk_field_node(field_node, field)
if value == base.DEFER_FIELD:
deferred_fields[field] = [
getInnerText(k).strip()
for k in field_node.getElementsByTagName('natural')
]
else:
data[field.attname] = value
else:
if field_node.getElementsByTagName('None'):
value = None
else:
value = field.to_python(getInnerText(field_node).strip())
data[field.name] = value
obj = base.build_instance(Model, data, self.db)
# Return a DeserializedObject so that the m2m data has a place to live.
return base.DeserializedObject(obj, m2m_data, deferred_fields)
def _handle_fk_field_node(self, node, field):
"""
Handle a <field> node for a ForeignKey
"""
# Check if there is a child node named 'None', returning None if so.
if node.getElementsByTagName('None'):
return None
else:
model = field.remote_field.model
if hasattr(model._default_manager, 'get_by_natural_key'):
keys = node.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
try:
obj = model._default_manager.db_manager(self.db).get_by_natural_key(*field_value)
except ObjectDoesNotExist:
if self.handle_forward_references:
return base.DEFER_FIELD
else:
raise
obj_pk = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.remote_field.model._meta.pk.remote_field:
obj_pk = obj_pk.pk
else:
# Otherwise, treat like a normal PK
field_value = getInnerText(node).strip()
obj_pk = model._meta.get_field(field.remote_field.field_name).to_python(field_value)
return obj_pk
else:
field_value = getInnerText(node).strip()
return model._meta.get_field(field.remote_field.field_name).to_python(field_value)
def _handle_m2m_field_node(self, node, field):
"""
Handle a <field> node for a ManyToManyField.
"""
model = field.remote_field.model
default_manager = model._default_manager
if hasattr(default_manager, 'get_by_natural_key'):
def m2m_convert(n):
keys = n.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
else:
# Otherwise, treat like a normal PK value.
obj_pk = model._meta.pk.to_python(n.getAttribute('pk'))
return obj_pk
else:
def m2m_convert(n):
return model._meta.pk.to_python(n.getAttribute('pk'))
values = []
try:
for c in node.getElementsByTagName('object'):
values.append(m2m_convert(c))
except Exception as e:
if isinstance(e, ObjectDoesNotExist) and self.handle_forward_references:
return base.DEFER_FIELD
else:
raise base.M2MDeserializationError(e, c)
else:
return values
def _get_model_from_node(self, node, attr):
"""
Look up a model from a <object model=...> or a <field rel=... to=...>
node.
"""
model_identifier = node.getAttribute(attr)
if not model_identifier:
raise base.DeserializationError(
"<%s> node is missing the required '%s' attribute"
% (node.nodeName, attr))
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError(
"<%s> node has invalid model identifier: '%s'"
% (node.nodeName, model_identifier))
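# Illustrative sketch (not part of Django itself): the Serializer/Deserializer
# pair above is normally reached through the django.core.serializers registry
# rather than instantiated directly. A hedged round-trip example; `queryset`
# stands in for any concrete model queryset in a configured project.
def _demo_xml_roundtrip(queryset):
    from django.core import serializers
    xml = serializers.serialize('xml', queryset)
    for deserialized in serializers.deserialize('xml', xml):
        # DeserializedObject.save() writes the wrapped instance and any
        # deferred m2m data back to the database.
        deserialized.save()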
def getInnerText(node):
"""Get all the inner text of a DOM node (recursively)."""
# inspired by https://mail.python.org/pipermail/xml-sig/2005-March/011022.html
inner_text = []
for child in node.childNodes:
if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
inner_text.append(child.data)
elif child.nodeType == child.ELEMENT_NODE:
inner_text.extend(getInnerText(child))
else:
pass
return "".join(inner_text)
# The code below is based on Christian Heimes' defusedxml.

class DefusedExpatParser(_ExpatParser):
"""
An expat parser hardened against XML bomb attacks.
    Forbid DTDs and external entity references.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setFeature(handler.feature_external_ges, False)
self.setFeature(handler.feature_external_pes, False)
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise DTDForbidden(name, sysid, pubid)
def entity_decl(self, name, is_parameter_entity, value, base,
sysid, pubid, notation_name):
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
def external_entity_ref_handler(self, context, base, sysid, pubid):
raise ExternalReferenceForbidden(context, base, sysid, pubid)
def reset(self):
_ExpatParser.reset(self)
parser = self._parser
parser.StartDoctypeDeclHandler = self.start_doctype_decl
parser.EntityDeclHandler = self.entity_decl
parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
class DefusedXmlException(ValueError):
"""Base exception."""
def __repr__(self):
return str(self)
class DTDForbidden(DefusedXmlException):
"""Document type definition is forbidden."""
def __init__(self, name, sysid, pubid):
super().__init__()
self.name = name
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class EntitiesForbidden(DefusedXmlException):
"""Entity definition is forbidden."""
def __init__(self, name, value, base, sysid, pubid, notation_name):
super().__init__()
self.name = name
self.value = value
self.base = base
self.sysid = sysid
self.pubid = pubid
self.notation_name = notation_name
def __str__(self):
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class ExternalReferenceForbidden(DefusedXmlException):
"""Resolving an external reference is forbidden."""
def __init__(self, context, base, sysid, pubid):
super().__init__()
self.context = context
self.base = base
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
return tpl.format(self.sysid, self.pubid)
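# Illustrative sketch (not part of Django itself): the hardened parser refuses
# to process a document type declaration at all, so classic "billion laughs"
# payloads fail before any entity expansion happens. A hedged example feeding
# such a payload through the same pulldom pipeline the Deserializer uses:
def _demo_defused_parser():
    import io
    from xml.dom import pulldom
    payload = (
        '<?xml version="1.0"?>'
        '<!DOCTYPE bomb [<!ENTITY a "aaaa">]>'
        '<root>&a;</root>'
    )
    events = pulldom.parse(io.StringIO(payload), DefusedExpatParser())
    try:
        for event, node in events:
            pass
    except DTDForbidden as exc:
        # Parsing is aborted as soon as the DOCTYPE declaration is seen.
        return str(exc)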
|
52844409c369c522d05ff4e3c0b8582c58c63952b37eaefbfec0ee824e644a04 | import asyncio
import logging
import types
from asgiref.sync import async_to_sync, sync_to_async
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed
from django.core.signals import request_finished
from django.db import connections, transaction
from django.urls import get_resolver, set_urlconf
from django.utils.log import log_response
from django.utils.module_loading import import_string
from .exception import convert_exception_to_response
logger = logging.getLogger('django.request')
class BaseHandler:
_view_middleware = None
_template_response_middleware = None
_exception_middleware = None
_middleware_chain = None
def load_middleware(self, is_async=False):
"""
Populate middleware lists from settings.MIDDLEWARE.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
self._exception_middleware = []
get_response = self._get_response_async if is_async else self._get_response
handler = convert_exception_to_response(get_response)
handler_is_async = is_async
for middleware_path in reversed(settings.MIDDLEWARE):
middleware = import_string(middleware_path)
middleware_can_sync = getattr(middleware, 'sync_capable', True)
middleware_can_async = getattr(middleware, 'async_capable', False)
if not middleware_can_sync and not middleware_can_async:
raise RuntimeError(
'Middleware %s must have at least one of '
'sync_capable/async_capable set to True.' % middleware_path
)
elif not handler_is_async and middleware_can_sync:
middleware_is_async = False
else:
middleware_is_async = middleware_can_async
try:
# Adapt handler, if needed.
handler = self.adapt_method_mode(
middleware_is_async, handler, handler_is_async,
debug=settings.DEBUG, name='middleware %s' % middleware_path,
)
mw_instance = middleware(handler)
except MiddlewareNotUsed as exc:
if settings.DEBUG:
if str(exc):
logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
else:
logger.debug('MiddlewareNotUsed: %r', middleware_path)
continue
if mw_instance is None:
raise ImproperlyConfigured(
'Middleware factory %s returned None.' % middleware_path
)
if hasattr(mw_instance, 'process_view'):
self._view_middleware.insert(
0,
self.adapt_method_mode(is_async, mw_instance.process_view),
)
if hasattr(mw_instance, 'process_template_response'):
self._template_response_middleware.append(
self.adapt_method_mode(is_async, mw_instance.process_template_response),
)
if hasattr(mw_instance, 'process_exception'):
# The exception-handling stack is still always synchronous for
# now, so adapt that way.
self._exception_middleware.append(
self.adapt_method_mode(False, mw_instance.process_exception),
)
handler = convert_exception_to_response(mw_instance)
handler_is_async = middleware_is_async
# Adapt the top of the stack, if needed.
handler = self.adapt_method_mode(is_async, handler, handler_is_async)
        # Only assign to this once initialization is complete, because it is
        # also used as the flag that initialization has finished.
self._middleware_chain = handler
def adapt_method_mode(
self, is_async, method, method_is_async=None, debug=False, name=None,
):
"""
Adapt a method to be in the correct "mode":
- If is_async is False:
- Synchronous methods are left alone
- Asynchronous methods are wrapped with async_to_sync
- If is_async is True:
- Synchronous methods are wrapped with sync_to_async()
- Asynchronous methods are left alone
"""
if method_is_async is None:
method_is_async = asyncio.iscoroutinefunction(method)
if debug and not name:
name = name or 'method %s()' % method.__qualname__
if is_async:
if not method_is_async:
if debug:
logger.debug('Synchronous %s adapted.', name)
return sync_to_async(method, thread_sensitive=True)
elif method_is_async:
if debug:
logger.debug('Asynchronous %s adapted.', name)
return async_to_sync(method)
return method
def get_response(self, request):
"""Return an HttpResponse object for the given HttpRequest."""
        # Set up the default URL resolver for this thread.
set_urlconf(settings.ROOT_URLCONF)
response = self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
log_response(
'%s: %s', response.reason_phrase, request.path,
response=response,
request=request,
)
return response
async def get_response_async(self, request):
"""
Asynchronous version of get_response.
Funneling everything, including WSGI, into a single async
get_response() is too slow. Avoid the context switch by using
a separate async response path.
"""
        # Set up the default URL resolver for this thread.
set_urlconf(settings.ROOT_URLCONF)
response = await self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
await sync_to_async(log_response)(
'%s: %s', response.reason_phrase, request.path,
response=response,
request=request,
)
return response
def _get_response(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is an asynchronous view, run it in a subthread.
if asyncio.iscoroutinefunction(wrapped_callback):
wrapped_callback = async_to_sync(wrapped_callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
# Complain if the view returned None (a common error).
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None (a common error).
self.check_response(
response,
middleware_method,
name='%s.process_template_response' % (
middleware_method.__self__.__class__.__name__,
)
)
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
return response
async def _get_response_async(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware.
for middleware_method in self._view_middleware:
response = await middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is a synchronous view, run it in a subthread
if not asyncio.iscoroutinefunction(wrapped_callback):
wrapped_callback = sync_to_async(wrapped_callback, thread_sensitive=True)
try:
response = await wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
# Complain if the view returned None or an uncalled coroutine.
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = await middleware_method(request, response)
# Complain if the template response middleware returned None or
# an uncalled coroutine.
self.check_response(
response,
middleware_method,
name='%s.process_template_response' % (
middleware_method.__self__.__class__.__name__,
)
)
try:
if asyncio.iscoroutinefunction(response.render):
response = await response.render()
else:
response = await sync_to_async(response.render, thread_sensitive=True)()
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
# Make sure the response is not a coroutine
if asyncio.iscoroutine(response):
raise RuntimeError('Response is still a coroutine.')
return response
def resolve_request(self, request):
"""
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
"""
# Work out the resolver.
if hasattr(request, 'urlconf'):
urlconf = request.urlconf
set_urlconf(urlconf)
resolver = get_resolver(urlconf)
else:
resolver = get_resolver()
# Resolve the view, and assign the match object back to the request.
resolver_match = resolver.resolve(request.path_info)
request.resolver_match = resolver_match
return resolver_match
def check_response(self, response, callback, name=None):
"""
Raise an error if the view returned None or an uncalled coroutine.
"""
        if not (response is None or asyncio.iscoroutine(response)):
return
if not name:
if isinstance(callback, types.FunctionType): # FBV
name = 'The view %s.%s' % (callback.__module__, callback.__name__)
else: # CBV
name = 'The view %s.%s.__call__' % (
callback.__module__,
callback.__class__.__name__,
)
if response is None:
raise ValueError(
"%s didn't return an HttpResponse object. It returned None "
"instead." % name
)
elif asyncio.iscoroutine(response):
raise ValueError(
"%s didn't return an HttpResponse object. It returned an "
"unawaited coroutine instead. You may need to add an 'await' "
"into your view." % name
)
# Other utility methods.
def make_view_atomic(self, view):
non_atomic_requests = getattr(view, '_non_atomic_requests', set())
for db in connections.all():
if db.settings_dict['ATOMIC_REQUESTS'] and db.alias not in non_atomic_requests:
if asyncio.iscoroutinefunction(view):
raise RuntimeError(
'You cannot use ATOMIC_REQUESTS with async views.'
)
view = transaction.atomic(using=db.alias)(view)
return view
def process_exception_by_middleware(self, exception, request):
"""
Pass the exception to the exception middleware. If no middleware
        returns a response for this exception, raise it.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
raise
def reset_urlconf(sender, **kwargs):
"""Reset the URLconf after each request is finished."""
set_urlconf(None)
request_finished.connect(reset_urlconf)
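# Illustrative sketch (not part of Django itself): adapt_method_mode() is what
# lets synchronous and asynchronous middleware coexist in a single chain --
# callables are wrapped with sync_to_async() or async_to_sync() so each link
# sees the calling convention it expects. A hedged, minimal demonstration with
# a hypothetical async get_response callable:
def _demo_adapt_method_mode():
    handler = BaseHandler()

    async def async_get_response(request):
        return 'response'

    # Adapt the coroutine function for a synchronous chain; the resulting
    # wrapper can then be called directly, without a running event loop.
    adapted = handler.adapt_method_mode(False, async_get_response)
    return adapted(None)  # -> 'response'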
|
4e7a2712d06dde59d2a345734922b5f68eab39c47dd922115e788e77f92bd79d | import mimetypes
from email import (
charset as Charset, encoders as Encoders, generator, message_from_string,
)
from email.errors import HeaderParseError
from email.header import Header
from email.headerregistry import Address, parser
from email.message import Message
from email.mime.base import MIMEBase
from email.mime.message import MIMEMessage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate, getaddresses, make_msgid
from io import BytesIO, StringIO
from pathlib import Path
from django.conf import settings
from django.core.mail.utils import DNS_NAME
from django.utils.encoding import force_str, punycode
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
utf8_charset = Charset.Charset('utf-8')
utf8_charset.body_encoding = None # Python defaults to BASE64
utf8_charset_qp = Charset.Charset('utf-8')
utf8_charset_qp.body_encoding = Charset.QP
# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
RFC5322_EMAIL_LINE_LENGTH_LIMIT = 998
class BadHeaderError(ValueError):
pass
# Header names that contain structured address data (RFC 5322)
ADDRESS_HEADERS = {
'from',
'sender',
'reply-to',
'to',
'cc',
'bcc',
'resent-from',
'resent-sender',
'resent-to',
'resent-cc',
'resent-bcc',
}
def forbid_multi_line_headers(name, val, encoding):
"""Forbid multi-line headers to prevent header injection."""
encoding = encoding or settings.DEFAULT_CHARSET
val = str(val) # val may be lazy
if '\n' in val or '\r' in val:
raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
try:
val.encode('ascii')
except UnicodeEncodeError:
if name.lower() in ADDRESS_HEADERS:
val = ', '.join(sanitize_address(addr, encoding) for addr in getaddresses((val,)))
else:
val = Header(val, encoding).encode()
else:
if name.lower() == 'subject':
val = Header(val).encode()
return name, val
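# Illustrative sketch (not part of Django itself): the newline check above is
# the header-injection guard -- any attempt to smuggle an extra header through
# a value is rejected outright. A hedged example (the injected address is
# hypothetical):
def _demo_header_injection_guard():
    try:
        forbid_multi_line_headers('Subject', 'hi\r\nBcc: attacker@example.com', 'utf-8')
    except BadHeaderError:
        return True  # the injected "Bcc:" line never reaches the message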
def sanitize_address(addr, encoding):
"""
Format a pair of (name, address) or an email address string.
"""
address = None
if not isinstance(addr, tuple):
addr = force_str(addr)
try:
token, rest = parser.get_mailbox(addr)
except (HeaderParseError, ValueError, IndexError):
raise ValueError('Invalid address "%s"' % addr)
else:
if rest:
# The entire email address must be parsed.
raise ValueError(
'Invalid address; only %s could be parsed from "%s"'
% (token, addr)
)
nm = token.display_name or ''
localpart = token.local_part
domain = token.domain or ''
else:
nm, address = addr
localpart, domain = address.rsplit('@', 1)
nm = Header(nm, encoding).encode()
# Avoid UTF-8 encode, if it's possible.
try:
localpart.encode('ascii')
except UnicodeEncodeError:
localpart = Header(localpart, encoding).encode()
domain = punycode(domain)
parsed_address = Address(nm, username=localpart, domain=domain)
return str(parsed_address)
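# Illustrative sketch (not part of Django itself): sanitize_address()
# normalizes both forms of input, a (name, address) pair or a single RFC 5322
# mailbox string. Non-ASCII display names are RFC 2047 encoded and non-ASCII
# domains are converted to punycode. A hedged example with a hypothetical
# address:
def _demo_sanitize_address():
    # Returns something like '=?utf-8?...?= <jurgen@xn--exmple-cua.com>'.
    return sanitize_address(('Jürgen', 'jurgen@exämple.com'), 'utf-8')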
class MIMEMixin:
def as_string(self, unixfrom=False, linesep='\n'):
"""Return the entire formatted message as a string.
        Optional `unixfrom', when True, means include the Unix From_ envelope
header.
This overrides the default as_string() implementation to not mangle
lines that begin with 'From '. See bug #13433 for details.
"""
fp = StringIO()
g = generator.Generator(fp, mangle_from_=False)
g.flatten(self, unixfrom=unixfrom, linesep=linesep)
return fp.getvalue()
def as_bytes(self, unixfrom=False, linesep='\n'):
"""Return the entire formatted message as bytes.
        Optional `unixfrom', when True, means include the Unix From_ envelope
header.
This overrides the default as_bytes() implementation to not mangle
lines that begin with 'From '. See bug #13433 for details.
"""
fp = BytesIO()
g = generator.BytesGenerator(fp, mangle_from_=False)
g.flatten(self, unixfrom=unixfrom, linesep=linesep)
return fp.getvalue()
class SafeMIMEMessage(MIMEMixin, MIMEMessage):
def __setitem__(self, name, val):
# message/rfc822 attachments must be ASCII
name, val = forbid_multi_line_headers(name, val, 'ascii')
MIMEMessage.__setitem__(self, name, val)
class SafeMIMEText(MIMEMixin, MIMEText):
def __init__(self, _text, _subtype='plain', _charset=None):
self.encoding = _charset
MIMEText.__init__(self, _text, _subtype=_subtype, _charset=_charset)
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val, self.encoding)
MIMEText.__setitem__(self, name, val)
def set_payload(self, payload, charset=None):
if charset == 'utf-8' and not isinstance(charset, Charset.Charset):
has_long_lines = any(
len(line.encode()) > RFC5322_EMAIL_LINE_LENGTH_LIMIT
for line in payload.splitlines()
)
# Quoted-Printable encoding has the side effect of shortening long
# lines, if any (#22561).
charset = utf8_charset_qp if has_long_lines else utf8_charset
MIMEText.set_payload(self, payload, charset=charset)
class SafeMIMEMultipart(MIMEMixin, MIMEMultipart):
def __init__(self, _subtype='mixed', boundary=None, _subparts=None, encoding=None, **_params):
self.encoding = encoding
MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params)
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val, self.encoding)
MIMEMultipart.__setitem__(self, name, val)
class EmailMessage:
"""A container for email information."""
content_subtype = 'plain'
mixed_subtype = 'mixed'
encoding = None # None => use settings default
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, cc=None,
reply_to=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
"""
if to:
if isinstance(to, str):
raise TypeError('"to" argument must be a list or tuple')
self.to = list(to)
else:
self.to = []
if cc:
if isinstance(cc, str):
raise TypeError('"cc" argument must be a list or tuple')
self.cc = list(cc)
else:
self.cc = []
if bcc:
if isinstance(bcc, str):
raise TypeError('"bcc" argument must be a list or tuple')
self.bcc = list(bcc)
else:
self.bcc = []
if reply_to:
if isinstance(reply_to, str):
raise TypeError('"reply_to" argument must be a list or tuple')
self.reply_to = list(reply_to)
else:
self.reply_to = []
self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
self.subject = subject
self.body = body or ''
self.attachments = []
if attachments:
for attachment in attachments:
if isinstance(attachment, MIMEBase):
self.attach(attachment)
else:
self.attach(*attachment)
self.extra_headers = headers or {}
self.connection = connection
def get_connection(self, fail_silently=False):
from django.core.mail import get_connection
if not self.connection:
self.connection = get_connection(fail_silently=fail_silently)
return self.connection
def message(self):
encoding = self.encoding or settings.DEFAULT_CHARSET
msg = SafeMIMEText(self.body, self.content_subtype, encoding)
msg = self._create_message(msg)
msg['Subject'] = self.subject
msg['From'] = self.extra_headers.get('From', self.from_email)
self._set_list_header_if_not_empty(msg, 'To', self.to)
self._set_list_header_if_not_empty(msg, 'Cc', self.cc)
self._set_list_header_if_not_empty(msg, 'Reply-To', self.reply_to)
# Email header names are case-insensitive (RFC 2045), so we have to
# accommodate that when doing comparisons.
header_names = [key.lower() for key in self.extra_headers]
if 'date' not in header_names:
# formatdate() uses stdlib methods to format the date, which use
# the stdlib/OS concept of a timezone, however, Django sets the
# TZ environment variable based on the TIME_ZONE setting which
# will get picked up by formatdate().
msg['Date'] = formatdate(localtime=settings.EMAIL_USE_LOCALTIME)
if 'message-id' not in header_names:
# Use cached DNS_NAME for performance
msg['Message-ID'] = make_msgid(domain=DNS_NAME)
for name, value in self.extra_headers.items():
if name.lower() != 'from': # From is already handled
msg[name] = value
return msg
def recipients(self):
"""
Return a list of all recipients of the email (includes direct
addressees as well as Cc and Bcc entries).
"""
return [email for email in (self.to + self.cc + self.bcc) if email]
def send(self, fail_silently=False):
"""Send the email message."""
if not self.recipients():
# Don't bother creating the network connection if there's nobody to
# send to.
return 0
return self.get_connection(fail_silently).send_messages([self])
def attach(self, filename=None, content=None, mimetype=None):
"""
Attach a file with the given filename and content. The filename can
        be omitted, and the mimetype is guessed if not provided.
If the first parameter is a MIMEBase subclass, insert it directly
into the resulting message attachments.
For a text/* mimetype (guessed or specified), when a bytes object is
specified as content, decode it as UTF-8. If that fails, set the
mimetype to DEFAULT_ATTACHMENT_MIME_TYPE and don't decode the content.
"""
if isinstance(filename, MIMEBase):
assert content is None
assert mimetype is None
self.attachments.append(filename)
else:
assert content is not None
mimetype = mimetype or mimetypes.guess_type(filename)[0] or DEFAULT_ATTACHMENT_MIME_TYPE
basetype, subtype = mimetype.split('/', 1)
if basetype == 'text':
if isinstance(content, bytes):
try:
content = content.decode()
except UnicodeDecodeError:
                        # If the mimetype suggests the file is text but it's
                        # actually binary, decode() raises a UnicodeDecodeError.
mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
self.attachments.append((filename, content, mimetype))
def attach_file(self, path, mimetype=None):
"""
Attach a file from the filesystem.
Set the mimetype to DEFAULT_ATTACHMENT_MIME_TYPE if it isn't specified
and cannot be guessed.
For a text/* mimetype (guessed or specified), decode the file's content
as UTF-8. If that fails, set the mimetype to
DEFAULT_ATTACHMENT_MIME_TYPE and don't decode the content.
"""
path = Path(path)
with path.open('rb') as file:
content = file.read()
self.attach(path.name, content, mimetype)
def _create_message(self, msg):
return self._create_attachments(msg)
def _create_attachments(self, msg):
if self.attachments:
encoding = self.encoding or settings.DEFAULT_CHARSET
body_msg = msg
msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding)
if self.body or body_msg.is_multipart():
msg.attach(body_msg)
for attachment in self.attachments:
if isinstance(attachment, MIMEBase):
msg.attach(attachment)
else:
msg.attach(self._create_attachment(*attachment))
return msg
def _create_mime_attachment(self, content, mimetype):
"""
Convert the content, mimetype pair into a MIME attachment object.
If the mimetype is message/rfc822, content may be an
email.Message or EmailMessage object, as well as a str.
"""
basetype, subtype = mimetype.split('/', 1)
if basetype == 'text':
encoding = self.encoding or settings.DEFAULT_CHARSET
attachment = SafeMIMEText(content, subtype, encoding)
elif basetype == 'message' and subtype == 'rfc822':
# Bug #18967: per RFC2046 s5.2.1, message/rfc822 attachments
# must not be base64 encoded.
if isinstance(content, EmailMessage):
# convert content into an email.Message first
content = content.message()
elif not isinstance(content, Message):
# For compatibility with existing code, parse the message
# into an email.Message object if it is not one already.
content = message_from_string(force_str(content))
attachment = SafeMIMEMessage(content, subtype)
else:
# Encode non-text attachments with base64.
attachment = MIMEBase(basetype, subtype)
attachment.set_payload(content)
Encoders.encode_base64(attachment)
return attachment
def _create_attachment(self, filename, content, mimetype=None):
"""
Convert the filename, content, mimetype triple into a MIME attachment
object.
"""
attachment = self._create_mime_attachment(content, mimetype)
if filename:
try:
filename.encode('ascii')
except UnicodeEncodeError:
filename = ('utf-8', '', filename)
attachment.add_header('Content-Disposition', 'attachment', filename=filename)
return attachment
def _set_list_header_if_not_empty(self, msg, header, values):
"""
Set msg's header, either from self.extra_headers, if present, or from
the values argument.
"""
if values:
try:
value = self.extra_headers[header]
except KeyError:
value = ', '.join(str(v) for v in values)
msg[header] = value
class EmailMultiAlternatives(EmailMessage):
"""
A version of EmailMessage that makes it easy to send multipart/alternative
messages. For example, including text and HTML versions of the text is
made easier.
"""
alternative_subtype = 'alternative'
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, alternatives=None,
cc=None, reply_to=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
"""
super().__init__(
subject, body, from_email, to, bcc, connection, attachments,
headers, cc, reply_to,
)
self.alternatives = alternatives or []
def attach_alternative(self, content, mimetype):
"""Attach an alternative content representation."""
assert content is not None
assert mimetype is not None
self.alternatives.append((content, mimetype))
def _create_message(self, msg):
return self._create_attachments(self._create_alternatives(msg))
def _create_alternatives(self, msg):
encoding = self.encoding or settings.DEFAULT_CHARSET
if self.alternatives:
body_msg = msg
msg = SafeMIMEMultipart(_subtype=self.alternative_subtype, encoding=encoding)
if self.body:
msg.attach(body_msg)
for alternative in self.alternatives:
msg.attach(self._create_mime_attachment(*alternative))
return msg
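# Illustrative sketch (not part of Django itself): typical use of the classes
# above, assuming configured Django settings and hypothetical addresses. Build
# a multipart/alternative message with an HTML alternative and an attachment,
# then inspect the generated MIME tree without sending anything.
def _demo_email_multi_alternatives():
    email = EmailMultiAlternatives(
        subject='Weekly report',
        body='Plain-text body',
        from_email='sender@example.com',
        to=['recipient@example.com'],
    )
    email.attach_alternative('<p>HTML body</p>', 'text/html')
    email.attach('notes.txt', b'attached bytes', 'text/plain')
    msg = email.message()
    # The outer container is multipart/mixed, wrapping a multipart/alternative
    # part that holds the text/plain and text/html bodies.
    return msg.get_content_type()  # -> 'multipart/mixed'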
|
5654f482e0edda3ef32e7f5cae41d23abc37d4a0e2fce08fea09ac83a5f71cc8 | import sys
import time
from importlib import import_module
from django.apps import apps
from django.core.management.base import (
BaseCommand, CommandError, no_translations,
)
from django.core.management.sql import (
emit_post_migrate_signal, emit_pre_migrate_signal,
)
from django.db import DEFAULT_DB_ALIAS, connections, router
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.loader import AmbiguityError
from django.db.migrations.state import ModelState, ProjectState
from django.utils.module_loading import module_has_submodule
from django.utils.text import Truncator
class Command(BaseCommand):
help = "Updates database schema. Manages both apps with migrations and those without."
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
'--skip-checks', action='store_true',
help='Skip system checks.',
)
parser.add_argument(
'app_label', nargs='?',
help='App label of an application to synchronize the state.',
)
parser.add_argument(
'migration_name', nargs='?',
help='Database state will be brought to the state after that '
'migration. Use the name "zero" to unapply all migrations.',
)
parser.add_argument(
'--noinput', '--no-input', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--database',
default=DEFAULT_DB_ALIAS,
help='Nominates a database to synchronize. Defaults to the "default" database.',
)
parser.add_argument(
'--fake', action='store_true',
help='Mark migrations as run without actually running them.',
)
parser.add_argument(
'--fake-initial', action='store_true',
help='Detect if tables already exist and fake-apply initial migrations if so. Make sure '
'that the current database schema matches your initial migration before using this '
'flag. Django will only check for an existing table name.',
)
parser.add_argument(
'--plan', action='store_true',
help='Shows a list of the migration actions that will be performed.',
)
parser.add_argument(
'--run-syncdb', action='store_true',
help='Creates tables for apps without migrations.',
)
parser.add_argument(
'--check', action='store_true', dest='check_unapplied',
help='Exits with a non-zero status if unapplied migrations exist.',
)
@no_translations
def handle(self, *args, **options):
database = options['database']
if not options['skip_checks']:
self.check(databases=[database])
self.verbosity = options['verbosity']
self.interactive = options['interactive']
# Import the 'management' module within each installed app, to register
# dispatcher events.
for app_config in apps.get_app_configs():
if module_has_submodule(app_config.module, "management"):
import_module('.management', app_config.name)
# Get the database we're operating from
connection = connections[database]
# Hook for backends needing any database preparation
connection.prepare_database()
# Work out which apps have migrations and which do not
executor = MigrationExecutor(connection, self.migration_progress_callback)
# Raise an error if any migrations are applied before their dependencies.
executor.loader.check_consistent_history(connection)
        # Before anything else, see if there are conflicting apps and drop
        # out hard if there are any.
conflicts = executor.loader.detect_conflicts()
if conflicts:
name_str = "; ".join(
"%s in %s" % (", ".join(names), app)
for app, names in conflicts.items()
)
raise CommandError(
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (%s).\nTo fix them run "
"'python manage.py makemigrations --merge'" % name_str
)
# If they supplied command line arguments, work out what they mean.
run_syncdb = options['run_syncdb']
target_app_labels_only = True
if options['app_label']:
# Validate app_label.
app_label = options['app_label']
try:
apps.get_app_config(app_label)
except LookupError as err:
raise CommandError(str(err))
if run_syncdb:
if app_label in executor.loader.migrated_apps:
raise CommandError("Can't use run_syncdb with app '%s' as it has migrations." % app_label)
elif app_label not in executor.loader.migrated_apps:
raise CommandError("App '%s' does not have migrations." % app_label)
if options['app_label'] and options['migration_name']:
migration_name = options['migration_name']
if migration_name == "zero":
targets = [(app_label, None)]
else:
try:
migration = executor.loader.get_migration_by_prefix(app_label, migration_name)
except AmbiguityError:
raise CommandError(
"More than one migration matches '%s' in app '%s'. "
"Please be more specific." %
(migration_name, app_label)
)
except KeyError:
raise CommandError("Cannot find a migration matching '%s' from app '%s'." % (
migration_name, app_label))
targets = [(app_label, migration.name)]
target_app_labels_only = False
elif options['app_label']:
targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label]
else:
targets = executor.loader.graph.leaf_nodes()
plan = executor.migration_plan(targets)
exit_dry = plan and options['check_unapplied']
if options['plan']:
self.stdout.write('Planned operations:', self.style.MIGRATE_LABEL)
if not plan:
self.stdout.write(' No planned migration operations.')
for migration, backwards in plan:
self.stdout.write(str(migration), self.style.MIGRATE_HEADING)
for operation in migration.operations:
message, is_error = self.describe_operation(operation, backwards)
style = self.style.WARNING if is_error else None
self.stdout.write(' ' + message, style)
if exit_dry:
sys.exit(1)
return
if exit_dry:
sys.exit(1)
# At this point, ignore run_syncdb if there aren't any apps to sync.
run_syncdb = options['run_syncdb'] and executor.loader.unmigrated_apps
# Print some useful info
if self.verbosity >= 1:
self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:"))
if run_syncdb:
if options['app_label']:
self.stdout.write(
self.style.MIGRATE_LABEL(" Synchronize unmigrated app: %s" % app_label)
)
else:
self.stdout.write(
self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") +
(", ".join(sorted(executor.loader.unmigrated_apps)))
)
if target_app_labels_only:
self.stdout.write(
self.style.MIGRATE_LABEL(" Apply all migrations: ") +
(", ".join(sorted({a for a, n in targets})) or "(none)")
)
else:
if targets[0][1] is None:
self.stdout.write(
self.style.MIGRATE_LABEL(' Unapply all migrations: ') +
str(targets[0][0])
)
else:
self.stdout.write(self.style.MIGRATE_LABEL(
" Target specific migration: ") + "%s, from %s"
% (targets[0][1], targets[0][0])
)
pre_migrate_state = executor._create_project_state(with_applied_migrations=True)
pre_migrate_apps = pre_migrate_state.apps
emit_pre_migrate_signal(
self.verbosity, self.interactive, connection.alias, apps=pre_migrate_apps, plan=plan,
)
# Run the syncdb phase.
if run_syncdb:
if self.verbosity >= 1:
self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:"))
if options['app_label']:
self.sync_apps(connection, [app_label])
else:
self.sync_apps(connection, executor.loader.unmigrated_apps)
# Migrate!
if self.verbosity >= 1:
self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:"))
if not plan:
if self.verbosity >= 1:
self.stdout.write(" No migrations to apply.")
            # If there are changes that aren't in migrations yet, tell the user how to fix it.
autodetector = MigrationAutodetector(
executor.loader.project_state(),
ProjectState.from_apps(apps),
)
changes = autodetector.changes(graph=executor.loader.graph)
if changes:
self.stdout.write(self.style.NOTICE(
" Your models have changes that are not yet reflected "
"in a migration, and so won't be applied."
))
self.stdout.write(self.style.NOTICE(
" Run 'manage.py makemigrations' to make new "
"migrations, and then re-run 'manage.py migrate' to "
"apply them."
))
fake = False
fake_initial = False
else:
fake = options['fake']
fake_initial = options['fake_initial']
post_migrate_state = executor.migrate(
targets, plan=plan, state=pre_migrate_state.clone(), fake=fake,
fake_initial=fake_initial,
)
# post_migrate signals have access to all models. Ensure that all models
# are reloaded in case any are delayed.
post_migrate_state.clear_delayed_apps_cache()
post_migrate_apps = post_migrate_state.apps
# Re-render models of real apps to include relationships now that
# we've got a final state. This wouldn't be necessary if real apps
# models were rendered with relationships in the first place.
with post_migrate_apps.bulk_update():
model_keys = []
for model_state in post_migrate_apps.real_models:
model_key = model_state.app_label, model_state.name_lower
model_keys.append(model_key)
post_migrate_apps.unregister_model(*model_key)
post_migrate_apps.render_multiple([
ModelState.from_model(apps.get_model(*model)) for model in model_keys
])
# Send the post_migrate signal, so individual apps can do whatever they need
# to do at this point.
emit_post_migrate_signal(
self.verbosity, self.interactive, connection.alias, apps=post_migrate_apps, plan=plan,
)
def migration_progress_callback(self, action, migration=None, fake=False):
if self.verbosity >= 1:
compute_time = self.verbosity > 1
if action == "apply_start":
if compute_time:
self.start = time.monotonic()
self.stdout.write(" Applying %s..." % migration, ending="")
self.stdout.flush()
elif action == "apply_success":
elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
if fake:
self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
else:
self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
elif action == "unapply_start":
if compute_time:
self.start = time.monotonic()
self.stdout.write(" Unapplying %s..." % migration, ending="")
self.stdout.flush()
elif action == "unapply_success":
elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
if fake:
self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed))
else:
self.stdout.write(self.style.SUCCESS(" OK" + elapsed))
elif action == "render_start":
if compute_time:
self.start = time.monotonic()
self.stdout.write(" Rendering model states...", ending="")
self.stdout.flush()
elif action == "render_success":
elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else ""
self.stdout.write(self.style.SUCCESS(" DONE" + elapsed))
def sync_apps(self, connection, app_labels):
"""Run the old syncdb-style operation on a list of app_labels."""
with connection.cursor() as cursor:
tables = connection.introspection.table_names(cursor)
# Build the manifest of apps and models that are to be synchronized.
all_models = [
(
app_config.label,
router.get_migratable_models(app_config, connection.alias, include_auto_created=False),
)
for app_config in apps.get_app_configs()
if app_config.models_module is not None and app_config.label in app_labels
]
def model_installed(model):
opts = model._meta
converter = connection.introspection.identifier_converter
return not (
(converter(opts.db_table) in tables) or
(opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)
)
manifest = {
app_name: list(filter(model_installed, model_list))
for app_name, model_list in all_models
}
# Create the tables for each model
if self.verbosity >= 1:
self.stdout.write(' Creating tables...')
with connection.schema_editor() as editor:
for app_name, model_list in manifest.items():
for model in model_list:
# Never install unmanaged models, etc.
if not model._meta.can_migrate(connection):
continue
if self.verbosity >= 3:
self.stdout.write(
' Processing %s.%s model' % (app_name, model._meta.object_name)
)
if self.verbosity >= 1:
self.stdout.write(' Creating table %s' % model._meta.db_table)
editor.create_model(model)
# Deferred SQL is executed when exiting the editor's context.
if self.verbosity >= 1:
self.stdout.write(' Running deferred SQL...')
@staticmethod
def describe_operation(operation, backwards):
"""Return a string that describes a migration operation for --plan."""
prefix = ''
is_error = False
if hasattr(operation, 'code'):
code = operation.reverse_code if backwards else operation.code
action = (code.__doc__ or '') if code else None
elif hasattr(operation, 'sql'):
action = operation.reverse_sql if backwards else operation.sql
else:
action = ''
if backwards:
prefix = 'Undo '
if action is not None:
action = str(action).replace('\n', '')
elif backwards:
action = 'IRREVERSIBLE'
is_error = True
if action:
action = ' -> ' + action
truncated = Truncator(action)
return prefix + operation.describe() + truncated.chars(40), is_error
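# Illustrative sketch (not part of Django itself): the command is normally run
# as "manage.py migrate", but it can also be driven programmatically from
# deployment scripts or tests in a configured project. "blog" is a
# hypothetical app label; the "zero" target unapplies all of its migrations.
def _demo_call_migrate():
    from django.core.management import call_command
    call_command('migrate', 'blog', 'zero', interactive=False, verbosity=0)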
|
168ad45ee0e55435d6aedeb178ca3a6c2f04dd7a02f60d81933146f2582a5198 | from django.apps import apps
from django.core import checks
from django.core.checks.registry import registry
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = "Checks the entire Django project for potential problems."
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument('args', metavar='app_label', nargs='*')
parser.add_argument(
'--tag', '-t', action='append', dest='tags',
help='Run only checks labeled with given tag.',
)
parser.add_argument(
'--list-tags', action='store_true',
help='List available tags.',
)
parser.add_argument(
'--deploy', action='store_true',
help='Check deployment settings.',
)
parser.add_argument(
'--fail-level',
default='ERROR',
choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'],
help=(
'Message level that will cause the command to exit with a '
'non-zero status. Default is ERROR.'
),
)
parser.add_argument(
'--database', action='append', dest='databases',
help='Run database related checks against these aliases.',
)
def handle(self, *app_labels, **options):
include_deployment_checks = options['deploy']
if options['list_tags']:
self.stdout.write('\n'.join(sorted(registry.tags_available(include_deployment_checks))))
return
if app_labels:
app_configs = [apps.get_app_config(app_label) for app_label in app_labels]
else:
app_configs = None
tags = options['tags']
if tags:
try:
invalid_tag = next(
tag for tag in tags if not checks.tag_exists(tag, include_deployment_checks)
)
except StopIteration:
# no invalid tags
pass
else:
raise CommandError('There is no system check with the "%s" tag.' % invalid_tag)
self.check(
app_configs=app_configs,
tags=tags,
display_num_errors=True,
include_deployment_checks=include_deployment_checks,
fail_level=getattr(checks, options['fail_level']),
databases=options['databases'],
)
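# Illustrative sketch (not part of Django itself): an equivalent programmatic
# invocation in a configured project -- run only the security checks,
# including deployment checks, against the default database and fail on
# warnings.
def _demo_call_check():
    from django.core.management import call_command
    call_command('check', deploy=True, tags=['security'],
                 fail_level='WARNING', databases=['default'])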
|
7a35e328e260235638d6574729a737e25058641e2eb0a6b983c88d8c60989071 | import errno
import os
import re
import socket
import sys
from datetime import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.servers.basehttp import (
WSGIServer, get_internal_wsgi_application, run,
)
from django.utils import autoreload
from django.utils.regex_helper import _lazy_re_compile
naiveip_re = _lazy_re_compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X)
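# Illustrative sketch (not part of Django itself): naiveip_re accepts a bare
# port, or an IPv4 address, bracketed IPv6 address, or FQDN followed by
# ":port". A hedged look at the named groups it captures:
def _demo_naiveip_re():
    m = naiveip_re.match('[2001:db8::1]:8000')
    # addr='[2001:db8::1]', ipv6='[2001:db8::1]', ipv4=None, fqdn=None, port='8000'
    return m.group('addr'), m.group('port')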
class Command(BaseCommand):
help = "Starts a lightweight Web server for development."
# Validation is called explicitly each time the server is reloaded.
requires_system_checks = []
stealth_options = ('shutdown_message',)
default_addr = '127.0.0.1'
default_addr_ipv6 = '::1'
default_port = '8000'
protocol = 'http'
server_cls = WSGIServer
def add_arguments(self, parser):
parser.add_argument(
'addrport', nargs='?',
help='Optional port number, or ipaddr:port'
)
parser.add_argument(
'--ipv6', '-6', action='store_true', dest='use_ipv6',
help='Tells Django to use an IPv6 address.',
)
parser.add_argument(
'--nothreading', action='store_false', dest='use_threading',
help='Tells Django to NOT use threading.',
)
parser.add_argument(
'--noreload', action='store_false', dest='use_reloader',
help='Tells Django to NOT use the auto-reloader.',
)
def execute(self, *args, **options):
if options['no_color']:
# We rely on the environment because it's currently the only
# way to reach WSGIRequestHandler. This seems an acceptable
# compromise considering `runserver` runs indefinitely.
os.environ["DJANGO_COLORS"] = "nocolor"
super().execute(*args, **options)
def get_handler(self, *args, **options):
"""Return the default WSGI handler for the runner."""
return get_internal_wsgi_application()
def handle(self, *args, **options):
if not settings.DEBUG and not settings.ALLOWED_HOSTS:
raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.')
self.use_ipv6 = options['use_ipv6']
if self.use_ipv6 and not socket.has_ipv6:
raise CommandError('Your Python does not support IPv6.')
self._raw_ipv6 = False
if not options['addrport']:
self.addr = ''
self.port = self.default_port
else:
m = re.match(naiveip_re, options['addrport'])
if m is None:
raise CommandError('"%s" is not a valid port number '
'or address:port pair.' % options['addrport'])
self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
if not self.port.isdigit():
raise CommandError("%r is not a valid port number." % self.port)
if self.addr:
if _ipv6:
self.addr = self.addr[1:-1]
self.use_ipv6 = True
self._raw_ipv6 = True
elif self.use_ipv6 and not _fqdn:
raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
if not self.addr:
self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr
self._raw_ipv6 = self.use_ipv6
self.run(**options)
def run(self, **options):
"""Run the server, using the autoreloader if needed."""
use_reloader = options['use_reloader']
if use_reloader:
autoreload.run_with_reloader(self.inner_run, **options)
else:
self.inner_run(None, **options)
def inner_run(self, *args, **options):
# If an exception was silenced in ManagementUtility.execute in order
# to be raised in the child process, raise it now.
autoreload.raise_last_exception()
threading = options['use_threading']
# 'shutdown_message' is a stealth option.
shutdown_message = options.get('shutdown_message', '')
quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
self.stdout.write("Performing system checks...\n\n")
self.check(display_num_errors=True)
# Need to check migrations here, so can't use the
# requires_migrations_check attribute.
self.check_migrations()
now = datetime.now().strftime('%B %d, %Y - %X')
self.stdout.write(now)
self.stdout.write((
"Django version %(version)s, using settings %(settings)r\n"
"Starting development server at %(protocol)s://%(addr)s:%(port)s/\n"
"Quit the server with %(quit_command)s."
) % {
"version": self.get_version(),
"settings": settings.SETTINGS_MODULE,
"protocol": self.protocol,
"addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
"port": self.port,
"quit_command": quit_command,
})
try:
handler = self.get_handler(*args, **options)
run(self.addr, int(self.port), handler,
ipv6=self.use_ipv6, threading=threading, server_cls=self.server_cls)
except OSError as e:
# Use helpful error messages instead of ugly tracebacks.
ERRORS = {
errno.EACCES: "You don't have permission to access that port.",
errno.EADDRINUSE: "That port is already in use.",
errno.EADDRNOTAVAIL: "That IP address can't be assigned to.",
}
try:
error_text = ERRORS[e.errno]
except KeyError:
error_text = e
self.stderr.write("Error: %s" % error_text)
# Need to use an OS exit because sys.exit doesn't work in a thread
os._exit(1)
except KeyboardInterrupt:
if shutdown_message:
self.stdout.write(shutdown_message)
sys.exit(0)
|
f1f10726dc3e9389d7f1f80b0367041c4892d8f613d909a19bce899d916c6cc3 | from django.core.management import call_command
from django.core.management.base import BaseCommand
from django.db import connection
class Command(BaseCommand):
help = 'Runs a development server with data from the given fixture(s).'
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='fixture', nargs='*',
help='Path(s) to fixtures to load before running the server.',
)
parser.add_argument(
'--noinput', '--no-input', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--addrport', default='',
help='Port number or ipaddr:port to run the server on.',
)
parser.add_argument(
'--ipv6', '-6', action='store_true', dest='use_ipv6',
help='Tells Django to use an IPv6 address.',
)
def handle(self, *fixture_labels, **options):
verbosity = options['verbosity']
interactive = options['interactive']
# Create a test database.
db_name = connection.creation.create_test_db(verbosity=verbosity, autoclobber=not interactive, serialize=False)
# Import the fixture data into the test database.
call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})
        # Run the development server. Turn off auto-reloading because it
        # would cause this handle() method to be called multiple times.
shutdown_message = (
'\nServer stopped.\nNote that the test database, %r, has not been '
'deleted. You can explore it on your own.' % db_name
)
use_threading = connection.features.test_db_allows_multiple_connections
call_command(
'runserver',
addrport=options['addrport'],
shutdown_message=shutdown_message,
use_reloader=False,
use_ipv6=options['use_ipv6'],
use_threading=use_threading
)
|
b713e0be6030d83113913252c062695630589142926fe1ef1dda0f0be7f05a62 | from django.core.management.base import BaseCommand
def module_to_dict(module, omittable=lambda k: k.startswith('_') or not k.isupper()):
"""Convert a module namespace to a Python dictionary."""
return {k: repr(getattr(module, k)) for k in dir(module) if not omittable(k)}
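# Illustrative sketch (not part of Django itself): module_to_dict() keeps only
# upper-case, non-underscore names and stores each value as its repr(), which
# is what the diffing methods below compare. For example:
def _demo_module_to_dict():
    from django.conf import global_settings
    settings_dict = module_to_dict(global_settings)
    return settings_dict['DEBUG']  # -> 'False' (a repr string, not a bool)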
class Command(BaseCommand):
help = """Displays differences between the current settings.py and Django's
default settings."""
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
'--all', action='store_true',
help=(
'Display all settings, regardless of their value. In "hash" '
'mode, default values are prefixed by "###".'
),
)
parser.add_argument(
'--default', metavar='MODULE',
help=(
"The settings module to compare the current settings against. Leave empty to "
"compare against Django's default settings."
),
)
parser.add_argument(
'--output', default='hash', choices=('hash', 'unified'),
help=(
"Selects the output format. 'hash' mode displays each changed "
"setting, with the settings that don't appear in the defaults "
"followed by ###. 'unified' mode prefixes the default setting "
"with a minus sign, followed by the changed setting prefixed "
"with a plus sign."
),
)
def handle(self, **options):
from django.conf import settings, Settings, global_settings
# Because settings are imported lazily, we need to explicitly load them.
if not settings.configured:
settings._setup()
user_settings = module_to_dict(settings._wrapped)
default = options['default']
default_settings = module_to_dict(Settings(default) if default else global_settings)
output_func = {
'hash': self.output_hash,
'unified': self.output_unified,
}[options['output']]
return '\n'.join(output_func(user_settings, default_settings, **options))
def output_hash(self, user_settings, default_settings, **options):
# Inspired by Postfix's "postconf -n".
output = []
for key in sorted(user_settings):
if key not in default_settings:
output.append("%s = %s ###" % (key, user_settings[key]))
elif user_settings[key] != default_settings[key]:
output.append("%s = %s" % (key, user_settings[key]))
elif options['all']:
output.append("### %s = %s" % (key, user_settings[key]))
return output
def output_unified(self, user_settings, default_settings, **options):
output = []
for key in sorted(user_settings):
if key not in default_settings:
output.append(self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])))
elif user_settings[key] != default_settings[key]:
output.append(self.style.ERROR("- %s = %s" % (key, default_settings[key])))
output.append(self.style.SUCCESS("+ %s = %s" % (key, user_settings[key])))
elif options['all']:
output.append(" %s = %s" % (key, user_settings[key]))
return output
|
faf6c518ed97947b912af5038abe53d1b1f0bb0bd0c92c1b33b4b15eedb3061f | import codecs
import concurrent.futures
import glob
import os
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, is_ignored_path, popen_wrapper,
)
def has_bom(fn):
with open(fn, 'rb') as f:
sample = f.read(4)
return sample.startswith((codecs.BOM_UTF8, codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE))
def is_writable(path):
# Known side effect: updating file access/modified time to current time if
# it is writable.
try:
with open(path, 'a'):
os.utime(path, None)
except OSError:
return False
return True
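# Illustrative sketch (not part of Django itself): has_bom() only needs the
# first few bytes of a file to decide. A hedged example that writes a UTF-8
# BOM to a throwaway temporary .po file, so nothing in the project tree is
# touched:
def _demo_has_bom():
    import tempfile
    with tempfile.NamedTemporaryFile(suffix='.po', delete=False) as f:
        f.write(codecs.BOM_UTF8 + b'msgid ""\nmsgstr ""\n')
    return has_bom(f.name)  # -> True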
class Command(BaseCommand):
help = 'Compiles .po files to .mo files for use with builtin gettext support.'
requires_system_checks = []
program = 'msgfmt'
program_options = ['--check-format']
def add_arguments(self, parser):
parser.add_argument(
'--locale', '-l', action='append', default=[],
help='Locale(s) to process (e.g. de_AT). Default is to process all. '
'Can be used multiple times.',
)
parser.add_argument(
'--exclude', '-x', action='append', default=[],
help='Locales to exclude. Default is none. Can be used multiple times.',
)
parser.add_argument(
'--use-fuzzy', '-f', dest='fuzzy', action='store_true',
help='Use fuzzy translations.',
)
parser.add_argument(
'--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore directories matching this glob-style pattern. '
'Use multiple times to ignore more.',
)
def handle(self, **options):
locale = options['locale']
exclude = options['exclude']
ignore_patterns = set(options['ignore_patterns'])
self.verbosity = options['verbosity']
if options['fuzzy']:
self.program_options = self.program_options + ['-f']
if find_command(self.program) is None:
raise CommandError("Can't find %s. Make sure you have GNU gettext "
"tools 0.15 or newer installed." % self.program)
basedirs = [os.path.join('conf', 'locale'), 'locale']
if os.environ.get('DJANGO_SETTINGS_MODULE'):
from django.conf import settings
basedirs.extend(settings.LOCALE_PATHS)
# Walk entire tree, looking for locale directories
for dirpath, dirnames, filenames in os.walk('.', topdown=True):
for dirname in dirnames:
if is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns):
dirnames.remove(dirname)
elif dirname == 'locale':
basedirs.append(os.path.join(dirpath, dirname))
# Gather existing directories.
basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
if not basedirs:
raise CommandError("This script should be run from the Django Git "
"checkout or your project or app tree, or with "
"the settings module specified.")
# Build locale list
all_locales = []
for basedir in basedirs:
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % basedir))
all_locales.extend(map(os.path.basename, locale_dirs))
# Account for excluded locales
locales = locale or all_locales
locales = set(locales).difference(exclude)
self.has_errors = False
for basedir in basedirs:
if locales:
dirs = [os.path.join(basedir, locale, 'LC_MESSAGES') for locale in locales]
else:
dirs = [basedir]
locations = []
for ldir in dirs:
for dirpath, dirnames, filenames in os.walk(ldir):
locations.extend((dirpath, f) for f in filenames if f.endswith('.po'))
if locations:
self.compile_messages(locations)
if self.has_errors:
raise CommandError('compilemessages generated one or more errors.')
def compile_messages(self, locations):
"""
Locations is a list of tuples: [(directory, file), ...]
"""
with concurrent.futures.ThreadPoolExecutor() as executor:
futures = []
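            # Queue one msgfmt invocation per .po file; results are gathered below as the futures complete.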
for i, (dirpath, f) in enumerate(locations):
if self.verbosity > 0:
self.stdout.write('processing file %s in %s' % (f, dirpath))
po_path = os.path.join(dirpath, f)
if has_bom(po_path):
self.stderr.write(
'The %s file has a BOM (Byte Order Mark). Django only '
'supports .po files encoded in UTF-8 and without any BOM.' % po_path
)
self.has_errors = True
continue
base_path = os.path.splitext(po_path)[0]
# Check writability on first location
if i == 0 and not is_writable(base_path + '.mo'):
self.stderr.write(
'The po files under %s are in a seemingly not writable location. '
'mo files will not be updated/created.' % dirpath
)
self.has_errors = True
return
args = [self.program] + self.program_options + [
'-o', base_path + '.mo', base_path + '.po'
]
futures.append(executor.submit(popen_wrapper, args))
for future in concurrent.futures.as_completed(futures):
output, errors, status = future.result()
if status:
if self.verbosity > 0:
if errors:
self.stderr.write("Execution of %s failed: %s" % (self.program, errors))
else:
self.stderr.write("Execution of %s failed" % self.program)
self.has_errors = True
0bcca29446a30c32af923765e17313ddcf33d28c74f2a7e87d333296efc6def2
import functools
import glob
import gzip
import os
import sys
import warnings
import zipfile
from itertools import product
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.core.management.utils import parse_apps_and_model_labels
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router,
transaction,
)
from django.utils.functional import cached_property
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
try:
import lzma
has_lzma = True
except ImportError:
has_lzma = False
READ_STDIN = '-'
class Command(BaseCommand):
help = 'Installs the named fixture(s) in the database.'
missing_args_message = (
"No database fixture specified. Please provide the path of at least "
"one fixture in the command line."
)
def add_arguments(self, parser):
parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture labels.')
parser.add_argument(
'--database', default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to load fixtures into. Defaults to the "default" database.',
)
parser.add_argument(
'--app', dest='app_label',
help='Only look for fixtures in the specified app.',
)
parser.add_argument(
'--ignorenonexistent', '-i', action='store_true', dest='ignore',
help='Ignores entries in the serialized data for fields that do not '
'currently exist on the model.',
)
parser.add_argument(
'-e', '--exclude', action='append', default=[],
help='An app_label or app_label.ModelName to exclude. Can be used multiple times.',
)
parser.add_argument(
'--format',
help='Format of serialized data when reading from stdin.',
)
def handle(self, *fixture_labels, **options):
self.ignore = options['ignore']
self.using = options['database']
self.app_label = options['app_label']
self.verbosity = options['verbosity']
self.excluded_models, self.excluded_apps = parse_apps_and_model_labels(options['exclude'])
self.format = options['format']
with transaction.atomic(using=self.using):
self.loaddata(fixture_labels)
# Close the DB connection -- unless we're still in a transaction. This
# is required as a workaround for an edge case in MySQL: if the same
# connection is used to create tables, load data, and query, the query
# can return incorrect results. See Django #7572, MySQL #37735.
if transaction.get_autocommit(self.using):
connections[self.using].close()
def loaddata(self, fixture_labels):
connection = connections[self.using]
# Keep a count of the installed objects and fixtures
self.fixture_count = 0
self.loaded_object_count = 0
self.fixture_object_count = 0
self.models = set()
self.serialization_formats = serializers.get_public_serializer_formats()
# Forcing binary mode may be revisited after dropping Python 2 support (see #22399)
self.compression_formats = {
None: (open, 'rb'),
'gz': (gzip.GzipFile, 'rb'),
'zip': (SingleZipReader, 'r'),
'stdin': (lambda *args: sys.stdin, None),
}
if has_bz2:
self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
if has_lzma:
self.compression_formats['lzma'] = (lzma.LZMAFile, 'r')
self.compression_formats['xz'] = (lzma.LZMAFile, 'r')
# Django's test suite repeatedly tries to load initial_data fixtures
# from apps that don't have any fixtures. Because disabling constraint
        # checks can be expensive on some databases (especially MSSQL), bail
# out early if no fixtures are found.
for fixture_label in fixture_labels:
if self.find_fixtures(fixture_label):
break
else:
return
with connection.constraint_checks_disabled():
self.objs_with_deferred_fields = []
for fixture_label in fixture_labels:
self.load_label(fixture_label)
for obj in self.objs_with_deferred_fields:
obj.save_deferred_fields(using=self.using)
# Since we disabled constraint checks, we must manually check for
# any invalid keys that might have been added
table_names = [model._meta.db_table for model in self.models]
try:
connection.check_constraints(table_names=table_names)
except Exception as e:
e.args = ("Problem installing fixtures: %s" % e,)
raise
# If we found even one object in a fixture, we need to reset the
# database sequences.
if self.loaded_object_count > 0:
sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models)
if sequence_sql:
if self.verbosity >= 2:
self.stdout.write('Resetting sequences')
with connection.cursor() as cursor:
for line in sequence_sql:
cursor.execute(line)
if self.verbosity >= 1:
if self.fixture_object_count == self.loaded_object_count:
self.stdout.write(
"Installed %d object(s) from %d fixture(s)"
% (self.loaded_object_count, self.fixture_count)
)
else:
self.stdout.write(
"Installed %d object(s) (of %d) from %d fixture(s)"
% (self.loaded_object_count, self.fixture_object_count, self.fixture_count)
)
def load_label(self, fixture_label):
"""Load fixtures files for a given label."""
show_progress = self.verbosity >= 3
for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
_, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
open_method, mode = self.compression_formats[cmp_fmt]
fixture = open_method(fixture_file, mode)
try:
self.fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if self.verbosity >= 2:
self.stdout.write(
"Installing %s fixture '%s' from %s."
% (ser_fmt, fixture_name, humanize(fixture_dir))
)
objects = serializers.deserialize(
ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,
handle_forward_references=True,
)
for obj in objects:
objects_in_fixture += 1
if (obj.object._meta.app_config in self.excluded_apps or
type(obj.object) in self.excluded_models):
continue
if router.allow_migrate_model(self.using, obj.object.__class__):
loaded_objects_in_fixture += 1
self.models.add(obj.object.__class__)
try:
obj.save(using=self.using)
if show_progress:
self.stdout.write(
'\rProcessed %i object(s).' % loaded_objects_in_fixture,
ending=''
)
# psycopg2 raises ValueError if data contains NUL chars.
except (DatabaseError, IntegrityError, ValueError) as e:
e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
'app_label': obj.object._meta.app_label,
'object_name': obj.object._meta.object_name,
'pk': obj.object.pk,
'error_msg': e,
},)
raise
if obj.deferred_fields:
self.objs_with_deferred_fields.append(obj)
if objects and show_progress:
self.stdout.write() # Add a newline after progress indicator.
self.loaded_object_count += loaded_objects_in_fixture
self.fixture_object_count += objects_in_fixture
except Exception as e:
if not isinstance(e, CommandError):
e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
raise
finally:
fixture.close()
# Warn if the fixture we loaded contains 0 objects.
if objects_in_fixture == 0:
warnings.warn(
"No fixture data found for '%s'. (File format may be "
"invalid.)" % fixture_name,
RuntimeWarning
)
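    # Cache results: find_fixtures() is called once per label when probing whether
    # any fixtures exist at all, and again when each label is actually loaded.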
@functools.lru_cache(maxsize=None)
def find_fixtures(self, fixture_label):
"""Find fixture files for a given label."""
if fixture_label == READ_STDIN:
return [(READ_STDIN, None, READ_STDIN)]
fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
databases = [self.using, None]
cmp_fmts = list(self.compression_formats) if cmp_fmt is None else [cmp_fmt]
ser_fmts = serializers.get_public_serializer_formats() if ser_fmt is None else [ser_fmt]
if self.verbosity >= 2:
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
if os.path.isabs(fixture_name):
fixture_dirs = [os.path.dirname(fixture_name)]
fixture_name = os.path.basename(fixture_name)
else:
fixture_dirs = self.fixture_dirs
if os.path.sep in os.path.normpath(fixture_name):
fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
for dir_ in fixture_dirs]
fixture_name = os.path.basename(fixture_name)
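        # Candidate file names are name[.database][.format][.compression],
        # e.g. 'data.json' or 'data.default.json.gz'.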
suffixes = (
'.'.join(ext for ext in combo if ext)
for combo in product(databases, ser_fmts, cmp_fmts)
)
targets = {'.'.join((fixture_name, suffix)) for suffix in suffixes}
fixture_files = []
for fixture_dir in fixture_dirs:
if self.verbosity >= 2:
self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
fixture_files_in_dir = []
path = os.path.join(fixture_dir, fixture_name)
for candidate in glob.iglob(glob.escape(path) + '*'):
if os.path.basename(candidate) in targets:
# Save the fixture_dir and fixture_name for future error messages.
fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
if self.verbosity >= 2 and not fixture_files_in_dir:
self.stdout.write("No fixture '%s' in %s." %
(fixture_name, humanize(fixture_dir)))
# Check kept for backwards-compatibility; it isn't clear why
# duplicates are only allowed in different directories.
if len(fixture_files_in_dir) > 1:
raise CommandError(
"Multiple fixtures named '%s' in %s. Aborting." %
(fixture_name, humanize(fixture_dir)))
fixture_files.extend(fixture_files_in_dir)
if not fixture_files:
raise CommandError("No fixture named '%s' found." % fixture_name)
return fixture_files
@cached_property
def fixture_dirs(self):
"""
Return a list of fixture directories.
The list contains the 'fixtures' subdirectory of each installed
application, if it exists, the directories in FIXTURE_DIRS, and the
current directory.
"""
dirs = []
fixture_dirs = settings.FIXTURE_DIRS
if len(fixture_dirs) != len(set(fixture_dirs)):
raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
for app_config in apps.get_app_configs():
app_label = app_config.label
app_dir = os.path.join(app_config.path, 'fixtures')
if app_dir in fixture_dirs:
raise ImproperlyConfigured(
"'%s' is a default fixture directory for the '%s' app "
"and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label)
)
if self.app_label and app_label != self.app_label:
continue
if os.path.isdir(app_dir):
dirs.append(app_dir)
dirs.extend(fixture_dirs)
dirs.append('')
return [os.path.realpath(d) for d in dirs]
def parse_name(self, fixture_name):
"""
        Split a fixture name into (name, serialization format, compression format).
"""
if fixture_name == READ_STDIN:
if not self.format:
raise CommandError('--format must be specified when reading from stdin.')
return READ_STDIN, self.format, 'stdin'
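        # A name carries at most two suffixes (serialization format and compression), e.g. 'data.json.gz'.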
parts = fixture_name.rsplit('.', 2)
if len(parts) > 1 and parts[-1] in self.compression_formats:
cmp_fmt = parts[-1]
parts = parts[:-1]
else:
cmp_fmt = None
if len(parts) > 1:
if parts[-1] in self.serialization_formats:
ser_fmt = parts[-1]
parts = parts[:-1]
else:
raise CommandError(
"Problem installing fixture '%s': %s is not a known "
"serialization format." % ('.'.join(parts[:-1]), parts[-1]))
else:
ser_fmt = None
name = '.'.join(parts)
return name, ser_fmt, cmp_fmt
class SingleZipReader(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if len(self.namelist()) != 1:
raise ValueError("Zip-compressed fixtures must contain one file.")
def read(self):
return zipfile.ZipFile.read(self, self.namelist()[0])
def humanize(dirname):
return "'%s'" % dirname if dirname else 'absolute path'
48183b899a4ae59efe7987382a437230dc91b0047c5d87fce34f77ea38f347e8
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management.utils import get_command_line_option
from django.test.utils import get_runner
class Command(BaseCommand):
help = 'Discover and run tests in the specified modules or the current directory.'
# DiscoverRunner runs the checks after databases are set up.
requires_system_checks = []
test_runner = None
def run_from_argv(self, argv):
"""
Pre-parse the command line to extract the value of the --testrunner
option. This allows a test runner to define additional command line
arguments.
"""
self.test_runner = get_command_line_option(argv, '--testrunner')
super().run_from_argv(argv)
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='test_label', nargs='*',
help='Module paths to test; can be modulename, modulename.TestCase or modulename.TestCase.test_method'
)
parser.add_argument(
'--noinput', '--no-input', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'--failfast', action='store_true',
help='Tells Django to stop running the test suite after first failed test.',
)
parser.add_argument(
'--testrunner',
help='Tells Django to use specified test runner class instead of '
'the one specified by the TEST_RUNNER setting.',
)
test_runner_class = get_runner(settings, self.test_runner)
if hasattr(test_runner_class, 'add_arguments'):
test_runner_class.add_arguments(parser)
def handle(self, *test_labels, **options):
TestRunner = get_runner(settings, options['testrunner'])
test_runner = TestRunner(**options)
failures = test_runner.run_tests(test_labels)
if failures:
sys.exit(1)
e2cd53ec355b80ee2a38d42a80d4aab1df7ebf1cfbbd17e1c9748cdc9325d8d1
from django.conf import settings
from django.core.cache import caches
from django.core.cache.backends.db import BaseDatabaseCache
from django.core.management.base import BaseCommand, CommandError
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, connections, models, router, transaction,
)
class Command(BaseCommand):
help = "Creates the tables needed to use the SQL cache backend."
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='table_name', nargs='*',
help='Optional table names. Otherwise, settings.CACHES is used to find cache tables.',
)
parser.add_argument(
'--database',
default=DEFAULT_DB_ALIAS,
help='Nominates a database onto which the cache tables will be '
'installed. Defaults to the "default" database.',
)
parser.add_argument(
'--dry-run', action='store_true',
help='Does not create the table, just prints the SQL that would be run.',
)
def handle(self, *tablenames, **options):
db = options['database']
self.verbosity = options['verbosity']
dry_run = options['dry_run']
if tablenames:
# Legacy behavior, tablename specified as argument
for tablename in tablenames:
self.create_table(db, tablename, dry_run)
else:
for cache_alias in settings.CACHES:
cache = caches[cache_alias]
if isinstance(cache, BaseDatabaseCache):
self.create_table(db, cache._table, dry_run)
def create_table(self, database, tablename, dry_run):
cache = BaseDatabaseCache(tablename, {})
if not router.allow_migrate_model(database, cache.cache_model_class):
return
connection = connections[database]
if tablename in connection.introspection.table_names():
if self.verbosity > 0:
self.stdout.write("Cache table '%s' already exists." % tablename)
return
fields = (
# "key" is a reserved word in MySQL, so use "cache_key" instead.
models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
models.TextField(name='value'),
models.DateTimeField(name='expires', db_index=True),
)
table_output = []
index_output = []
qn = connection.ops.quote_name
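        # Build the column definitions; CREATE INDEX statements for db_index
        # columns are collected separately and executed after the table exists.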
for f in fields:
field_output = [
qn(f.name),
f.db_type(connection=connection),
'%sNULL' % ('NOT ' if not f.null else ''),
]
if f.primary_key:
field_output.append("PRIMARY KEY")
elif f.unique:
field_output.append("UNIQUE")
if f.db_index:
unique = "UNIQUE " if f.unique else ""
index_output.append(
"CREATE %sINDEX %s ON %s (%s);" %
(unique, qn('%s_%s' % (tablename, f.name)), qn(tablename), qn(f.name))
)
table_output.append(" ".join(field_output))
full_statement = ["CREATE TABLE %s (" % qn(tablename)]
for i, line in enumerate(table_output):
full_statement.append(' %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
full_statement.append(');')
full_statement = "\n".join(full_statement)
if dry_run:
self.stdout.write(full_statement)
for statement in index_output:
self.stdout.write(statement)
return
with transaction.atomic(using=database, savepoint=connection.features.can_rollback_ddl):
with connection.cursor() as curs:
try:
curs.execute(full_statement)
except DatabaseError as e:
raise CommandError(
"Cache table '%s' could not be created.\nThe error was: %s." %
(tablename, e))
for statement in index_output:
curs.execute(statement)
if self.verbosity > 1:
self.stdout.write("Cache table '%s' created." % tablename)
ff125eb2bd904de2555eb1691683aa0dedbd0f3cfbc5d02407815b8008e8d541
import subprocess
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
class Command(BaseCommand):
help = (
"Runs the command-line client for specified database, or the "
"default database if none is provided."
)
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
'--database', default=DEFAULT_DB_ALIAS,
help='Nominates a database onto which to open a shell. Defaults to the "default" database.',
)
parameters = parser.add_argument_group('parameters', prefix_chars='--')
parameters.add_argument('parameters', nargs='*')
def handle(self, **options):
connection = connections[options['database']]
try:
connection.client.runshell(options['parameters'])
except FileNotFoundError:
# Note that we're assuming the FileNotFoundError relates to the
# command missing. It could be raised for some other reason, in
# which case this error message would be inaccurate. Still, this
# message catches the common case.
raise CommandError(
'You appear not to have the %r program installed or on your path.' %
connection.client.executable_name
)
except subprocess.CalledProcessError as e:
raise CommandError(
'"%s" returned non-zero exit status %s.' % (
' '.join(e.cmd),
e.returncode,
),
returncode=e.returncode,
)
e1430415a7e643204538d890c9ced276f239584e4f227e48ccf7975c950529a3
import glob
import os
import re
import sys
from functools import total_ordering
from itertools import dropwhile
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.temp import NamedTemporaryFile
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import (
find_command, handle_extensions, is_ignored_path, popen_wrapper,
)
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
from django.utils.functional import cached_property
from django.utils.jslex import prepare_js_for_gettext
from django.utils.regex_helper import _lazy_re_compile
from django.utils.text import get_text_list
from django.utils.translation import templatize
plural_forms_re = _lazy_re_compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
STATUS_OK = 0
NO_LOCALE_DIR = object()
def check_programs(*programs):
for program in programs:
if find_command(program) is None:
raise CommandError(
"Can't find %s. Make sure you have GNU gettext tools 0.15 or "
"newer installed." % program
)
@total_ordering
class TranslatableFile:
def __init__(self, dirpath, file_name, locale_dir):
self.file = file_name
self.dirpath = dirpath
self.locale_dir = locale_dir
def __repr__(self):
return "<%s: %s>" % (
self.__class__.__name__,
os.sep.join([self.dirpath, self.file]),
)
def __eq__(self, other):
return self.path == other.path
def __lt__(self, other):
return self.path < other.path
@property
def path(self):
return os.path.join(self.dirpath, self.file)
class BuildFile:
"""
Represent the state of a translatable file during the build process.
"""
def __init__(self, command, domain, translatable):
self.command = command
self.domain = domain
self.translatable = translatable
@cached_property
def is_templatized(self):
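        # JS sources only need a preprocessing pass when xgettext is too old to
        # parse JavaScript natively; in the 'django' domain, anything that isn't
        # a .py file (i.e. templates) is always templatized.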
if self.domain == 'djangojs':
return self.command.gettext_version < (0, 18, 3)
elif self.domain == 'django':
file_ext = os.path.splitext(self.translatable.file)[1]
return file_ext != '.py'
return False
@cached_property
def path(self):
return self.translatable.path
@cached_property
def work_path(self):
"""
Path to a file which is being fed into GNU gettext pipeline. This may
be either a translatable or its preprocessed version.
"""
if not self.is_templatized:
return self.path
extension = {
'djangojs': 'c',
'django': 'py',
}.get(self.domain)
filename = '%s.%s' % (self.translatable.file, extension)
return os.path.join(self.translatable.dirpath, filename)
def preprocess(self):
"""
Preprocess (if necessary) a translatable file before passing it to
xgettext GNU gettext utility.
"""
if not self.is_templatized:
return
with open(self.path, encoding='utf-8') as fp:
src_data = fp.read()
if self.domain == 'djangojs':
content = prepare_js_for_gettext(src_data)
elif self.domain == 'django':
content = templatize(src_data, origin=self.path[2:])
with open(self.work_path, 'w', encoding='utf-8') as fp:
fp.write(content)
def postprocess_messages(self, msgs):
"""
Postprocess messages generated by xgettext GNU gettext utility.
Transform paths as if these messages were generated from original
translatable files rather than from preprocessed versions.
"""
if not self.is_templatized:
return msgs
# Remove '.py' suffix
if os.name == 'nt':
# Preserve '.\' prefix on Windows to respect gettext behavior
old_path = self.work_path
new_path = self.path
else:
old_path = self.work_path[2:]
new_path = self.path[2:]
return re.sub(
r'^(#: .*)(' + re.escape(old_path) + r')',
lambda match: match[0].replace(old_path, new_path),
msgs,
flags=re.MULTILINE
)
def cleanup(self):
"""
Remove a preprocessed copy of a translatable file (if any).
"""
if self.is_templatized:
# This check is needed for the case of a symlinked file and its
# source being processed inside a single group (locale dir);
# removing either of those two removes both.
if os.path.exists(self.work_path):
os.unlink(self.work_path)
def normalize_eols(raw_contents):
"""
    Take a block of raw text, split it into lines with universal newline
    handling, and return it rejoined with normalized `\n` EOL sequences,
    ready to be written to disk using the current platform's native EOLs.
"""
lines_list = raw_contents.splitlines()
# Ensure last line has its EOL
if lines_list and lines_list[-1]:
lines_list.append('')
return '\n'.join(lines_list)
def write_pot_file(potfile, msgs):
"""
Write the `potfile` with the `msgs` contents, making sure its format is
valid.
"""
pot_lines = msgs.splitlines()
if os.path.exists(potfile):
# Strip the header
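        # dropwhile() skips the leading non-blank lines of the new msgs (its
        # metadata block), so the existing file keeps its original header.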
lines = dropwhile(len, pot_lines)
else:
lines = []
found, header_read = False, False
for line in pot_lines:
if not found and not header_read:
if 'charset=CHARSET' in line:
found = True
line = line.replace('charset=CHARSET', 'charset=UTF-8')
if not line and not found:
header_read = True
lines.append(line)
msgs = '\n'.join(lines)
# Force newlines of POT files to '\n' to work around
# https://savannah.gnu.org/bugs/index.php?52395
with open(potfile, 'a', encoding='utf-8', newline='\n') as fp:
fp.write(msgs)
class Command(BaseCommand):
help = (
"Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale, --exclude, or --all options."
)
translatable_file_class = TranslatableFile
build_file_class = BuildFile
requires_system_checks = []
msgmerge_options = ['-q', '--previous']
msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
parser.add_argument(
'--locale', '-l', default=[], action='append',
help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
'Can be used multiple times.',
)
parser.add_argument(
'--exclude', '-x', default=[], action='append',
help='Locales to exclude. Default is none. Can be used multiple times.',
)
parser.add_argument(
'--domain', '-d', default='django',
help='The domain of the message files (default: "django").',
)
parser.add_argument(
'--all', '-a', action='store_true',
help='Updates the message files for all existing locales.',
)
parser.add_argument(
'--extension', '-e', dest='extensions', action='append',
help='The file extension(s) to examine (default: "html,txt,py", or "js" '
'if the domain is "djangojs"). Separate multiple extensions with '
'commas, or use -e multiple times.',
)
parser.add_argument(
'--symlinks', '-s', action='store_true',
help='Follows symlinks to directories when examining source code '
'and templates for translation strings.',
)
parser.add_argument(
'--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN',
help='Ignore files or directories matching this glob-style pattern. '
'Use multiple times to ignore more.',
)
parser.add_argument(
'--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.",
)
parser.add_argument(
'--no-wrap', action='store_true',
help="Don't break long message lines into several lines.",
)
parser.add_argument(
'--no-location', action='store_true',
help="Don't write '#: filename:line' lines.",
)
parser.add_argument(
'--add-location',
choices=('full', 'file', 'never'), const='full', nargs='?',
help=(
"Controls '#: filename:line' lines. If the option is 'full' "
"(the default if not given), the lines include both file name "
"and line number. If it's 'file', the line number is omitted. If "
"it's 'never', the lines are suppressed (same as --no-location). "
"--add-location requires gettext 0.19 or newer."
),
)
parser.add_argument(
'--no-obsolete', action='store_true',
help="Remove obsolete message strings.",
)
parser.add_argument(
'--keep-pot', action='store_true',
help="Keep .pot file after making messages. Useful when debugging.",
)
def handle(self, *args, **options):
locale = options['locale']
exclude = options['exclude']
self.domain = options['domain']
self.verbosity = options['verbosity']
process_all = options['all']
extensions = options['extensions']
self.symlinks = options['symlinks']
ignore_patterns = options['ignore_patterns']
if options['use_default_ignore_patterns']:
ignore_patterns += ['CVS', '.*', '*~', '*.pyc']
self.ignore_patterns = list(set(ignore_patterns))
# Avoid messing with mutable class variables
if options['no_wrap']:
self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap']
self.msguniq_options = self.msguniq_options[:] + ['--no-wrap']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap']
self.xgettext_options = self.xgettext_options[:] + ['--no-wrap']
if options['no_location']:
self.msgmerge_options = self.msgmerge_options[:] + ['--no-location']
self.msguniq_options = self.msguniq_options[:] + ['--no-location']
self.msgattrib_options = self.msgattrib_options[:] + ['--no-location']
self.xgettext_options = self.xgettext_options[:] + ['--no-location']
if options['add_location']:
if self.gettext_version < (0, 19):
raise CommandError(
"The --add-location option requires gettext 0.19 or later. "
"You have %s." % '.'.join(str(x) for x in self.gettext_version)
)
arg_add_location = "--add-location=%s" % options['add_location']
self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location]
self.msguniq_options = self.msguniq_options[:] + [arg_add_location]
self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location]
self.xgettext_options = self.xgettext_options[:] + [arg_add_location]
self.no_obsolete = options['no_obsolete']
self.keep_pot = options['keep_pot']
if self.domain not in ('django', 'djangojs'):
raise CommandError("currently makemessages only supports domains "
"'django' and 'djangojs'")
if self.domain == 'djangojs':
exts = extensions or ['js']
else:
exts = extensions or ['html', 'txt', 'py']
self.extensions = handle_extensions(exts)
if (not locale and not exclude and not process_all) or self.domain is None:
raise CommandError(
"Type '%s help %s' for usage information."
% (os.path.basename(sys.argv[0]), sys.argv[1])
)
if self.verbosity > 1:
self.stdout.write(
'examining files with the extensions: %s'
% get_text_list(list(self.extensions), 'and')
)
self.invoked_for_django = False
self.locale_paths = []
self.default_locale_path = None
if os.path.isdir(os.path.join('conf', 'locale')):
self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))]
self.default_locale_path = self.locale_paths[0]
self.invoked_for_django = True
else:
if self.settings_available:
self.locale_paths.extend(settings.LOCALE_PATHS)
# Allow to run makemessages inside an app dir
if os.path.isdir('locale'):
self.locale_paths.append(os.path.abspath('locale'))
if self.locale_paths:
self.default_locale_path = self.locale_paths[0]
os.makedirs(self.default_locale_path, exist_ok=True)
# Build locale list
looks_like_locale = re.compile(r'[a-z]{2}')
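        # Only directories whose names start with two lowercase letters (e.g. 'de', 'pt_BR') are treated as locale dirs.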
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path))
all_locales = [
lang_code for lang_code in map(os.path.basename, locale_dirs)
if looks_like_locale.match(lang_code)
]
# Account for excluded locales
if process_all:
locales = all_locales
else:
locales = locale or all_locales
locales = set(locales).difference(exclude)
if locales:
check_programs('msguniq', 'msgmerge', 'msgattrib')
check_programs('xgettext')
try:
potfiles = self.build_potfiles()
# Build po files for each selected locale
for locale in locales:
if self.verbosity > 0:
self.stdout.write('processing locale %s' % locale)
for potfile in potfiles:
self.write_po_file(potfile, locale)
finally:
if not self.keep_pot:
self.remove_potfiles()
@cached_property
def gettext_version(self):
# Gettext tools will output system-encoded bytestrings instead of UTF-8,
# when looking up the version. It's especially a problem on Windows.
out, err, status = popen_wrapper(
['xgettext', '--version'],
stdout_encoding=DEFAULT_LOCALE_ENCODING,
)
m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out)
if m:
return tuple(int(d) for d in m.groups() if d is not None)
else:
raise CommandError("Unable to get gettext version. Is it installed?")
@cached_property
def settings_available(self):
try:
settings.LOCALE_PATHS
except ImproperlyConfigured:
if self.verbosity > 1:
self.stderr.write("Running without configured settings.")
return False
return True
def build_potfiles(self):
"""
Build pot files and apply msguniq to them.
"""
file_list = self.find_files(".")
self.remove_potfiles()
self.process_files(file_list)
potfiles = []
for path in self.locale_paths:
potfile = os.path.join(path, '%s.pot' % self.domain)
if not os.path.exists(potfile):
continue
args = ['msguniq'] + self.msguniq_options + [potfile]
msgs, errors, status = popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msguniq\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
msgs = normalize_eols(msgs)
with open(potfile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
potfiles.append(potfile)
return potfiles
def remove_potfiles(self):
for path in self.locale_paths:
pot_path = os.path.join(path, '%s.pot' % self.domain)
if os.path.exists(pot_path):
os.unlink(pot_path)
def find_files(self, root):
"""
Get all files in the given root. Also check that there is a matching
locale dir for each file.
"""
all_files = []
ignored_roots = []
if self.settings_available:
ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p]
for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks):
for dirname in dirnames[:]:
if (is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), self.ignore_patterns) or
os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots):
dirnames.remove(dirname)
if self.verbosity > 1:
self.stdout.write('ignoring directory %s' % dirname)
elif dirname == 'locale':
dirnames.remove(dirname)
self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname))
for filename in filenames:
file_path = os.path.normpath(os.path.join(dirpath, filename))
file_ext = os.path.splitext(filename)[1]
if file_ext not in self.extensions or is_ignored_path(file_path, self.ignore_patterns):
if self.verbosity > 1:
self.stdout.write('ignoring file %s in %s' % (filename, dirpath))
else:
locale_dir = None
for path in self.locale_paths:
if os.path.abspath(dirpath).startswith(os.path.dirname(path)):
locale_dir = path
break
locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR
all_files.append(self.translatable_file_class(dirpath, filename, locale_dir))
return sorted(all_files)
def process_files(self, file_list):
"""
Group translatable files by locale directory and run pot file build
process for each group.
"""
file_groups = {}
for translatable in file_list:
file_group = file_groups.setdefault(translatable.locale_dir, [])
file_group.append(translatable)
for locale_dir, files in file_groups.items():
self.process_locale_dir(locale_dir, files)
def process_locale_dir(self, locale_dir, files):
"""
Extract translatable literals from the specified files, creating or
updating the POT file for a given locale directory.
Use the xgettext GNU gettext utility.
"""
build_files = []
for translatable in files:
if self.verbosity > 1:
self.stdout.write('processing file %s in %s' % (
translatable.file, translatable.dirpath
))
if self.domain not in ('djangojs', 'django'):
continue
build_file = self.build_file_class(self, self.domain, translatable)
try:
build_file.preprocess()
except UnicodeDecodeError as e:
self.stdout.write(
'UnicodeDecodeError: skipped file %s in %s (reason: %s)' % (
translatable.file, translatable.dirpath, e,
)
)
continue
build_files.append(build_file)
if self.domain == 'djangojs':
is_templatized = build_file.is_templatized
args = [
'xgettext',
'-d', self.domain,
'--language=%s' % ('C' if is_templatized else 'JavaScript',),
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--output=-',
]
elif self.domain == 'django':
args = [
'xgettext',
'-d', self.domain,
'--language=Python',
'--keyword=gettext_noop',
'--keyword=gettext_lazy',
'--keyword=ngettext_lazy:1,2',
'--keyword=ugettext_noop',
'--keyword=ugettext_lazy',
'--keyword=ungettext_lazy:1,2',
'--keyword=pgettext:1c,2',
'--keyword=npgettext:1c,2,3',
'--keyword=pgettext_lazy:1c,2',
'--keyword=npgettext_lazy:1c,2,3',
'--output=-',
]
else:
return
input_files = [bf.work_path for bf in build_files]
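        # Hand the (possibly preprocessed) source paths to xgettext through a temporary --files-from list.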
with NamedTemporaryFile(mode='w+') as input_files_list:
input_files_list.write('\n'.join(input_files))
input_files_list.flush()
args.extend(['--files-from', input_files_list.name])
args.extend(self.xgettext_options)
msgs, errors, status = popen_wrapper(args)
if errors:
if status != STATUS_OK:
for build_file in build_files:
build_file.cleanup()
raise CommandError(
'errors happened while running xgettext on %s\n%s' %
('\n'.join(input_files), errors)
)
elif self.verbosity > 0:
# Print warnings
self.stdout.write(errors)
if msgs:
if locale_dir is NO_LOCALE_DIR:
file_path = os.path.normpath(build_files[0].path)
raise CommandError(
'Unable to find a locale path to store translations for '
'file %s' % file_path
)
for build_file in build_files:
msgs = build_file.postprocess_messages(msgs)
potfile = os.path.join(locale_dir, '%s.pot' % self.domain)
write_pot_file(potfile, msgs)
for build_file in build_files:
build_file.cleanup()
def write_po_file(self, potfile, locale):
"""
Create or update the PO file for self.domain and `locale`.
Use contents of the existing `potfile`.
Use msgmerge and msgattrib GNU gettext utilities.
"""
basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES')
os.makedirs(basedir, exist_ok=True)
pofile = os.path.join(basedir, '%s.po' % self.domain)
if os.path.exists(pofile):
args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile]
msgs, errors, status = popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgmerge\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
else:
with open(potfile, encoding='utf-8') as fp:
msgs = fp.read()
if not self.invoked_for_django:
msgs = self.copy_plural_forms(msgs, locale)
msgs = normalize_eols(msgs)
msgs = msgs.replace(
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "")
with open(pofile, 'w', encoding='utf-8') as fp:
fp.write(msgs)
if self.no_obsolete:
args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile]
msgs, errors, status = popen_wrapper(args)
if errors:
if status != STATUS_OK:
raise CommandError(
"errors happened while running msgattrib\n%s" % errors)
elif self.verbosity > 0:
self.stdout.write(errors)
def copy_plural_forms(self, msgs, locale):
"""
        Copy the Plural-Forms header from Django's own catalog for the given
        locale into the msgs string, inserting it at the right place. msgs
        should be the contents of a newly created .po file.
"""
django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
if self.domain == 'djangojs':
domains = ('djangojs', 'django')
else:
domains = ('django',)
for domain in domains:
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
if os.path.exists(django_po):
with open(django_po, encoding='utf-8') as fp:
m = plural_forms_re.search(fp.read())
if m:
plural_form_line = m['value']
if self.verbosity > 1:
self.stdout.write('copying plural forms: %s' % plural_form_line)
lines = []
found = False
for line in msgs.splitlines():
if not found and (not line or plural_forms_re.search(line)):
line = plural_form_line
found = True
lines.append(line)
msgs = '\n'.join(lines)
break
return msgs
ee4c9e8694e36ee7e6af16150b0f9252f9b500583c4fc523a9615c0236cba4ac
import keyword
import re
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.models.constants import LOOKUP_SEP
class Command(BaseCommand):
help = "Introspects the database tables in the given database and outputs a Django model module."
requires_system_checks = []
stealth_options = ('table_name_filter',)
db_module = 'django.db'
def add_arguments(self, parser):
parser.add_argument(
'table', nargs='*', type=str,
help='Selects what tables or views should be introspected.',
)
parser.add_argument(
'--database', default=DEFAULT_DB_ALIAS,
help='Nominates a database to introspect. Defaults to using the "default" database.',
)
parser.add_argument(
'--include-partitions', action='store_true', help='Also output models for partition tables.',
)
parser.add_argument(
'--include-views', action='store_true', help='Also output models for database views.',
)
def handle(self, **options):
try:
for line in self.handle_inspection(options):
self.stdout.write(line)
except NotImplementedError:
raise CommandError("Database inspection isn't supported for the currently selected database backend.")
def handle_inspection(self, options):
connection = connections[options['database']]
# 'table_name_filter' is a stealth option
table_name_filter = options.get('table_name_filter')
def table2model(table_name):
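            # e.g. 'auth_user' -> 'AuthUser': title-case the name, then strip characters that aren't letters or digits.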
return re.sub(r'[^a-zA-Z0-9]', '', table_name.title())
with connection.cursor() as cursor:
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield "# * Make sure each ForeignKey and OneToOneField has `on_delete` set to the desired behavior"
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield "# Feel free to rename the models, but don't rename db_table values or field names."
yield 'from %s import models' % self.db_module
known_models = []
table_info = connection.introspection.get_table_list(cursor)
# Determine types of tables and/or views to be introspected.
types = {'t'}
if options['include_partitions']:
types.add('p')
if options['include_views']:
types.add('v')
for table_name in (options['table'] or sorted(info.name for info in table_info if info.type in types)):
if table_name_filter is not None and callable(table_name_filter):
if not table_name_filter(table_name):
continue
try:
try:
relations = connection.introspection.get_relations(cursor, table_name)
except NotImplementedError:
relations = {}
try:
constraints = connection.introspection.get_constraints(cursor, table_name)
except NotImplementedError:
constraints = {}
primary_key_column = connection.introspection.get_primary_key_column(cursor, table_name)
unique_columns = [
c['columns'][0] for c in constraints.values()
if c['unique'] and len(c['columns']) == 1
]
table_description = connection.introspection.get_table_description(cursor, table_name)
except Exception as e:
yield "# Unable to inspect table '%s'" % table_name
yield "# The error was: %s" % e
continue
yield ''
yield ''
yield 'class %s(models.Model):' % table2model(table_name)
known_models.append(table2model(table_name))
used_column_names = [] # Holds column names used in the table so far
column_to_field_name = {} # Maps column names to names of model fields
for row in table_description:
comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
extra_params = {} # Holds Field parameters such as 'db_column'.
column_name = row.name
is_relation = column_name in relations
att_name, params, notes = self.normalize_col_name(
column_name, used_column_names, is_relation)
extra_params.update(params)
comment_notes.extend(notes)
used_column_names.append(att_name)
column_to_field_name[column_name] = att_name
# Add primary_key and unique, if necessary.
if column_name == primary_key_column:
extra_params['primary_key'] = True
elif column_name in unique_columns:
extra_params['unique'] = True
if is_relation:
if extra_params.pop('unique', False) or extra_params.get('primary_key'):
rel_type = 'OneToOneField'
else:
rel_type = 'ForeignKey'
rel_to = (
"self" if relations[column_name][1] == table_name
else table2model(relations[column_name][1])
)
if rel_to in known_models:
field_type = '%s(%s' % (rel_type, rel_to)
else:
field_type = "%s('%s'" % (rel_type, rel_to)
else:
# Calling `get_field_type` to get the field type string and any
# additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
extra_params.update(field_params)
comment_notes.extend(field_notes)
field_type += '('
# Don't output 'id = meta.AutoField(primary_key=True)', because
# that's assumed if it doesn't exist.
if att_name == 'id' and extra_params == {'primary_key': True}:
if field_type == 'AutoField(':
continue
elif field_type == 'IntegerField(' and not connection.features.can_introspect_autofield:
comment_notes.append('AutoField?')
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
# table description.
if row.null_ok: # If it's NULL...
extra_params['blank'] = True
extra_params['null'] = True
field_desc = '%s = %s%s' % (
att_name,
# Custom fields will have a dotted path
'' if '.' in field_type else 'models.',
field_type,
)
if field_type.startswith(('ForeignKey(', 'OneToOneField(')):
field_desc += ', models.DO_NOTHING'
if extra_params:
if not field_desc.endswith('('):
field_desc += ', '
field_desc += ', '.join('%s=%r' % (k, v) for k, v in extra_params.items())
field_desc += ')'
if comment_notes:
field_desc += ' # ' + ' '.join(comment_notes)
yield ' %s' % field_desc
is_view = any(info.name == table_name and info.type == 'v' for info in table_info)
is_partition = any(info.name == table_name and info.type == 'p' for info in table_info)
yield from self.get_meta(table_name, constraints, column_to_field_name, is_view, is_partition)
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
Modify the column name to make it Python-compatible as a field name
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append('Field name made lowercase.')
if is_relation:
if new_name.endswith('_id'):
new_name = new_name[:-3]
else:
field_params['db_column'] = col_name
new_name, num_repl = re.subn(r'\W', '_', new_name)
if num_repl > 0:
field_notes.append('Field renamed to remove unsuitable characters.')
if new_name.find(LOOKUP_SEP) >= 0:
while new_name.find(LOOKUP_SEP) >= 0:
new_name = new_name.replace(LOOKUP_SEP, '_')
if col_name.lower().find(LOOKUP_SEP) >= 0:
# Only add the comment if the double underscore was in the original name
field_notes.append("Field renamed because it contained more than one '_' in a row.")
if new_name.startswith('_'):
new_name = 'field%s' % new_name
field_notes.append("Field renamed because it started with '_'.")
if new_name.endswith('_'):
new_name = '%sfield' % new_name
field_notes.append("Field renamed because it ended with '_'.")
if keyword.iskeyword(new_name):
new_name += '_field'
field_notes.append('Field renamed because it was a Python reserved word.')
if new_name[0].isdigit():
new_name = 'number_%s' % new_name
field_notes.append("Field renamed because it wasn't a valid Python identifier.")
if new_name in used_column_names:
num = 0
while '%s_%d' % (new_name, num) in used_column_names:
num += 1
new_name = '%s_%d' % (new_name, num)
field_notes.append('Field renamed because of name conflict.')
if col_name != new_name and field_notes:
field_params['db_column'] = col_name
return new_name, field_params, field_notes
def get_field_type(self, connection, table_name, row):
"""
Given the database connection, the table name, and the cursor row
description, this routine will return the given field type name, as
well as any additional keyword parameters and notes for the field.
"""
field_params = {}
field_notes = []
try:
field_type = connection.introspection.get_field_type(row.type_code, row)
except KeyError:
field_type = 'TextField'
field_notes.append('This field type is a guess.')
# Add max_length for all CharFields.
if field_type == 'CharField' and row.internal_size:
field_params['max_length'] = int(row.internal_size)
if field_type == 'DecimalField':
if row.precision is None or row.scale is None:
field_notes.append(
'max_digits and decimal_places have been guessed, as this '
'database handles decimal fields as float')
field_params['max_digits'] = row.precision if row.precision is not None else 10
field_params['decimal_places'] = row.scale if row.scale is not None else 5
else:
field_params['max_digits'] = row.precision
field_params['decimal_places'] = row.scale
return field_type, field_params, field_notes
def get_meta(self, table_name, constraints, column_to_field_name, is_view, is_partition):
"""
Return a sequence comprising the lines of code necessary
to construct the inner Meta class for the model corresponding
to the given database table name.
"""
unique_together = []
has_unsupported_constraint = False
for params in constraints.values():
if params['unique']:
columns = params['columns']
if None in columns:
has_unsupported_constraint = True
columns = [x for x in columns if x is not None]
if len(columns) > 1:
unique_together.append(str(tuple(column_to_field_name[c] for c in columns)))
if is_view:
managed_comment = " # Created from a view. Don't remove."
elif is_partition:
managed_comment = " # Created from a partition. Don't remove."
else:
managed_comment = ''
meta = ['']
if has_unsupported_constraint:
meta.append(' # A unique constraint could not be introspected.')
meta += [
' class Meta:',
' managed = False%s' % managed_comment,
' db_table = %r' % table_name
]
if unique_together:
tup = '(' + ', '.join(unique_together) + ',)'
meta += [" unique_together = %s" % tup]
return meta
4c2357ceea22fb93afef4884866bd9ab2c79afeb0d0e1725c50a7e4fe941678f
import os
import sys
import warnings
from itertools import takewhile
from django.apps import apps
from django.conf import settings
from django.core.management.base import (
BaseCommand, CommandError, no_translations,
)
from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router
from django.db.migrations import Migration
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import (
InteractiveMigrationQuestioner, MigrationQuestioner,
NonInteractiveMigrationQuestioner,
)
from django.db.migrations.state import ProjectState
from django.db.migrations.utils import get_migration_name_timestamp
from django.db.migrations.writer import MigrationWriter
class Command(BaseCommand):
help = "Creates new migration(s) for apps."
def add_arguments(self, parser):
parser.add_argument(
'args', metavar='app_label', nargs='*',
help='Specify the app label(s) to create migrations for.',
)
parser.add_argument(
'--dry-run', action='store_true',
help="Just show what migrations would be made; don't actually write them.",
)
parser.add_argument(
'--merge', action='store_true',
help="Enable fixing of migration conflicts.",
)
parser.add_argument(
'--empty', action='store_true',
help="Create an empty migration.",
)
parser.add_argument(
'--noinput', '--no-input', action='store_false', dest='interactive',
help='Tells Django to NOT prompt the user for input of any kind.',
)
parser.add_argument(
'-n', '--name',
help="Use this name for migration file(s).",
)
parser.add_argument(
'--no-header', action='store_false', dest='include_header',
help='Do not add header comments to new migration file(s).',
)
parser.add_argument(
'--check', action='store_true', dest='check_changes',
help='Exit with a non-zero status if model changes are missing migrations.',
)
@no_translations
def handle(self, *app_labels, **options):
self.verbosity = options['verbosity']
self.interactive = options['interactive']
self.dry_run = options['dry_run']
self.merge = options['merge']
self.empty = options['empty']
self.migration_name = options['name']
if self.migration_name and not self.migration_name.isidentifier():
raise CommandError('The migration name must be a valid Python identifier.')
self.include_header = options['include_header']
check_changes = options['check_changes']
# Make sure the app they asked for exists
app_labels = set(app_labels)
has_bad_labels = False
for app_label in app_labels:
try:
apps.get_app_config(app_label)
except LookupError as err:
self.stderr.write(str(err))
has_bad_labels = True
if has_bad_labels:
sys.exit(2)
# Load the current graph state. Pass in None for the connection so
# the loader doesn't try to resolve replaced migrations from DB.
loader = MigrationLoader(None, ignore_no_migrations=True)
# Raise an error if any migrations are applied before their dependencies.
consistency_check_labels = {config.label for config in apps.get_app_configs()}
# Non-default databases are only checked if database routers used.
aliases_to_check = connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS]
for alias in sorted(aliases_to_check):
connection = connections[alias]
if (connection.settings_dict['ENGINE'] != 'django.db.backends.dummy' and any(
# At least one model must be migrated to the database.
router.allow_migrate(connection.alias, app_label, model_name=model._meta.object_name)
for app_label in consistency_check_labels
for model in apps.get_app_config(app_label).get_models()
)):
try:
loader.check_consistent_history(connection)
except OperationalError as error:
warnings.warn(
"Got an error checking a consistent migration history "
"performed for database connection '%s': %s."
% (alias, error),
RuntimeWarning,
)
# Before anything else, see if there's conflicting apps and drop out
# hard if there are any and they don't want to merge
conflicts = loader.detect_conflicts()
# If app_labels is specified, filter out conflicting migrations for unspecified apps
if app_labels:
conflicts = {
app_label: conflict for app_label, conflict in conflicts.items()
if app_label in app_labels
}
if conflicts and not self.merge:
name_str = "; ".join(
"%s in %s" % (", ".join(names), app)
for app, names in conflicts.items()
)
raise CommandError(
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (%s).\nTo fix them run "
"'python manage.py makemigrations --merge'" % name_str
)
# If they want to merge and there's nothing to merge, then politely exit
if self.merge and not conflicts:
self.stdout.write("No conflicts detected to merge.")
return
# If they want to merge and there is something to merge, then
# divert into the merge code
if self.merge and conflicts:
return self.handle_merge(loader, conflicts)
if self.interactive:
questioner = InteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run)
else:
questioner = NonInteractiveMigrationQuestioner(specified_apps=app_labels, dry_run=self.dry_run)
# Set up autodetector
autodetector = MigrationAutodetector(
loader.project_state(),
ProjectState.from_apps(apps),
questioner,
)
# If they want to make an empty migration, make one for each app
if self.empty:
if not app_labels:
raise CommandError("You must supply at least one app label when using --empty.")
# Make a fake changes() result we can pass to arrange_for_graph
changes = {
app: [Migration("custom", app)]
for app in app_labels
}
changes = autodetector.arrange_for_graph(
changes=changes,
graph=loader.graph,
migration_name=self.migration_name,
)
self.write_migration_files(changes)
return
# Detect changes
changes = autodetector.changes(
graph=loader.graph,
trim_to_apps=app_labels or None,
convert_apps=app_labels or None,
migration_name=self.migration_name,
)
if not changes:
# No changes? Tell them.
if self.verbosity >= 1:
if app_labels:
if len(app_labels) == 1:
self.stdout.write("No changes detected in app '%s'" % app_labels.pop())
else:
self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels)))
else:
self.stdout.write("No changes detected")
else:
self.write_migration_files(changes)
if check_changes:
sys.exit(1)
def write_migration_files(self, changes):
"""
Take a changes dict and write them out as migration files.
"""
directory_created = {}
for app_label, app_migrations in changes.items():
if self.verbosity >= 1:
self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label))
for migration in app_migrations:
# Describe the migration
writer = MigrationWriter(migration, self.include_header)
if self.verbosity >= 1:
# Display a relative path if it's below the current working
# directory, or an absolute path otherwise.
try:
migration_string = os.path.relpath(writer.path)
except ValueError:
migration_string = writer.path
if migration_string.startswith('..'):
migration_string = writer.path
self.stdout.write(' %s\n' % self.style.MIGRATE_LABEL(migration_string))
for operation in migration.operations:
self.stdout.write(' - %s' % operation.describe())
if not self.dry_run:
# Write the migrations file to the disk.
migrations_directory = os.path.dirname(writer.path)
if not directory_created.get(app_label):
os.makedirs(migrations_directory, exist_ok=True)
init_path = os.path.join(migrations_directory, "__init__.py")
if not os.path.isfile(init_path):
open(init_path, "w").close()
# We just do this once per app
directory_created[app_label] = True
migration_string = writer.as_string()
with open(writer.path, "w", encoding='utf-8') as fh:
fh.write(migration_string)
elif self.verbosity == 3:
# Alternatively, makemigrations --dry-run --verbosity 3
# will output the migrations to stdout rather than saving
# the file to the disk.
self.stdout.write(self.style.MIGRATE_HEADING(
"Full migrations file '%s':" % writer.filename
))
self.stdout.write(writer.as_string())
def handle_merge(self, loader, conflicts):
"""
Handles merging together conflicted migrations interactively,
if it's safe; otherwise, advises on how to fix it.
"""
if self.interactive:
questioner = InteractiveMigrationQuestioner()
else:
questioner = MigrationQuestioner(defaults={'ask_merge': True})
for app_label, migration_names in conflicts.items():
# Grab out the migrations in question, and work out their
# common ancestor.
merge_migrations = []
for migration_name in migration_names:
migration = loader.get_migration(app_label, migration_name)
migration.ancestry = [
mig for mig in loader.graph.forwards_plan((app_label, migration_name))
if mig[0] == migration.app_label
]
merge_migrations.append(migration)
def all_items_equal(seq):
return all(item == seq[0] for item in seq[1:])
merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations))
common_ancestor_count = sum(1 for common_ancestor_generation
in takewhile(all_items_equal, merge_migrations_generations))
if not common_ancestor_count:
raise ValueError("Could not find common ancestor of %s" % migration_names)
# Now work out the operations along each divergent branch
for migration in merge_migrations:
migration.branch = migration.ancestry[common_ancestor_count:]
migrations_ops = (loader.get_migration(node_app, node_name).operations
for node_app, node_name in migration.branch)
migration.merged_operations = sum(migrations_ops, [])
# In future, this could use some of the Optimizer code
# (can_optimize_through) to automatically see if they're
# mergeable. For now, we always just prompt the user.
if self.verbosity > 0:
self.stdout.write(self.style.MIGRATE_HEADING("Merging %s" % app_label))
for migration in merge_migrations:
self.stdout.write(self.style.MIGRATE_LABEL(" Branch %s" % migration.name))
for operation in migration.merged_operations:
self.stdout.write(' - %s' % operation.describe())
if questioner.ask_merge(app_label):
# If they still want to merge it, then write out an empty
# file depending on the migrations needing merging.
numbers = [
MigrationAutodetector.parse_number(migration.name)
for migration in merge_migrations
]
try:
biggest_number = max(x for x in numbers if x is not None)
except ValueError:
biggest_number = 1
subclass = type("Migration", (Migration,), {
"dependencies": [(app_label, migration.name) for migration in merge_migrations],
})
migration_name = "%04i_%s" % (
biggest_number + 1,
self.migration_name or ("merge_%s" % get_migration_name_timestamp())
)
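                # (Illustrative) with conflicting leaves such as '0002_auto'
                # and '0002_other', biggest_number is 2 and the generated name
                # looks like '0003_merge_20210101_1200'.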
new_migration = subclass(migration_name, app_label)
writer = MigrationWriter(new_migration, self.include_header)
if not self.dry_run:
# Write the merge migrations file to the disk
with open(writer.path, "w", encoding='utf-8') as fh:
fh.write(writer.as_string())
if self.verbosity > 0:
self.stdout.write("\nCreated new merge migration %s" % writer.path)
elif self.verbosity == 3:
# Alternatively, makemigrations --merge --dry-run --verbosity 3
# will output the merge migrations to stdout rather than saving
# the file to the disk.
self.stdout.write(self.style.MIGRATE_HEADING(
"Full merge migrations file '%s':" % writer.filename
))
self.stdout.write(writer.as_string())
|
1fce8706f95c84dce5abca13dc961b00fb53085143918d0a686ed3f73dec68a3 | import os
import select
import sys
import traceback
from django.core.management import BaseCommand, CommandError
from django.utils.datastructures import OrderedSet
class Command(BaseCommand):
help = (
"Runs a Python interactive interpreter. Tries to use IPython or "
"bpython, if one of them is available. Any standard input is executed "
"as code."
)
requires_system_checks = []
shells = ['ipython', 'bpython', 'python']
def add_arguments(self, parser):
parser.add_argument(
'--no-startup', action='store_true',
help='When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.',
)
parser.add_argument(
'-i', '--interface', choices=self.shells,
help='Specify an interactive interpreter interface. Available options: "ipython", "bpython", and "python"',
)
parser.add_argument(
'-c', '--command',
help='Instead of opening an interactive shell, run a command as Django and exit.',
)
def ipython(self, options):
from IPython import start_ipython
start_ipython(argv=[])
def bpython(self, options):
import bpython
bpython.embed()
def python(self, options):
import code
# Set up a dictionary to serve as the environment for the shell, so
# that tab completion works on objects that are imported at runtime.
imported_objects = {}
try: # Try activating rlcompleter, because it's handy.
import readline
except ImportError:
pass
else:
# We don't have to wrap the following import in a 'try', because
# we already know 'readline' was imported successfully.
import rlcompleter
readline.set_completer(rlcompleter.Completer(imported_objects).complete)
# Enable tab completion on systems using libedit (e.g. macOS).
# These lines are copied from Python's Lib/site.py.
readline_doc = getattr(readline, '__doc__', '')
if readline_doc is not None and 'libedit' in readline_doc:
readline.parse_and_bind("bind ^I rl_complete")
else:
readline.parse_and_bind("tab:complete")
# We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
# conventions and get $PYTHONSTARTUP first then .pythonrc.py.
if not options['no_startup']:
for pythonrc in OrderedSet([os.environ.get("PYTHONSTARTUP"), os.path.expanduser('~/.pythonrc.py')]):
if not pythonrc:
continue
if not os.path.isfile(pythonrc):
continue
with open(pythonrc) as handle:
pythonrc_code = handle.read()
# Match the behavior of the cpython shell where an error in
# PYTHONSTARTUP prints an exception and continues.
try:
exec(compile(pythonrc_code, pythonrc, 'exec'), imported_objects)
except Exception:
traceback.print_exc()
code.interact(local=imported_objects)
def handle(self, **options):
# Execute the command and exit.
if options['command']:
exec(options['command'])
return
# Execute stdin if it has anything to read and exit.
# Not supported on Windows due to select.select() limitations.
if sys.platform != 'win32' and not sys.stdin.isatty() and select.select([sys.stdin], [], [], 0)[0]:
exec(sys.stdin.read())
return
available_shells = [options['interface']] if options['interface'] else self.shells
for shell in available_shells:
try:
return getattr(self, shell)(options)
except ImportError:
pass
raise CommandError("Couldn't import {} interface.".format(shell))
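# Usage sketch (illustrative, not part of the original module): handle() above
# runs a one-off command or piped stdin before falling back to an interactive
# interpreter, so from a project directory both of these work (the pipe form
# is skipped on Windows):
#
#   python manage.py shell -c "from django.conf import settings; print(settings.DEBUG)"
#   echo "print(2 + 2)" | python manage.py shell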
|
b421066d5e7e3723baaa42c85e397e317f244fd0607d692226ece091f3b532ca | "Misc. utility functions/classes for admin documentation generator."
import re
from email.errors import HeaderParseError
from email.parser import HeaderParser
from inspect import cleandoc
from django.urls import reverse
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def get_view_name(view_func):
mod_name = view_func.__module__
view_name = getattr(view_func, '__qualname__', view_func.__class__.__name__)
return mod_name + '.' + view_name
def parse_docstring(docstring):
"""
Parse out the parts of a docstring. Return (title, body, metadata).
"""
if not docstring:
return '', '', {}
docstring = cleandoc(docstring)
parts = re.split(r'\n{2,}', docstring)
title = parts[0]
if len(parts) == 1:
body = ''
metadata = {}
else:
parser = HeaderParser()
try:
metadata = parser.parsestr(parts[-1])
except HeaderParseError:
metadata = {}
body = "\n\n".join(parts[1:])
else:
metadata = dict(metadata.items())
if metadata:
body = "\n\n".join(parts[1:-1])
else:
body = "\n\n".join(parts[1:])
return title, body, metadata
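# Illustrative example (not part of the original module): given the made-up
# docstring
#
#     """Display the latest poll questions.
#
#     Renders the five most recently published questions.
#
#     Templates: ``polls/index.html``
#     Context: ``latest_question_list``
#     """
#
# parse_docstring() returns roughly
#
#     ('Display the latest poll questions.',
#      'Renders the five most recently published questions.',
#      {'Templates': '``polls/index.html``', 'Context': '``latest_question_list``'})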
def parse_rst(text, default_reference_context, thing_being_parsed=None):
"""
Convert the string from reST to an XHTML fragment.
"""
overrides = {
'doctitle_xform': True,
'initial_header_level': 3,
"default_reference_context": default_reference_context,
"link_base": reverse('django-admindocs-docroot').rstrip('/'),
'raw_enabled': False,
'file_insertion_enabled': False,
}
thing_being_parsed = thing_being_parsed and '<%s>' % thing_being_parsed
# Wrap ``text`` in some reST that sets the default role to ``cmsreference``,
# then restores it.
source = """
.. default-role:: cmsreference
%s
.. default-role::
"""
parts = docutils.core.publish_parts(
source % text,
source_path=thing_being_parsed, destination_path=None,
writer_name='html', settings_overrides=overrides,
)
return mark_safe(parts['fragment'])
#
# reST roles
#
ROLES = {
'model': '%s/models/%s/',
'view': '%s/views/%s/',
'template': '%s/templates/%s/',
'filter': '%s/filters/#%s',
'tag': '%s/tags/#%s',
}
def create_reference_role(rolename, urlbase):
def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
node = docutils.nodes.reference(
rawtext,
text,
refuri=(urlbase % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None:
options = {}
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(
rawtext,
text,
refuri=(ROLES[context] % (
inliner.document.settings.link_base,
text.lower(),
)),
**options
)
return [node], []
if docutils_is_available:
docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
for name, urlbase in ROLES.items():
create_reference_role(name, urlbase)
# Match the beginning of a named or unnamed group.
named_group_matcher = _lazy_re_compile(r'\(\?P(<\w+>)')
unnamed_group_matcher = _lazy_re_compile(r'\(')
def replace_named_groups(pattern):
r"""
Find named groups in `pattern` and replace them with the group name. E.g.,
1. ^(?P<a>\w+)/b/(\w+)$ ==> ^<a>/b/(\w+)$
2. ^(?P<a>\w+)/b/(?P<c>\w+)/$ ==> ^<a>/b/<c>/$
3. ^(?P<a>\w+)/b/(\w+) ==> ^<a>/b/(\w+)
4. ^(?P<a>\w+)/b/(?P<c>\w+) ==> ^<a>/b/<c>
"""
named_group_indices = [
(m.start(0), m.end(0), m[1])
for m in named_group_matcher.finditer(pattern)
]
# Tuples of (named capture group pattern, group name).
group_pattern_and_name = []
# Loop over the groups and their start and end indices.
for start, end, group_name in named_group_indices:
# Handle nested parentheses, e.g. '^(?P<a>(x|y))/b'.
unmatched_open_brackets, prev_char = 1, None
for idx, val in enumerate(pattern[end:]):
# Check for unescaped `(` and `)`. They mark the start and end of a
# nested group.
if val == '(' and prev_char != '\\':
unmatched_open_brackets += 1
elif val == ')' and prev_char != '\\':
unmatched_open_brackets -= 1
prev_char = val
# If brackets are balanced, the end of the string for the current
# named capture group pattern has been reached.
if unmatched_open_brackets == 0:
group_pattern_and_name.append((pattern[start:end + idx + 1], group_name))
break
# Replace the string for named capture groups with their group names.
for group_pattern, group_name in group_pattern_and_name:
pattern = pattern.replace(group_pattern, group_name)
return pattern
def replace_unnamed_groups(pattern):
r"""
Find unnamed groups in `pattern` and replace them with '<var>'. E.g.,
1. ^(?P<a>\w+)/b/(\w+)$ ==> ^(?P<a>\w+)/b/<var>$
2. ^(?P<a>\w+)/b/((x|y)\w+)$ ==> ^(?P<a>\w+)/b/<var>$
3. ^(?P<a>\w+)/b/(\w+) ==> ^(?P<a>\w+)/b/<var>
4. ^(?P<a>\w+)/b/((x|y)\w+) ==> ^(?P<a>\w+)/b/<var>
"""
unnamed_group_indices = [m.start(0) for m in unnamed_group_matcher.finditer(pattern)]
# Indices of the start of unnamed capture groups.
group_indices = []
# Loop over the start indices of the groups.
for start in unnamed_group_indices:
# Handle nested parentheses, e.g. '^b/((x|y)\w+)$'.
unmatched_open_brackets, prev_char = 1, None
for idx, val in enumerate(pattern[start + 1:]):
# Check for unescaped `(` and `)`. They mark the start and end of
# a nested group.
if val == '(' and prev_char != '\\':
unmatched_open_brackets += 1
elif val == ')' and prev_char != '\\':
unmatched_open_brackets -= 1
prev_char = val
if unmatched_open_brackets == 0:
group_indices.append((start, start + 2 + idx))
break
# Remove unnamed group matches inside other unnamed capture groups.
group_start_end_indices = []
prev_end = None
for start, end in group_indices:
if prev_end and start > prev_end or not prev_end:
group_start_end_indices.append((start, end))
prev_end = end
if group_start_end_indices:
# Replace unnamed groups with <var>. Handle the fact that replacing the
# string between indices will change string length and thus indices
# will point to the wrong substring if not corrected.
final_pattern, prev_end = [], None
for start, end in group_start_end_indices:
if prev_end:
final_pattern.append(pattern[prev_end:start])
final_pattern.append(pattern[:start] + '<var>')
prev_end = end
final_pattern.append(pattern[prev_end:])
return ''.join(final_pattern)
else:
return pattern
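# Illustrative example (not part of the original module): the two helpers are
# typically chained to simplify a URL regex for display, e.g.
#
#     replace_unnamed_groups(replace_named_groups(r'^(?P<slug>[\w-]+)/comments/(\d+)/$'))
#
# returns '^<slug>/comments/<var>/$'.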
|
5758125eaae4739abd838d95ecf51ac06256f5117312a3b08af7d477d1b04ac3 | from django.db import NotSupportedError
from django.db.models import Index
from django.utils.functional import cached_property
__all__ = [
'BloomIndex', 'BrinIndex', 'BTreeIndex', 'GinIndex', 'GistIndex',
'HashIndex', 'SpGistIndex',
]
class PostgresIndex(Index):
@cached_property
def max_name_length(self):
# Allow an index name longer than 30 characters when the suffix is
# longer than the usual 3 character limit. The 30 character limit for
# cross-database compatibility isn't applicable to PostgreSQL-specific
# indexes.
return Index.max_name_length - len(Index.suffix) + len(self.suffix)
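        # For example, with Index.max_name_length == 30 and Index.suffix ==
        # 'idx', SpGistIndex (suffix 'spgist') allows names of up to
        # 30 - 3 + 6 = 33 characters.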
def create_sql(self, model, schema_editor, using='', **kwargs):
self.check_supported(schema_editor)
statement = super().create_sql(model, schema_editor, using=' USING %s' % self.suffix, **kwargs)
with_params = self.get_with_params()
if with_params:
statement.parts['extra'] = 'WITH (%s) %s' % (
', '.join(with_params),
statement.parts['extra'],
)
return statement
def check_supported(self, schema_editor):
pass
def get_with_params(self):
return []
class BloomIndex(PostgresIndex):
suffix = 'bloom'
def __init__(self, *, length=None, columns=(), **kwargs):
super().__init__(**kwargs)
if len(self.fields) > 32:
raise ValueError('Bloom indexes support a maximum of 32 fields.')
if not isinstance(columns, (list, tuple)):
raise ValueError('BloomIndex.columns must be a list or tuple.')
if len(columns) > len(self.fields):
raise ValueError(
'BloomIndex.columns cannot have more values than fields.'
)
if not all(0 < col <= 4095 for col in columns):
raise ValueError(
'BloomIndex.columns must contain integers from 1 to 4095.',
)
if length is not None and not 0 < length <= 4096:
raise ValueError(
'BloomIndex.length must be None or an integer from 1 to 4096.',
)
self.length = length
self.columns = columns
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.length is not None:
kwargs['length'] = self.length
if self.columns:
kwargs['columns'] = self.columns
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.length is not None:
with_params.append('length = %d' % self.length)
if self.columns:
with_params.extend(
'col%d = %d' % (i, v)
for i, v in enumerate(self.columns, start=1)
)
return with_params
class BrinIndex(PostgresIndex):
suffix = 'brin'
def __init__(self, *, autosummarize=None, pages_per_range=None, **kwargs):
if pages_per_range is not None and pages_per_range <= 0:
raise ValueError('pages_per_range must be None or a positive integer')
self.autosummarize = autosummarize
self.pages_per_range = pages_per_range
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.autosummarize is not None:
kwargs['autosummarize'] = self.autosummarize
if self.pages_per_range is not None:
kwargs['pages_per_range'] = self.pages_per_range
return path, args, kwargs
def check_supported(self, schema_editor):
if self.autosummarize and not schema_editor.connection.features.has_brin_autosummarize:
raise NotSupportedError('BRIN option autosummarize requires PostgreSQL 10+.')
def get_with_params(self):
with_params = []
if self.autosummarize is not None:
with_params.append('autosummarize = %s' % ('on' if self.autosummarize else 'off'))
if self.pages_per_range is not None:
with_params.append('pages_per_range = %d' % self.pages_per_range)
return with_params
class BTreeIndex(PostgresIndex):
suffix = 'btree'
def __init__(self, *, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs['fillfactor'] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append('fillfactor = %d' % self.fillfactor)
return with_params
class GinIndex(PostgresIndex):
suffix = 'gin'
def __init__(self, *, fastupdate=None, gin_pending_list_limit=None, **kwargs):
self.fastupdate = fastupdate
self.gin_pending_list_limit = gin_pending_list_limit
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fastupdate is not None:
kwargs['fastupdate'] = self.fastupdate
if self.gin_pending_list_limit is not None:
kwargs['gin_pending_list_limit'] = self.gin_pending_list_limit
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.gin_pending_list_limit is not None:
with_params.append('gin_pending_list_limit = %d' % self.gin_pending_list_limit)
if self.fastupdate is not None:
with_params.append('fastupdate = %s' % ('on' if self.fastupdate else 'off'))
return with_params
class GistIndex(PostgresIndex):
suffix = 'gist'
def __init__(self, *, buffering=None, fillfactor=None, **kwargs):
self.buffering = buffering
self.fillfactor = fillfactor
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.buffering is not None:
kwargs['buffering'] = self.buffering
if self.fillfactor is not None:
kwargs['fillfactor'] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.buffering is not None:
with_params.append('buffering = %s' % ('on' if self.buffering else 'off'))
if self.fillfactor is not None:
with_params.append('fillfactor = %d' % self.fillfactor)
return with_params
class HashIndex(PostgresIndex):
suffix = 'hash'
def __init__(self, *, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs['fillfactor'] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append('fillfactor = %d' % self.fillfactor)
return with_params
class SpGistIndex(PostgresIndex):
suffix = 'spgist'
def __init__(self, *, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(**kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs['fillfactor'] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append('fillfactor = %d' % self.fillfactor)
return with_params
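# Usage sketch (illustrative, not part of the original module; the model and
# field names are made up):
#
#     from django.contrib.postgres.fields import ArrayField
#     from django.contrib.postgres.indexes import BrinIndex, GinIndex
#     from django.db import models
#
#     class Event(models.Model):
#         created = models.DateTimeField()
#         tags = ArrayField(models.CharField(max_length=32), default=list)
#
#         class Meta:
#             indexes = [
#                 BrinIndex(fields=['created'], pages_per_range=16, name='event_created_brin'),
#                 GinIndex(fields=['tags'], name='event_tags_gin'),
#             ]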
|