3eb63d1e3b547cba16f629c5e2514922b9fdbf09adcb19cf28e3607ab00cd429
import json

from django.contrib.postgres import lookups
from django.contrib.postgres.forms import SimpleArrayField
from django.contrib.postgres.validators import ArrayMaxLengthValidator
from django.core import checks, exceptions
from django.db.models import Field, IntegerField, Transform
from django.db.models.fields.mixins import CheckFieldDefaultMixin
from django.db.models.lookups import Exact, In
from django.utils.translation import gettext_lazy as _

from ..utils import prefix_validation_error
from .utils import AttributeSetter

__all__ = ['ArrayField']


class ArrayField(CheckFieldDefaultMixin, Field):
    empty_strings_allowed = False
    default_error_messages = {
        'item_invalid': _('Item %(nth)s in the array did not validate:'),
        'nested_array_mismatch': _('Nested arrays must have the same length.'),
    }
    _default_hint = ('list', '[]')

    def __init__(self, base_field, size=None, **kwargs):
        self.base_field = base_field
        self.size = size
        if self.size:
            self.default_validators = [*self.default_validators, ArrayMaxLengthValidator(self.size)]
        # For performance, only add a from_db_value() method if the base field
        # implements it.
        if hasattr(self.base_field, 'from_db_value'):
            self.from_db_value = self._from_db_value
        super().__init__(**kwargs)

    @property
    def model(self):
        try:
            return self.__dict__['model']
        except KeyError:
            raise AttributeError("'%s' object has no attribute 'model'" % self.__class__.__name__)

    @model.setter
    def model(self, model):
        self.__dict__['model'] = model
        self.base_field.model = model

    def check(self, **kwargs):
        errors = super().check(**kwargs)
        if self.base_field.remote_field:
            errors.append(
                checks.Error(
                    'Base field for array cannot be a related field.',
                    obj=self,
                    id='postgres.E002'
                )
            )
        else:
            # Remove the field name checks as they are not needed here.
            base_errors = self.base_field.check()
            if base_errors:
                messages = '\n    '.join('%s (%s)' % (error.msg, error.id) for error in base_errors)
                errors.append(
                    checks.Error(
                        'Base field for array has errors:\n    %s' % messages,
                        obj=self,
                        id='postgres.E001'
                    )
                )
        return errors

    def set_attributes_from_name(self, name):
        super().set_attributes_from_name(name)
        self.base_field.set_attributes_from_name(name)

    @property
    def description(self):
        return 'Array of %s' % self.base_field.description

    def db_type(self, connection):
        size = self.size or ''
        return '%s[%s]' % (self.base_field.db_type(connection), size)

    def cast_db_type(self, connection):
        size = self.size or ''
        return '%s[%s]' % (self.base_field.cast_db_type(connection), size)

    def get_placeholder(self, value, compiler, connection):
        return '%s::{}'.format(self.db_type(connection))

    def get_db_prep_value(self, value, connection, prepared=False):
        if isinstance(value, (list, tuple)):
            return [self.base_field.get_db_prep_value(i, connection, prepared=False) for i in value]
        return value

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if path == 'django.contrib.postgres.fields.array.ArrayField':
            path = 'django.contrib.postgres.fields.ArrayField'
        kwargs.update({
            'base_field': self.base_field.clone(),
            'size': self.size,
        })
        return name, path, args, kwargs

    def to_python(self, value):
        if isinstance(value, str):
            # Assume we're deserializing
            vals = json.loads(value)
            value = [self.base_field.to_python(val) for val in vals]
        return value

    def _from_db_value(self, value, expression, connection):
        if value is None:
            return value
        return [
            self.base_field.from_db_value(item, expression, connection)
            for item in value
        ]

    def value_to_string(self, obj):
        values = []
        vals = self.value_from_object(obj)
        base_field = self.base_field
        for val in vals:
            if val is None:
                values.append(None)
            else:
                obj = AttributeSetter(base_field.attname, val)
                values.append(base_field.value_to_string(obj))
        return json.dumps(values)

    def get_transform(self, name):
        transform = super().get_transform(name)
        if transform:
            return transform
        if '_' not in name:
            try:
                index = int(name)
            except ValueError:
                pass
            else:
                index += 1  # postgres uses 1-indexing
                return IndexTransformFactory(index, self.base_field)
        try:
            start, end = name.split('_')
            start = int(start) + 1
            end = int(end)  # don't add one here because postgres slices are weird
        except ValueError:
            pass
        else:
            return SliceTransformFactory(start, end)

    def validate(self, value, model_instance):
        super().validate(value, model_instance)
        for index, part in enumerate(value):
            try:
                self.base_field.validate(part, model_instance)
            except exceptions.ValidationError as error:
                raise prefix_validation_error(
                    error,
                    prefix=self.error_messages['item_invalid'],
                    code='item_invalid',
                    params={'nth': index + 1},
                )
        if isinstance(self.base_field, ArrayField):
            if len({len(i) for i in value}) > 1:
                raise exceptions.ValidationError(
                    self.error_messages['nested_array_mismatch'],
                    code='nested_array_mismatch',
                )

    def run_validators(self, value):
        super().run_validators(value)
        for index, part in enumerate(value):
            try:
                self.base_field.run_validators(part)
            except exceptions.ValidationError as error:
                raise prefix_validation_error(
                    error,
                    prefix=self.error_messages['item_invalid'],
                    code='item_invalid',
                    params={'nth': index + 1},
                )

    def formfield(self, **kwargs):
        return super().formfield(**{
            'form_class': SimpleArrayField,
            'base_field': self.base_field.formfield(),
            'max_length': self.size,
            **kwargs,
        })


class ArrayCastRHSMixin:
    def process_rhs(self, compiler, connection):
        rhs, rhs_params = super().process_rhs(compiler, connection)
        cast_type = self.lhs.output_field.cast_db_type(connection)
        return '%s::%s' % (rhs, cast_type), rhs_params


@ArrayField.register_lookup
class ArrayContains(ArrayCastRHSMixin, lookups.DataContains):
    pass


@ArrayField.register_lookup
class ArrayContainedBy(ArrayCastRHSMixin, lookups.ContainedBy):
    pass


@ArrayField.register_lookup
class ArrayExact(ArrayCastRHSMixin, Exact):
    pass


@ArrayField.register_lookup
class ArrayOverlap(ArrayCastRHSMixin, lookups.Overlap):
    pass


@ArrayField.register_lookup
class ArrayLenTransform(Transform):
    lookup_name = 'len'
    output_field = IntegerField()

    def as_sql(self, compiler, connection):
        lhs, params = compiler.compile(self.lhs)
        # Distinguish NULL and empty arrays
        return (
            'CASE WHEN %(lhs)s IS NULL THEN NULL ELSE '
            'coalesce(array_length(%(lhs)s, 1), 0) END'
        ) % {'lhs': lhs}, params


@ArrayField.register_lookup
class ArrayInLookup(In):
    def get_prep_lookup(self):
        values = super().get_prep_lookup()
        if hasattr(values, 'resolve_expression'):
            return values
        # In.process_rhs() expects values to be hashable, so convert lists
        # to tuples.
        prepared_values = []
        for value in values:
            if hasattr(value, 'resolve_expression'):
                prepared_values.append(value)
            else:
                prepared_values.append(tuple(value))
        return prepared_values


class IndexTransform(Transform):

    def __init__(self, index, base_field, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.index = index
        self.base_field = base_field

    def as_sql(self, compiler, connection):
        lhs, params = compiler.compile(self.lhs)
        return '%s[%%s]' % lhs, params + [self.index]

    @property
    def output_field(self):
        return self.base_field


class IndexTransformFactory:

    def __init__(self, index, base_field):
        self.index = index
        self.base_field = base_field

    def __call__(self, *args, **kwargs):
        return IndexTransform(self.index, self.base_field, *args, **kwargs)


class SliceTransform(Transform):

    def __init__(self, start, end, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.start = start
        self.end = end

    def as_sql(self, compiler, connection):
        lhs, params = compiler.compile(self.lhs)
        return '%s[%%s:%%s]' % lhs, params + [self.start, self.end]


class SliceTransformFactory:

    def __init__(self, start, end):
        self.start = start
        self.end = end

    def __call__(self, *args, **kwargs):
        return SliceTransform(self.start, self.end, *args, **kwargs)
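
A minimal usage sketch of the ArrayField defined above, exercising the lookups and transforms it registers (contains, len, index, and slice). The Post model, its fields, and the app label are hypothetical, and a PostgreSQL connection is assumed.

# Hypothetical model for illustration only; assumes a PostgreSQL database.
from django.contrib.postgres.fields import ArrayField
from django.db import models


class Post(models.Model):
    tags = ArrayField(models.CharField(max_length=32), size=8, default=list)

    class Meta:
        app_label = 'example'  # placeholder app label


# Queries against the lookups/transforms registered above:
# Post.objects.filter(tags__contains=['django'])  # ArrayContains (@>)
# Post.objects.filter(tags__len=3)                # ArrayLenTransform
# Post.objects.filter(tags__0='django')           # IndexTransform; 0 becomes 1-indexed in SQL
# Post.objects.filter(tags__0_2=['a', 'b'])       # SliceTransform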
05f43789f9bb3464e5287a98bdace0696a037fa183462a83c0071d3209d9d1ca
import json

from django.contrib.postgres import forms, lookups
from django.contrib.postgres.fields.array import ArrayField
from django.core import exceptions
from django.db.models import Field, TextField, Transform
from django.db.models.fields.mixins import CheckFieldDefaultMixin
from django.utils.translation import gettext_lazy as _

__all__ = ['HStoreField']


class HStoreField(CheckFieldDefaultMixin, Field):
    empty_strings_allowed = False
    description = _('Map of strings to strings/nulls')
    default_error_messages = {
        'not_a_string': _('The value of “%(key)s” is not a string or null.'),
    }
    _default_hint = ('dict', '{}')

    def db_type(self, connection):
        return 'hstore'

    def get_transform(self, name):
        transform = super().get_transform(name)
        if transform:
            return transform
        return KeyTransformFactory(name)

    def validate(self, value, model_instance):
        super().validate(value, model_instance)
        for key, val in value.items():
            if not isinstance(val, str) and val is not None:
                raise exceptions.ValidationError(
                    self.error_messages['not_a_string'],
                    code='not_a_string',
                    params={'key': key},
                )

    def to_python(self, value):
        if isinstance(value, str):
            value = json.loads(value)
        return value

    def value_to_string(self, obj):
        return json.dumps(self.value_from_object(obj))

    def formfield(self, **kwargs):
        return super().formfield(**{
            'form_class': forms.HStoreField,
            **kwargs,
        })

    def get_prep_value(self, value):
        value = super().get_prep_value(value)

        if isinstance(value, dict):
            prep_value = {}
            for key, val in value.items():
                key = str(key)
                if val is not None:
                    val = str(val)
                prep_value[key] = val
            value = prep_value

        if isinstance(value, list):
            value = [str(item) for item in value]

        return value


HStoreField.register_lookup(lookups.DataContains)
HStoreField.register_lookup(lookups.ContainedBy)
HStoreField.register_lookup(lookups.HasKey)
HStoreField.register_lookup(lookups.HasKeys)
HStoreField.register_lookup(lookups.HasAnyKeys)


class KeyTransform(Transform):
    output_field = TextField()

    def __init__(self, key_name, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.key_name = key_name

    def as_sql(self, compiler, connection):
        lhs, params = compiler.compile(self.lhs)
        return '(%s -> %%s)' % lhs, tuple(params) + (self.key_name,)


class KeyTransformFactory:

    def __init__(self, key_name):
        self.key_name = key_name

    def __call__(self, *args, **kwargs):
        return KeyTransform(self.key_name, *args, **kwargs)


@HStoreField.register_lookup
class KeysTransform(Transform):
    lookup_name = 'keys'
    function = 'akeys'
    output_field = ArrayField(TextField())


@HStoreField.register_lookup
class ValuesTransform(Transform):
    lookup_name = 'values'
    function = 'avals'
    output_field = ArrayField(TextField())
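
A short sketch of HStoreField in use; the Dog model and app label are hypothetical, and the PostgreSQL hstore extension is assumed to be installed (e.g. via a migration). Values must map strings to strings or None, as enforced by validate() above.

# Hypothetical model; assumes the PostgreSQL hstore extension is installed.
from django.contrib.postgres.fields import HStoreField
from django.db import models


class Dog(models.Model):
    data = HStoreField(default=dict)

    class Meta:
        app_label = 'example'  # placeholder app label


# Dog.objects.create(data={'breed': 'collie', 'owner': None})
# Dog.objects.filter(data__has_key='breed')   # HasKey lookup registered above
# Dog.objects.filter(data__breed='collie')    # KeyTransform -> (data -> 'breed')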
499202f85ebaf6c4103d55be9b84a88149d02ea5182bb2770a84a47c47aa0065
import json

from psycopg2.extras import Json

from django.contrib.postgres import forms, lookups
from django.core import exceptions
from django.db.models import (
    Field, TextField, Transform, lookups as builtin_lookups,
)
from django.db.models.fields.mixins import CheckFieldDefaultMixin
from django.utils.translation import gettext_lazy as _

__all__ = ['JSONField']


class JsonAdapter(Json):
    """
    Customized psycopg2.extras.Json to allow for a custom encoder.
    """
    def __init__(self, adapted, dumps=None, encoder=None):
        self.encoder = encoder
        super().__init__(adapted, dumps=dumps)

    def dumps(self, obj):
        options = {'cls': self.encoder} if self.encoder else {}
        return json.dumps(obj, **options)


class JSONField(CheckFieldDefaultMixin, Field):
    empty_strings_allowed = False
    description = _('A JSON object')
    default_error_messages = {
        'invalid': _("Value must be valid JSON."),
    }
    _default_hint = ('dict', '{}')

    def __init__(self, verbose_name=None, name=None, encoder=None, **kwargs):
        if encoder and not callable(encoder):
            raise ValueError("The encoder parameter must be a callable object.")
        self.encoder = encoder
        super().__init__(verbose_name, name, **kwargs)

    def db_type(self, connection):
        return 'jsonb'

    def deconstruct(self):
        name, path, args, kwargs = super().deconstruct()
        if self.encoder is not None:
            kwargs['encoder'] = self.encoder
        return name, path, args, kwargs

    def get_transform(self, name):
        transform = super().get_transform(name)
        if transform:
            return transform
        return KeyTransformFactory(name)

    def get_prep_value(self, value):
        if value is not None:
            return JsonAdapter(value, encoder=self.encoder)
        return value

    def validate(self, value, model_instance):
        super().validate(value, model_instance)
        options = {'cls': self.encoder} if self.encoder else {}
        try:
            json.dumps(value, **options)
        except TypeError:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def value_to_string(self, obj):
        return self.value_from_object(obj)

    def formfield(self, **kwargs):
        return super().formfield(**{
            'form_class': forms.JSONField,
            **kwargs,
        })


JSONField.register_lookup(lookups.DataContains)
JSONField.register_lookup(lookups.ContainedBy)
JSONField.register_lookup(lookups.HasKey)
JSONField.register_lookup(lookups.HasKeys)
JSONField.register_lookup(lookups.HasAnyKeys)
JSONField.register_lookup(lookups.JSONExact)


class KeyTransform(Transform):
    operator = '->'
    nested_operator = '#>'

    def __init__(self, key_name, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.key_name = key_name

    def as_sql(self, compiler, connection):
        key_transforms = [self.key_name]
        previous = self.lhs
        while isinstance(previous, KeyTransform):
            key_transforms.insert(0, previous.key_name)
            previous = previous.lhs
        lhs, params = compiler.compile(previous)
        if len(key_transforms) > 1:
            return '(%s %s %%s)' % (lhs, self.nested_operator), params + [key_transforms]
        try:
            lookup = int(self.key_name)
        except ValueError:
            lookup = self.key_name
        return '(%s %s %%s)' % (lhs, self.operator), tuple(params) + (lookup,)


class KeyTextTransform(KeyTransform):
    operator = '->>'
    nested_operator = '#>>'
    output_field = TextField()


class KeyTransformTextLookupMixin:
    """
    Mixin for combining with a lookup expecting a text lhs from a JSONField
    key lookup. Make use of the ->> operator instead of casting key values to
    text and performing the lookup on the resulting representation.
    """
    def __init__(self, key_transform, *args, **kwargs):
        assert isinstance(key_transform, KeyTransform)
        key_text_transform = KeyTextTransform(
            key_transform.key_name, *key_transform.source_expressions, **key_transform.extra
        )
        super().__init__(key_text_transform, *args, **kwargs)


class KeyTransformIExact(KeyTransformTextLookupMixin, builtin_lookups.IExact):
    pass


class KeyTransformIContains(KeyTransformTextLookupMixin, builtin_lookups.IContains):
    pass


class KeyTransformStartsWith(KeyTransformTextLookupMixin, builtin_lookups.StartsWith):
    pass


class KeyTransformIStartsWith(KeyTransformTextLookupMixin, builtin_lookups.IStartsWith):
    pass


class KeyTransformEndsWith(KeyTransformTextLookupMixin, builtin_lookups.EndsWith):
    pass


class KeyTransformIEndsWith(KeyTransformTextLookupMixin, builtin_lookups.IEndsWith):
    pass


class KeyTransformRegex(KeyTransformTextLookupMixin, builtin_lookups.Regex):
    pass


class KeyTransformIRegex(KeyTransformTextLookupMixin, builtin_lookups.IRegex):
    pass


KeyTransform.register_lookup(KeyTransformIExact)
KeyTransform.register_lookup(KeyTransformIContains)
KeyTransform.register_lookup(KeyTransformStartsWith)
KeyTransform.register_lookup(KeyTransformIStartsWith)
KeyTransform.register_lookup(KeyTransformEndsWith)
KeyTransform.register_lookup(KeyTransformIEndsWith)
KeyTransform.register_lookup(KeyTransformRegex)
KeyTransform.register_lookup(KeyTransformIRegex)


class KeyTransformFactory:

    def __init__(self, key_name):
        self.key_name = key_name

    def __call__(self, *args, **kwargs):
        return KeyTransform(self.key_name, *args, **kwargs)
4f174832bd45790d55877f01fceda44d366f91e919e709cec74039ead0ecf56d
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import (
    BaseSpatialOperations,
)
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property


class MySQLOperations(BaseSpatialOperations, DatabaseOperations):

    mysql = True
    name = 'mysql'
    geom_func_prefix = 'ST_'

    Adapter = WKTAdapter

    @cached_property
    def select(self):
        return self.geom_func_prefix + 'AsBinary(%s)'

    @cached_property
    def from_text(self):
        return self.geom_func_prefix + 'GeomFromText'

    @cached_property
    def gis_operators(self):
        operators = {
            'bbcontains': SpatialOperator(func='MBRContains'),  # For consistency w/PostGIS API
            'bboverlaps': SpatialOperator(func='MBROverlaps'),  # ...
            'contained': SpatialOperator(func='MBRWithin'),  # ...
            'contains': SpatialOperator(func='ST_Contains'),
            'crosses': SpatialOperator(func='ST_Crosses'),
            'disjoint': SpatialOperator(func='ST_Disjoint'),
            'equals': SpatialOperator(func='ST_Equals'),
            'exact': SpatialOperator(func='ST_Equals'),
            'intersects': SpatialOperator(func='ST_Intersects'),
            'overlaps': SpatialOperator(func='ST_Overlaps'),
            'same_as': SpatialOperator(func='ST_Equals'),
            'touches': SpatialOperator(func='ST_Touches'),
            'within': SpatialOperator(func='ST_Within'),
        }
        if self.connection.mysql_is_mariadb:
            operators['relate'] = SpatialOperator(func='ST_Relate')
        return operators

    disallowed_aggregates = (
        aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
        aggregates.MakeLine, aggregates.Union,
    )

    @cached_property
    def unsupported_functions(self):
        unsupported = {
            'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle',
            'ForcePolygonCW', 'GeometryDistance', 'LineLocatePoint',
            'MakeValid', 'MemSize', 'Perimeter', 'PointOnSurface', 'Reverse',
            'Scale', 'SnapToGrid', 'Transform', 'Translate',
        }
        if self.connection.mysql_is_mariadb:
            unsupported.remove('PointOnSurface')
            unsupported.update({'GeoHash', 'IsValid'})
            if self.connection.mysql_version < (10, 2, 4):
                unsupported.add('AsGeoJSON')
        elif self.connection.mysql_version < (5, 7, 5):
            unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'})
        return unsupported

    def geo_db_type(self, f):
        return f.geom_type

    def get_distance(self, f, value, lookup_type):
        value = value[0]
        if isinstance(value, Distance):
            if f.geodetic(self.connection):
                raise ValueError(
                    'Only numeric values of degree units are allowed on '
                    'geodetic distance queries.'
                )
            dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
        else:
            dist_param = value
        return [dist_param]

    def get_geometry_converter(self, expression):
        read = wkb_r().read
        srid = expression.output_field.srid
        if srid == -1:
            srid = None
        geom_class = expression.output_field.geom_class

        def converter(value, expression, connection):
            if value is not None:
                geom = GEOSGeometryBase(read(memoryview(value)), geom_class)
                if srid:
                    geom.srid = srid
                return geom
        return converter
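
An illustrative spatial query against the MySQL backend configured above; the City model, app label, and geometry value are hypothetical. Only the operators listed in gis_operators are available, and per get_distance() geodetic fields accept only plain numeric degree values.

# Hypothetical GIS model; assumes the MySQL spatial backend defined above.
from django.contrib.gis.db import models
from django.contrib.gis.geos import Point


class City(models.Model):
    location = models.PointField()

    class Meta:
        app_label = 'example'  # placeholder app label


# pt = Point(2.35, 48.85, srid=4326)
# City.objects.filter(location__intersects=pt)  # maps to ST_Intersects
# City.objects.filter(location__bbcontains=pt)  # maps to MBRContains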
4e975cde2c2c342afd2606486198c56b61541bde63c28acf9e066725b57cc8d0
import datetime
import importlib
import io
import os
import sys
from unittest import mock

from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
    ConnectionHandler, DatabaseError, connection, connections, models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings

from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase


class MigrateTests(MigrationTestBase):
    """
    Tests running the migrate command.
    """
    databases = {'default', 'other'}

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_migrate(self):
        """
        Tests basic usage of the migrate command.
        """
        # No tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run the migrations to 0001 only
        stdout = io.StringIO()
        call_command('migrate', 'migrations', '0001', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Target specific migration: 0001_initial, from migrations', stdout)
        self.assertIn('Applying migrations.0001_initial... OK', stdout)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
        # Run migrations all the way
        call_command("migrate", verbosity=0)
        # The correct tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableExists("migrations_book")
        # Unmigrate everything
        stdout = io.StringIO()
        call_command('migrate', 'migrations', 'zero', verbosity=1, stdout=stdout, no_color=True)
        stdout = stdout.getvalue()
        self.assertIn('Unapply all migrations: migrations', stdout)
        self.assertIn('Unapplying migrations.0002_second... OK', stdout)
        # Tables are gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(INSTALLED_APPS=[
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'migrations.migrations_test_apps.migrated_app',
    ])
    def test_migrate_with_system_checks(self):
        out = io.StringIO()
        call_command('migrate', skip_checks=False, no_color=True, stdout=out)
        self.assertIn('Apply all migrations: migrated_app', out.getvalue())

    @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb'])
    def test_app_without_migrations(self):
        msg = "App 'unmigrated_app_syncdb' does not have migrations."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='unmigrated_app_syncdb')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'})
    def test_ambiguous_prefix(self):
        msg = (
            "More than one migration matches 'a' in app 'migrations'. Please "
            "be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='a')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='nonexistent')

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=['migrations.routers.TestRouter'],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
        that check.
        """
        # Make sure no tables are created
        for db in connections:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")
        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
        self.assertIn(
faked", out.getvalue().lower() ) # Run migrations all the way call_command("migrate", verbosity=0) call_command("migrate", verbosity=0, database="other") # Make sure the right tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") self.assertTableNotExists("migrations_author", using="other") self.assertTableNotExists("migrations_tribble", using="other") self.assertTableNotExists("migrations_book", using="other") # Fake a roll-back call_command("migrate", "migrations", "zero", fake=True, verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other") # Make sure the tables still exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Try to run initial migration with self.assertRaises(DatabaseError): call_command("migrate", "migrations", verbosity=0) # Run initial migration with an explicit --fake-initial with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs to in # order to make --fake-initial work. call_command("migrate", "migrations", fake_initial=True, verbosity=0) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) call_command("migrate", "migrations", fake=True, verbosity=0, database="other") # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) call_command("migrate", "migrations", "zero", verbosity=0, database="other") # Make sure it's all gone for db in connections: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ call_command("migrate", "migrations", "0002", verbosity=0) call_command("migrate", "migrations", "zero", fake=True, verbosity=0) out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn("migrations.0001_initial... faked", value) self.assertIn("migrations.0002_second... faked", value) # Fake an apply call_command("migrate", "migrations", fake=True, verbosity=0) # Unmigrate everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ migrate exits if it detects a conflict. """ with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. 
""" out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) try: # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. 
            call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                '  No planned migration operations.\n',
                out.getvalue()
            )
            out = io.StringIO()
            # Show the plan for reverse migration back to 0001.
            call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0003_third\n'
                '    Undo Create model Author\n'
                "    Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
                'migrations.0002_second\n'
                '    Undo Create model Book\n'
                "    Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
                out.getvalue()
            )
            out = io.StringIO()
            # Show the migration plan to fourth, with truncated details.
            call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0004_fourth\n'
                '    Raw SQL operation -> SELECT * FROM migrations_author WHE…\n',
                out.getvalue()
            )
            # Show the plan when an operation is irreversible.
            # Migrate to the fourth migration.
            call_command('migrate', 'migrations', '0004', verbosity=0)
            out = io.StringIO()
            call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0004_fourth\n'
                '    Raw SQL operation -> IRREVERSIBLE\n',
                out.getvalue()
            )
            out = io.StringIO()
            call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True)
            # Operation is marked as irreversible only in the revert plan.
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0005_fifth\n'
                '    Raw Python operation\n'
                '    Raw Python operation\n'
                '    Raw Python operation -> Feed salamander.\n',
                out.getvalue()
            )
            call_command('migrate', 'migrations', '0005', verbosity=0)
            out = io.StringIO()
            call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True)
            self.assertEqual(
                'Planned operations:\n'
                'migrations.0005_fifth\n'
                '    Raw Python operation -> IRREVERSIBLE\n'
                '    Raw Python operation -> IRREVERSIBLE\n'
                '    Raw Python operation\n',
                out.getvalue()
            )
        finally:
            # Cleanup by unmigrating everything: fake the irreversible, then
            # migrate all to zero.
            call_command('migrate', 'migrations', '0003', fake=True, verbosity=0)
            call_command('migrate', 'migrations', 'zero', verbosity=0)

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'})
    def test_showmigrations_no_migrations(self):
        out = io.StringIO()
        call_command('showmigrations', stdout=out, no_color=True)
        self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower())

    @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
    def test_showmigrations_unmigrated_app(self):
        out = io.StringIO()
        call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True)
        self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
    def test_showmigrations_plan_no_migrations(self):
        """
        Tests --plan output of showmigrations command without migrations
        """
        out = io.StringIO()
        call_command('showmigrations', format='plan', stdout=out, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue().lower())
        out = io.StringIO()
        call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue().lower())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
    def test_showmigrations_plan_squashed(self):
        """
        Tests --plan output of showmigrations command with squashed migrations.
        """
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[ ]  migrations.1_auto\n"
            "[ ]  migrations.2_auto\n"
            "[ ]  migrations.3_squashed_5\n"
            "[ ]  migrations.6_auto\n"
            "[ ]  migrations.7_auto\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[ ]  migrations.1_auto\n"
            "[ ]  migrations.2_auto ... (migrations.1_auto)\n"
            "[ ]  migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ]  migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ]  migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )

        call_command("migrate", "migrations", "3_squashed_5", verbosity=0)

        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out)
        self.assertEqual(
            "[x]  migrations.1_auto\n"
            "[x]  migrations.2_auto\n"
            "[x]  migrations.3_squashed_5\n"
            "[ ]  migrations.6_auto\n"
            "[ ]  migrations.7_auto\n",
            out.getvalue().lower()
        )
        out = io.StringIO()
        call_command("showmigrations", format='plan', stdout=out, verbosity=2)
        self.assertEqual(
            "[x]  migrations.1_auto\n"
            "[x]  migrations.2_auto ... (migrations.1_auto)\n"
            "[x]  migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ]  migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ]  migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower()
        )

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.mutate_state_b',
        'migrations.migrations_test_apps.alter_fk.author_app',
        'migrations.migrations_test_apps.alter_fk.book_app',
    ])
    def test_showmigrations_plan_single_app_label(self):
        """
        `showmigrations --plan app_label` output with a single app_label.
        """
        # Single app with no dependencies on other apps.
        out = io.StringIO()
        call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )
        # Single app with dependencies.
        out = io.StringIO()
        call_command('showmigrations', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n',
            out.getvalue()
        )
        # Some migrations already applied.
        call_command('migrate', 'author_app', '0001', verbosity=0)
        out = io.StringIO()
        call_command('showmigrations', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[X]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n',
            out.getvalue()
        )
        # Cleanup by unmigrating author_app.
        call_command('migrate', 'author_app', 'zero', verbosity=0)

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.mutate_state_b',
        'migrations.migrations_test_apps.alter_fk.author_app',
        'migrations.migrations_test_apps.alter_fk.book_app',
    ])
    def test_showmigrations_plan_multiple_app_labels(self):
        """
        `showmigrations --plan app_label` output with multiple app_labels.
        """
        # Multiple apps: author_app depends on book_app; mutate_state_b doesn't
        # depend on other apps.
        out = io.StringIO()
        call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n'
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )
        # Multiple apps: args order shouldn't matter (the same result is
        # expected as above).
        out = io.StringIO()
        call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out)
        self.assertEqual(
            '[ ]  author_app.0001_initial\n'
            '[ ]  book_app.0001_initial\n'
            '[ ]  author_app.0002_alter_id\n'
            '[ ]  mutate_state_b.0001_initial\n'
            '[ ]  mutate_state_b.0002_add_field\n',
            out.getvalue()
        )

    @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
    def test_showmigrations_plan_app_label_no_migrations(self):
        out = io.StringIO()
        call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True)
        self.assertEqual('(no migrations)\n', out.getvalue())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate_forwards(self):
        """
        sqlmigrate outputs forward looking SQL.
""" out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. 
""" out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'}) def test_migrations_no_operations(self): err = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err) self.assertEqual(err.getvalue(), 'No operations found.\n') @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). """ call_command("migrate", "migrated_unapplied_app", stdout=io.StringIO()) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. del apps._pending_operations[('migrations', 'tribble')] @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." 
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', 'migrations', run_syncdb=True, verbosity=0)

    @override_settings(INSTALLED_APPS=[
        'migrations.migrations_test_apps.unmigrated_app_syncdb',
        'migrations.migrations_test_apps.unmigrated_app_simple',
    ])
    def test_migrate_syncdb_app_label(self):
        """
        Running migrate --run-syncdb with an app_label only creates tables for
        the specified app.
        """
        stdout = io.StringIO()
        with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute:
            call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout)
            create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)])
            self.assertEqual(create_table_count, 2)
            self.assertGreater(len(execute.mock_calls), 2)
            self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue())

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_record_replaced(self):
        """
        Running a single squashed migration should record all of the original
        replaced migrations as run.
        """
        recorder = MigrationRecorder(connection)
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_squashed_0002 (2 squashed migrations)\n',
            out.getvalue().lower()
        )
        applied_migrations = recorder.applied_migrations()
        self.assertIn(("migrations", "0001_initial"), applied_migrations)
        self.assertIn(("migrations", "0002_second"), applied_migrations)
        self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
        # Rollback changes
        call_command("migrate", "migrations", "zero", verbosity=0)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_migrate_record_squashed(self):
        """
        Running migrate for a squashed migration should record as run
        if all of the replaced migrations have been run (#25231).
        """
        recorder = MigrationRecorder(connection)
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            'migrations\n'
            ' [x] 0001_squashed_0002 (2 squashed migrations)\n',
            out.getvalue().lower()
        )
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations()
        )
        # No changes were actually applied so there is nothing to rollback

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_migrate_inconsistent_history(self):
        """
        Running migrate with some migrations applied before their dependencies
        should not be allowed.
        """
        recorder = MigrationRecorder(connection)
        recorder.record_applied("migrations", "0002_second")
        msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial"
        with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
            call_command("migrate")
        applied_migrations = recorder.applied_migrations()
        self.assertNotIn(("migrations", "0001_initial"), applied_migrations)


class MakeMigrationsTests(MigrationTestBase):
    """
    Tests running the makemigrations command.
""" def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. 
                with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
                    call_command('makemigrations', 'migrations', verbosity=0)
                self.assertEqual(has_table.call_count, 2)  # 'default' again

                # With a router that doesn't prohibit migrating 'other',
                # consistency is checked.
                with self.settings(DATABASE_ROUTERS=['migrations.routers.EmptyRouter']):
                    with self.assertRaisesMessage(Exception, 'Other connection'):
                        call_command('makemigrations', 'migrations', verbosity=0)
                self.assertEqual(has_table.call_count, 4)  # 'default' and 'other'

                # With a router that doesn't allow migrating on any database,
                # no consistency checks are made.
                with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
                    with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate:
                        call_command('makemigrations', 'migrations', verbosity=0)
                allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel')
                # allow_migrate() is called with the correct arguments.
                self.assertGreater(len(allow_migrate.mock_calls), 0)
                for mock_call in allow_migrate.mock_calls:
                    _, call_args, call_kwargs = mock_call
                    connection_alias, app_name = call_args
                    self.assertIn(connection_alias, ['default', 'other'])
                    # Raises an error if invalid app_name/model_name occurs.
                    apps.get_app_config(app_name).get_model(call_kwargs['model_name'])
                self.assertEqual(has_table.call_count, 4)

    def test_failing_migration(self):
        # If a migration fails to serialize, it shouldn't generate an empty file. #21280
        apps.register_model('migrations', UnserializableModel)

        with self.temporary_migration_module() as migration_dir:
            with self.assertRaisesMessage(ValueError, 'Cannot serialize'):
                call_command("makemigrations", "migrations", verbosity=0)

            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertFalse(os.path.exists(initial_file))

    def test_makemigrations_conflict_exit(self):
        """
        makemigrations exits if it detects a conflict.
        """
        with self.temporary_migration_module(module="migrations.test_migrations_conflict"):
            with self.assertRaises(CommandError) as context:
                call_command("makemigrations")
        exception_message = str(context.exception)
        self.assertIn(
            'Conflicting migrations detected; multiple leaf nodes '
            'in the migration graph:',
            exception_message
        )
        self.assertIn('0002_second', exception_message)
        self.assertIn('0002_conflicting_second', exception_message)
        self.assertIn('in migrations', exception_message)
        self.assertIn("To fix them run 'python manage.py makemigrations --merge'", exception_message)

    def test_makemigrations_merge_no_conflict(self):
        """
        makemigrations exits if in merge mode with no conflicts.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            call_command("makemigrations", merge=True, stdout=out)
            self.assertIn("No conflicts detected to merge.", out.getvalue())

    def test_makemigrations_empty_no_app_specified(self):
        """
        makemigrations exits if no app is specified with 'empty' mode.
        """
        msg = 'You must supply at least one app label when using --empty.'
        with self.assertRaisesMessage(CommandError, msg):
            call_command("makemigrations", empty=True)

    def test_makemigrations_empty_migration(self):
        """
        makemigrations properly constructs an empty migration.
""" with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. 
""" # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_default_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" out = io.StringIO() with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. 
""" class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. 
""" # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) val = out.getvalue().lower() self.assertIn('merging conflicting_app_with_dependencies\n', val) self.assertIn( ' branch 0002_conflicting_second\n' ' - create model something\n', val ) self.assertIn( ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n', val ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
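

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the upstream test suite): nearly every test
# above follows the same pattern -- invoke a management command through
# call_command(), capture its output in an io.StringIO, and assert on the
# captured text. The helper below distills that pattern on its own. The
# 'migrations' app label is a placeholder for any installed app.
# ---------------------------------------------------------------------------
def _example_capture_makemigrations_output(app_label='migrations'):
    import io

    from django.core.management import call_command

    out = io.StringIO()
    # dry_run=True keeps the sketch side-effect free: changes are detected
    # and reported, but no migration files are written to disk.
    call_command('makemigrations', app_label, dry_run=True, stdout=out)
    return out.getvalue()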
import compileall import os from django.db import connection, connections from django.db.migrations.exceptions import ( AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError, ) from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, modify_settings, override_settings from .test_base import MigrationTestBase class RecorderTests(TestCase): """ Tests recording migrations as applied or not. """ databases = {'default', 'other'} def test_apply(self): """ Tests marking migrations as applied/unapplied. """ recorder = MigrationRecorder(connection) self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) recorder.record_applied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, {("myapp", "0432_ponies")}, ) # That should not affect records of another database recorder_other = MigrationRecorder(connections['other']) self.assertEqual( {(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"}, set(), ) recorder.record_unapplied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) @modify_settings(INSTALLED_APPS={'append': 'basic'}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, and then render them out to a new Apps. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0002_second")) self.assertEqual(len(project_state.models), 2) author_state = project_state.models["migrations", "author"] self.assertEqual( [x for x, y in author_state.fields], ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "author"] ) # Ensure we've included unmigrated apps in there too self.assertIn("basic", project_state.real_apps) @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', 'migrations2': 'migrations2.test_migrations_2', }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_plan_handles_repeated_migrations(self): """ _generate_plan() doesn't readd migrations already in the plan (#29180). """ migration_loader = MigrationLoader(connection) nodes = [('migrations', '0002_second'), ('migrations2', '0001_initial')] self.assertEqual( migration_loader.graph._generate_plan(nodes, at_end=True), [('migrations', '0001_initial'), ('migrations', '0002_second'), ('migrations2', '0001_initial')] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}) def test_load_unmigrated_dependency(self): """ Makes sure the loader can load migrations with a dependency on an unmigrated app. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0001_initial")), [ ('contenttypes', '0001_initial'), ('auth', '0001_initial'), ("migrations", "0001_initial"), ], ) # Now render it out! 
project_state = migration_loader.project_state(("migrations", "0001_initial")) self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1) book_state = project_state.models["migrations", "book"] self.assertEqual( [x for x, y in book_state.fields], ["id", "user"] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_run_before(self): """ Makes sure the loader uses Migration.run_before. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0003_third"), ("migrations", "0002_second"), ], ) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations_first", "migrations2": "migrations2.test_migrations_2_first", }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_first(self): """ Makes sure the '__first__' migrations build correctly. """ migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "second")), [ ("migrations", "thefirst"), ("migrations2", "0001_initial"), ("migrations2", "0002_second"), ("migrations", "second"), ], ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_name_match(self): "Tests prefix name matching" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.get_migration_by_prefix("migrations", "0001").name, "0001_initial", ) with self.assertRaises(AmbiguityError): migration_loader.get_migration_by_prefix("migrations", "0") with self.assertRaises(KeyError): migration_loader.get_migration_by_prefix("migrations", "blarg") def test_load_import_error(self): with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}): with self.assertRaises(ImportError): MigrationLoader(connection) def test_load_module_file(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App with migrations module file not in unmigrated apps." ) def test_load_empty_dir(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App missing __init__.py in migrations module not in unmigrated apps." ) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], ) def test_marked_as_migrated(self): """ Undefined MIGRATION_MODULES implies default migration module. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, {'migrated_app'}) self.assertEqual(migration_loader.unmigrated_apps, set()) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={"migrated_app": None}, ) def test_marked_as_unmigrated(self): """ MIGRATION_MODULES allows disabling of migrations for a particular app. 
""" migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={'migrated_app': 'missing-module'}, ) def test_explicit_missing_module(self): """ If a MIGRATION_MODULES override points to a missing module, the error raised during the importation attempt should be propagated unless `ignore_no_migrations=True`. """ with self.assertRaisesMessage(ImportError, 'missing-module'): migration_loader = MigrationLoader(connection) migration_loader = MigrationLoader(connection, ignore_no_migrations=True) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_loading_squashed(self): "Tests loading a squashed migration" migration_loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Loading with nothing applied should just give us the one node self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 1, ) # However, fake-apply one migration and it should now use the old two recorder.record_applied("migrations", "0001_initial") migration_loader.build_graph() self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 2, ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_loading_squashed_complex(self): "Tests loading a complex set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too recorder.record_applied("migrations", "1_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "2_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 to 5 cannot use the squashed migration recorder.record_applied("migrations", "3_auto") loader.build_graph() self.assertEqual(num_nodes(), 4) recorder.record_applied("migrations", "4_auto") loader.build_graph() self.assertEqual(num_nodes(), 3) # Starting at 5 to 7 we are passed the squashed migrations recorder.record_applied("migrations", "5_auto") loader.build_graph() self.assertEqual(num_nodes(), 2) recorder.record_applied("migrations", "6_auto") loader.build_graph() self.assertEqual(num_nodes(), 1) recorder.record_applied("migrations", "7_auto") loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps(self): loader = MigrationLoader(connection) loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), 
            ('app1', '2_squashed_3'),
            ('app1', '4_auto'),
        }
        self.assertEqual(plan, expected_plan)

    @override_settings(MIGRATION_MODULES={
        "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
        "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
    })
    @modify_settings(INSTALLED_APPS={'append': [
        "migrations.test_migrations_squashed_complex_multi_apps.app1",
        "migrations.test_migrations_squashed_complex_multi_apps.app2",
    ]})
    def test_loading_squashed_complex_multi_apps_partially_applied(self):
        loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        recorder.record_applied('app1', '1_auto')
        recorder.record_applied('app1', '2_auto')
        loader.build_graph()

        plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
        plan = plan - loader.applied_migrations.keys()
        expected_plan = {
            ('app2', '1_squashed_2'),
            ('app1', '3_auto'),
            ('app1', '4_auto'),
        }
        self.assertEqual(plan, expected_plan)

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"})
    def test_loading_squashed_erroneous(self):
        "Tests loading a complex but erroneous set of squashed migrations"
        loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)

        def num_nodes():
            plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
            return len(plan - loader.applied_migrations.keys())

        # Empty database: use squashed migration
        loader.build_graph()
        self.assertEqual(num_nodes(), 5)

        # Starting at 1 or 2 should use the squashed migration too
        recorder.record_applied("migrations", "1_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 4)

        recorder.record_applied("migrations", "2_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 3)

        # However, starting at 3 or 4, nonexistent migrations would be needed.
        msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). "
               "Django tried to replace migration migrations.5_auto with any of "
               "[migrations.3_squashed_5] but wasn't able to because some of the replaced "
               "migrations are already applied.")

        recorder.record_applied("migrations", "3_auto")
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            loader.build_graph()

        recorder.record_applied("migrations", "4_auto")
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            loader.build_graph()

        # Starting at 5 to 7, we are past the squashed migrations.
        recorder.record_applied("migrations", "5_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 2)

        recorder.record_applied("migrations", "6_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 1)

        recorder.record_applied("migrations", "7_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 0)

    @override_settings(
        MIGRATION_MODULES={'migrations': 'migrations.test_migrations'},
        INSTALLED_APPS=['migrations'],
    )
    def test_check_consistent_history(self):
        loader = MigrationLoader(connection=None)
        loader.check_consistent_history(connection)
        recorder = MigrationRecorder(connection)
        recorder.record_applied('migrations', '0002_second')
        msg = (
            "Migration migrations.0002_second is applied before its dependency "
            "migrations.0001_initial on database 'default'."
) with self.assertRaisesMessage(InconsistentMigrationHistory, msg): loader.check_consistent_history(connection) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history_squashed(self): """ MigrationLoader.check_consistent_history() should ignore unapplied squashed migrations that have all of their `replaces` applied. """ loader = MigrationLoader(connection=None) recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0001_initial') recorder.record_applied('migrations', '0002_second') loader.check_consistent_history(connection) recorder.record_applied('migrations', '0003_third') loader.check_consistent_history(connection) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_ref_squashed.app1", "app2": "migrations.test_migrations_squashed_ref_squashed.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_ref_squashed.app1", "migrations.test_migrations_squashed_ref_squashed.app2", ]}) def test_loading_squashed_ref_squashed(self): "Tests loading a squashed migration with a new migration referencing it" r""" The sample migrations are structured like this: app_1 1 --> 2 ---------------------*--> 3 *--> 4 \ / / *-------------------*----/--> 2_sq_3 --* \ / / =============== \ ============= / == / ====================== app_2 *--> 1_sq_2 --* / \ / *--> 1 --> 2 --* Where 2_sq_3 is a replacing migration for 2 and 3 in app_1, as 1_sq_2 is a replacing migration for 1 and 2 in app_2. """ loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Load with nothing applied: both migrations squashed. loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply a few from app1: unsquashes migration in app1. recorder.record_applied('app1', '1_auto') recorder.record_applied('app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply one from app2: unsquashes migration in app2 too. 
recorder.record_applied('app2', '1_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '2_auto'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_private'}) def test_ignore_files(self): """Files prefixed with underscore, tilde, or dot aren't loaded.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, ['0001_initial']) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_namespace_package'}, ) def test_loading_namespace_package(self): """Migration directories without an __init__.py file are loaded.""" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(('migrations', '0001_initial')), [('migrations', '0001_initial')], ) class PycLoaderTests(MigrationTestBase): def test_valid(self): """ To support frozen environments, MigrationLoader loads .pyc migrations. """ with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: # Compile .py files to .pyc files and delete .py files. compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True) for name in os.listdir(migration_dir): if name.endswith('.py'): os.remove(os.path.join(migration_dir, name)) loader = MigrationLoader(connection) self.assertIn(('migrations', '0001_initial'), loader.disk_migrations) def test_invalid(self): """ MigrationLoader reraises ImportErrors caused by "bad magic number" pyc files with a more helpful message. """ with self.temporary_migration_module(module='migrations.test_migrations_bad_pyc') as migration_dir: # The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in. os.rename( os.path.join(migration_dir, '0001_initial.pyc-tpl'), os.path.join(migration_dir, '0001_initial.pyc'), ) msg = ( r"Couldn't import '\w+.migrations.0001_initial' as it appears " "to be a stale .pyc file." ) with self.assertRaisesRegex(ImportError, msg): MigrationLoader(connection)
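

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the upstream test suite): the loader tests
# above drive MigrationLoader through its graph API. The same calls can be
# used outside of tests to inspect which migrations must run to reach a given
# node; the ('migrations', '0001_initial') default is a placeholder borrowed
# from the fixtures above.
# ---------------------------------------------------------------------------
def _example_forwards_plan(target=('migrations', '0001_initial')):
    from django.db import connection
    from django.db.migrations.loader import MigrationLoader

    # Building the loader reads migrations from disk and from the database
    # recorder, then assembles the dependency graph.
    loader = MigrationLoader(connection)
    # forwards_plan() returns the ordered (app_label, migration_name) pairs
    # that must be applied, dependencies first, to reach the target node.
    return loader.graph.forwards_plan(target)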
import functools import re from unittest import mock from django.apps import apps from django.conf import settings from django.contrib.auth.models import AbstractBaseUser from django.core.validators import RegexValidator, validate_slug from django.db import connection, models from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.graph import MigrationGraph from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.state import ModelState, ProjectState from django.test import TestCase, override_settings from django.test.utils import isolate_lru_cache from .models import FoodManager, FoodQuerySet class DeconstructibleObject: """ A custom deconstructible object. """ def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def deconstruct(self): return ( self.__module__ + '.' + self.__class__.__name__, self.args, self.kwargs ) class AutodetectorTests(TestCase): """ Tests the migration autodetector. """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) author_name_null = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, null=True)), ]) author_name_longer = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400)), ]) author_name_renamed = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200)), ]) author_name_default = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default='Ada Lovelace')), ]) author_name_check_constraint = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}, ) author_dates_of_birth_auto_now = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now=True)), ("date_time_of_birth", models.DateTimeField(auto_now=True)), ("time_of_birth", models.TimeField(auto_now=True)), ]) author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now_add=True)), ("date_time_of_birth", models.DateTimeField(auto_now_add=True)), ("time_of_birth", models.TimeField(auto_now_add=True)), ]) author_name_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_4 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_list_1 = 
ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])), ]) author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))), ]) author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 999 })), ]) author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), None, a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, 
default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c-changed')), ))), ]) author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), c=None, ))), ]) author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]) author_with_biography_non_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField()), ("biography", models.TextField()), ]) author_with_biography_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(blank=True)), ("biography", models.TextField(blank=True)), ]) author_with_book = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_book_order_wrt = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ], options={"order_with_respect_to": "book"}) author_renamed_with_book = ModelState("testapp", "Writer", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_publisher_string = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher_name", models.CharField(max_length=200)), ]) author_with_publisher = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) author_with_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("auth.User", models.CASCADE)), ]) author_with_custom_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)), ]) author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_options = ModelState("testapp", "AuthorProxy", [], { "proxy": True, "verbose_name": "Super Author", }, ("testapp.author",)) author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)) author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)) author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)) author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", 
models.AutoField(primary_key=True))]) author_unmanaged_custom_pk = ModelState("testapp", "Author", [ ("pk_field", models.IntegerField(primary_key=True)), ]) author_with_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher")), ]) author_with_m2m_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)), ]) author_with_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")), ]) author_with_renamed_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")), ]) author_with_former_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.CharField(max_length=100)), ]) author_with_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], { "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) author_with_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_with_new_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_two"}) author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_three"}) contract = ModelState("testapp", "Contract", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) contract_renamed = ModelState("testapp", "Deal", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) publisher = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ]) publisher_with_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_aardvark_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_book = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Book", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) other_pony = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ]) other_pony_food = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ], managers=[ ('food_qs', FoodQuerySet.as_manager()), ('food_mgr', FoodManager('a', 'b')), ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)), ]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", 
[("id", models.AutoField(primary_key=True))]) book = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)), ]) book_migrations_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_no_author_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.IntegerField()), ("title", models.CharField(max_length=200)), ]) book_with_no_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=200)), ]) book_with_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_field_and_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author")), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")), ("title", models.CharField(max_length=200)), ]) book_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")], }) book_unordered_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")], }) book_foo_together = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("author", "title")}, "unique_together": {("author", "title")}, }) book_foo_together_2 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) book_foo_together_3 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { 
"index_together": {("title", "newfield")}, "unique_together": {("title", "newfield")}, }) book_foo_together_4 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield2", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "newfield2")}, "unique_together": {("title", "newfield2")}, }) attribution = ModelState("otherapp", "Attribution", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) edition = ModelState("thirdapp", "Edition", [ ("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) custom_user = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ], bases=(AbstractBaseUser,)) custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ]) aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author",)) aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [ ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)), ]) knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))]) rabbit = ModelState("eggs", "Rabbit", [ ("id", models.AutoField(primary_key=True)), ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)), ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)), ], { "unique_together": {("parent", "knight")}, "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')], }) def repr_changes(self, changes, include_dependencies=False): output = "" for app_label, migrations in sorted(changes.items()): output += " %s:\n" % app_label for migration in migrations: output += " %s\n" % migration.name for operation in migration.operations: output += " %s\n" % operation if include_dependencies: output += " Dependencies:\n" if migration.dependencies: for dep in migration.dependencies: output += " %s\n" % (dep,) else: output += " None\n" return output def assertNumberMigrations(self, changes, app_label, number): if len(changes.get(app_label, [])) != number: self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % ( len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes), )) def assertMigrationDependencies(self, changes, app_label, position, dependencies): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if set(migration.dependencies) != set(dependencies): self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, dependencies, self.repr_changes(changes, include_dependencies=True), )) def assertOperationTypes(self, changes, app_label, position, types): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) 
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        real_types = [operation.__class__.__name__ for operation in migration.operations]
        if types != real_types:
            self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % (
                app_label,
                migration.name,
                types,
                self.repr_changes(changes),
            ))

    def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Check the operation index against the migration's operations (the
        # original re-checked the migration index here, which could never fail
        # at this point and produced the wrong error message).
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position,
                app_label,
                migration.name,
                self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        for attr, value in attrs.items():
            if getattr(operation, attr, None) != value:
                self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % (
                    app_label,
                    migration.name,
                    operation_position,
                    attr,
                    value,
                    getattr(operation, attr, None),
                    self.repr_changes(changes),
                ))

    def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Same fix as above: validate the operation index, not the migration
        # index a second time.
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position,
                app_label,
                migration.name,
                self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        if not hasattr(operation, 'field'):
            self.fail("No field attribute for %s.%s op #%s."
                      % (
                          app_label,
                          migration.name,
                          operation_position,
                      ))
        field = operation.field
        for attr, value in attrs.items():
            if getattr(field, attr, None) != value:
                self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % (
                    app_label,
                    migration.name,
                    operation_position,
                    attr,
                    value,
                    getattr(field, attr, None),
                    self.repr_changes(changes),
                ))

    def make_project_state(self, model_states):
        "Shortcut to make ProjectStates from lists of predefined models"
        project_state = ProjectState()
        for model_state in model_states:
            project_state.add_model(model_state.clone())
        return project_state

    def get_changes(self, before_states, after_states, questioner=None):
        return MigrationAutodetector(
            self.make_project_state(before_states),
            self.make_project_state(after_states),
            questioner,
        )._detect_changes()

    def test_arrange_for_graph(self):
        """Tests auto-naming of migrations for graph matching."""
        # Make a fake graph
        graph = MigrationGraph()
        graph.add_node(("testapp", "0001_initial"), None)
        graph.add_node(("testapp", "0002_foobar"), None)
        graph.add_node(("otherapp", "0001_initial"), None)
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial"))
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial"))
        # Use project state to make a new migration change set
        before = self.make_project_state([])
        after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable])
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph
        changes = autodetector.arrange_for_graph(changes, graph)
        # Make sure there's a new name, deps match, etc.
        self.assertEqual(changes["testapp"][0].name, "0003_author")
        self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")])
        self.assertEqual(changes["otherapp"][0].name, "0002_pony_stable")
        self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")])

    def test_trim_apps(self):
        """
        Trim does not remove dependencies but does remove unwanted apps.
""" # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? 
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)

    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition',
                side_effect=AssertionError("Should not have prompted for not null addition"))
    def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method):
        changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)

    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition')
    def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method):
        changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
        self.assertEqual(mocked_ask_method.call_count, 3)

    def test_remove_field(self):
        """Tests autodetection of removed fields."""
        changes = self.get_changes([self.author_name], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")

    def test_alter_field(self):
        """Tests autodetection of altered fields."""
        changes = self.get_changes([self.author_name], [self.author_name_longer])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True)

    def test_supports_functools_partial(self):
        def _content_file_name(instance, filename, key, **kwargs):
            return '{}/{}'.format(instance, filename)

        def content_file_name(key, **kwargs):
            return functools.partial(_content_file_name, key, **kwargs)

        # An unchanged partial reference.
        before = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))),
        ])]
        after = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))),
        ])]
        changes = self.get_changes(before, after)
        self.assertNumberMigrations(changes, 'testapp', 0)

        # A changed partial reference.
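        # (Editorial note, hedged: functools.partial objects don't compare
        # equal by value, so the autodetector is expected to compare their
        # deconstructed func/args/keywords instead -- changing the positional
        # args here should therefore surface as an AlterField.)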
        args_changed = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))),
        ])]
        changes = self.get_changes(before, args_changed)
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['AlterField'])
        # Can't use assertOperationFieldAttributes because we need the
        # deconstructed version, i.e., the exploded func/args/keywords rather
        # than the partial: we don't care if it's not the same instance of the
        # partial, only if it's the same source function, args, and keywords.
        value = changes['testapp'][0].operations[0].field.upload_to
        self.assertEqual(
            (_content_file_name, ('other-file',), {}),
            (value.func, value.args, value.keywords)
        )

        kwargs_changed = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))),
        ])]
        changes = self.get_changes(before, kwargs_changed)
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['AlterField'])
        value = changes['testapp'][0].operations[0].field.upload_to
        self.assertEqual(
            (_content_file_name, ('file',), {'spam': 'eggs'}),
            (value.func, value.args, value.keywords)
        )

    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration',
                side_effect=AssertionError("Should not have prompted for not null alteration"))
    def test_alter_field_to_not_null_with_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        changes = self.get_changes([self.author_name_null], [self.author_name_default])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace')

    @mock.patch(
        'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration',
        return_value=models.NOT_PROVIDED,
    )
    def test_alter_field_to_not_null_without_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        changes = self.get_changes([self.author_name_null], [self.author_name])
        self.assertEqual(mocked_ask_method.call_count, 1)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED)

    @mock.patch(
        'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration',
        return_value='Some Name',
    )
    def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        changes = self.get_changes([self.author_name_null], [self.author_name])
        self.assertEqual(mocked_ask_method.call_count, 1)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name")

    def test_rename_field(self):
        """Tests autodetection of renamed fields."""
        changes = self.get_changes(
            [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True})
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names")

    def test_rename_field_foreign_key_to_field(self):
        before = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('field', models.IntegerField(unique=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='field')),
            ]),
        ]
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('renamed_field', models.IntegerField(unique=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='renamed_field')),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField'])
        self.assertOperationAttributes(changes, 'app', 0, 0, old_name='field', new_name='renamed_field')

    def test_rename_foreign_object_fields(self):
        fields = ('first', 'second')
        renamed_fields = ('first_renamed', 'second_renamed')
        before = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
            ], options={'unique_together': {fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=fields, to_fields=fields,
                )),
            ]),
        ]
        # Case 1: to_fields renames.
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first_renamed', models.IntegerField()),
                ('second_renamed', models.IntegerField()),
            ], options={'unique_together': {renamed_fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=fields, to_fields=renamed_fields,
                )),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField', 'AlterUniqueTogether'])
        self.assertOperationAttributes(
            changes, 'app', 0, 0, model_name='foo', old_name='first', new_name='first_renamed',
        )
        self.assertOperationAttributes(
            changes, 'app', 0, 1, model_name='foo', old_name='second', new_name='second_renamed',
        )
        # Case 2: from_fields renames.
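        # (Editorial note, an assumption about intent: renaming only the local
        # from_fields on Bar shouldn't touch Foo or its unique_together, so,
        # unlike case 1, no AlterUniqueTogether operation is expected below.)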
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
            ], options={'unique_together': {fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first_renamed', models.IntegerField()),
                ('second_renamed', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=renamed_fields, to_fields=fields,
                )),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField'])
        self.assertOperationAttributes(
            changes, 'app', 0, 0, model_name='bar', old_name='first', new_name='first_renamed',
        )
        self.assertOperationAttributes(
            changes, 'app', 0, 1, model_name='bar', old_name='second', new_name='second_renamed',
        )

    def test_rename_referenced_primary_key(self):
        before = [
            ModelState('app', 'Foo', [
                ('id', models.CharField(primary_key=True, serialize=False)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE)),
            ]),
        ]
        after = [
            ModelState('app', 'Foo', [
                ('renamed_id', models.CharField(primary_key=True, serialize=False)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE)),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField'])
        self.assertOperationAttributes(changes, 'app', 0, 0, old_name='id', new_name='renamed_id')

    def test_rename_field_preserved_db_column(self):
        """
        RenameField is used if a field is renamed and db_column equal to the
        old field's column is added.
""" before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(db_column='field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='field', new_name='renamed_field', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='foo', name='renamed_field') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_field', 'django.db.models.IntegerField', [], {'db_column': 'field'}, )) def test_rename_related_field_preserved_db_column(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='foo', new_name='renamed_foo', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='bar', name='renamed_foo') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_foo', 'django.db.models.ForeignKey', [], {'to': 'app.Foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'}, )) def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Now that RenameModel handles related fields too, there should be # no AlterField for the related field. self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_m2m_through_model(self): """ Tests autodetection of renamed models that are used in M2M relations as through models. """ changes = self.get_changes( [self.author_with_m2m_through, self.publisher, self.contract], [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed], MigrationQuestioner({'ask_rename_model': True}) ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal') def test_rename_model_with_renamed_rel_field(self): """ Tests autodetection of renamed models while simultaneously renaming one of the fields that relate to the renamed model. 
""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_field_and_author_renamed], MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Right number/type of migrations for related field rename? # Alter is already taken care of. self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer") def test_rename_model_with_fks_in_different_position(self): """ #24537 - The order of fields in a model does not influence the RenameModel detection. """ before = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "EntityB", [ ("id", models.AutoField(primary_key=True)), ("some_label", models.CharField(max_length=255)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ]), ] after = [ ModelState("testapp", "EntityA", [ ("id", models.AutoField(primary_key=True)), ]), ModelState("testapp", "RenamedEntityB", [ ("id", models.AutoField(primary_key=True)), ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)), ("some_label", models.CharField(max_length=255)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True})) self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB") def test_rename_model_reverse_relation_dependencies(self): """ The migration to rename a model pointed to by a foreign key in another app must run after the other app's migration that adds the foreign key with model's original name. Therefore, the renaming migration has a dependency on that other migration. """ before = [ ModelState('testapp', 'EntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.EntityA', models.CASCADE)), ]), ] after = [ ModelState('testapp', 'RenamedEntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.RenamedEntityA', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename_model': True})) self.assertNumberMigrations(changes, 'testapp', 1) self.assertMigrationDependencies(changes, 'testapp', 0, [('otherapp', '__first__')]) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='EntityA', new_name='RenamedEntityA') def test_fk_dependency(self): """Having a ForeignKey automatically adds a dependency.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (author), # thirdapp (edition) depends on otherapp (book) changes = self.get_changes([], [self.author_name, self.book, self.edition]) # Right number/type of migrations? 
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'thirdapp', 1)
        self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition")
        self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")])

    def test_proxy_fk_dependency(self):
        """FK dependencies still work on proxy models."""
        # Note that testapp (author) has no dependencies,
        # otherapp (book) depends on thirdapp (authorproxy)
        changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'thirdapp', 1)
        self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy")
        self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")])

    def test_same_app_no_fk_dependency(self):
        """
        A migration with a FK between two models of the same app
        does not have a dependency to itself.
        """
        changes = self.get_changes([], [self.author_with_publisher, self.publisher])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])

    def test_circular_fk_dependency(self):
        """
        Having a circular ForeignKey dependency automatically
        resolves the situation into 2 migrations on one side and 1 on the other.
        """
        changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")])
        # Right number/type of migrations?
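        # (Editorial note, hedged: the cycle is expected to be broken by
        # creating Book without its FK first, then adding the FK in a second
        # otherapp migration that can safely depend on testapp's migration,
        # as the dependency assertions below check.)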
        self.assertNumberMigrations(changes, 'otherapp', 2)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"])
        self.assertMigrationDependencies(changes, 'otherapp', 0, [])
        self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")])
        # both split migrations should be `initial`
        self.assertTrue(changes['otherapp'][0].initial)
        self.assertTrue(changes['otherapp'][1].initial)

    def test_same_app_circular_fk_dependency(self):
        """
        A migration with a FK between two models of the same app does
        not have a dependency to itself.
        """
        changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
        self.assertMigrationDependencies(changes, 'testapp', 0, [])

    def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
        """
        #22275 - A migration with circular FK dependency does not try
        to create unique together constraint and indexes before creating
        all required fields first.
        """
        changes = self.get_changes([], [self.knight, self.rabbit])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'eggs', 1)
        self.assertOperationTypes(
            changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"]
        )
        self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options)
        self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options)
        self.assertMigrationDependencies(changes, 'eggs', 0, [])

    def test_alter_db_table_add(self):
        """Tests detection for adding db_table in model's options."""
        changes = self.get_changes([self.author_empty], [self.author_with_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one")

    def test_alter_db_table_change(self):
        """Tests detection for changing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two")

    def test_alter_db_table_remove(self):
        """Tests detection for removing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None)

    def test_alter_db_table_no_changes(self):
        """
        Alter_db_table doesn't generate a migration if no changes have been made.
        """
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options])
        # Right number of migrations?
        self.assertEqual(len(changes), 0)

    def test_keep_db_table_with_model_change(self):
        """
        Tests when model changes but db_table stays as-is, autodetector must not
        create more than one operation.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")

    def test_alter_db_table_with_model_change(self):
        """
        Tests when model and db_table changes, autodetector must create two
        operations.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_new_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three")

    def test_identical_regex_doesnt_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    re.compile('^[-a-zA-Z0-9_]+\\Z'),
                    'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.',
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 0)

    def test_different_regex_does_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    re.compile('^[a-z]+\\Z', 32),
                    'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.',
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])

    def test_empty_foo_together(self):
        """
        #23452 - Empty unique/index_together shouldn't generate a migration.
        """
        # Explicitly testing for not specified, since this is the case after
        # a CreateModel operation w/o any definition on the original model
        model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))])
        # Explicitly testing for None, since this was the issue in #23452 after
        # an AlterFooTogether operation with e.g. () as value
        model_state_none = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": None,
            "unique_together": None,
        })
        # Explicitly testing for the empty set, since we now always have sets.
        # During removal (('col1', 'col2'),) --> () this becomes set([])
        model_state_empty = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": set(),
            "unique_together": set(),
        })

        def test(from_state, to_state, msg):
            changes = self.get_changes([from_state], [to_state])
            if changes:
                ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations)
                self.fail('Created operation(s) %s from %s' % (ops, msg))

        tests = (
            (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'),
            (model_state_not_specified, model_state_none, '"not specified" to "None"'),
            (model_state_not_specified, model_state_empty, '"not specified" to "empty"'),
            (model_state_none, model_state_not_specified, '"None" to "not specified"'),
            (model_state_none, model_state_none, '"None" to "None"'),
            (model_state_none, model_state_empty, '"None" to "empty"'),
            (model_state_empty, model_state_not_specified, '"empty" to "not specified"'),
            (model_state_empty, model_state_none, '"empty" to "None"'),
            (model_state_empty, model_state_empty, '"empty" to "empty"'),
        )

        for t in tests:
            test(*t)

    def test_create_model_with_indexes(self):
        """Test creation of new model with indexes already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]})
        changes = self.get_changes([], [author])
        added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index)

    def test_add_indexes(self):
        """Test change detection of new indexes."""
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes])
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex'])
        added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index)

    def test_remove_indexes(self):
        """Test change detection of removed indexes."""
        changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')

    def test_order_fields_indexes(self):
        """Test change detection of reordering of fields in indexes."""
        changes = self.get_changes(
            [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes]
        )
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')
        added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index)

    def test_create_model_with_check_constraint(self):
        """Test creation of new model with constraints already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]})
        changes = self.get_changes([], [author])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddConstraint'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', constraint=added_constraint)

    def test_add_constraints(self):
        """Test change detection of new constraints."""
        changes = self.get_changes([self.author_name], [self.author_name_check_constraint])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['AddConstraint'])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', constraint=added_constraint)

    def test_remove_constraints(self):
        """Test change detection of removed constraints."""
        changes = self.get_changes([self.author_name_check_constraint], [self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob')

    def test_add_foo_together(self):
        """Tests index/unique_together detection."""
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together])
        # Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) def test_remove_foo_together(self): """Tests index/unique_together detection.""" changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) def test_foo_together_remove_fk(self): """Tests unique_together and field removal detection & ordering""" changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, [ "AlterUniqueTogether", "AlterIndexTogether", "RemoveField" ]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set()) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author") def test_foo_together_no_changes(self): """ index/unique_together doesn't generate a migration if no changes have been made. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together] ) # Right number of migrations? self.assertEqual(len(changes), 0) def test_foo_together_ordering(self): """ index/unique_together also triggers on ordering changes. """ changes = self.get_changes( [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("title", "author")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("title", "author")}) def test_add_field_and_foo_together(self): """ Added fields will be created before using them in index/unique_together. """ changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")}) def test_create_model_and_unique_together(self): author = ModelState("otherapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) book_with_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) changes = self.get_changes([self.book_with_no_author], [author, book_with_author]) # Right number of migrations? self.assertEqual(len(changes['otherapp']), 1) # Right number of actions? migration = changes['otherapp'][0] self.assertEqual(len(migration.operations), 4) # Right actions order? self.assertOperationTypes( changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether'] ) def test_remove_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether", "RemoveField"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}) self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="newfield") def test_rename_field_and_foo_together(self): """ Removed fields will be removed after updating index/unique_together. """ changes = self.get_changes( [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together_4], MigrationQuestioner({"ask_rename": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, "otherapp", 0, ["RenameField", "AlterUniqueTogether", "AlterIndexTogether"]) self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={ ("title", "newfield2") }) self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield2")}) def test_proxy(self): """The autodetector correctly deals with proxy models.""" # First, we test adding a proxy model changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"]) self.assertOperationAttributes( changes, "testapp", 0, 0, name="AuthorProxy", options={"proxy": True, "indexes": [], "constraints": []} ) # Now, we test turning a proxy model into a non-proxy model # It should delete the proxy then make the real one changes = self.get_changes( [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy] ) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy") self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={}) def test_proxy_custom_pk(self): """ #23415 - The autodetector must correctly deal with custom FK on proxy models. """ # First, we test the default pk field name changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Now, we test the custom pk field name changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]) # The field name the FK on the book model points to self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field') def test_proxy_to_mti_with_fk_to_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[2][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. changes = self.get_changes( [self.author_empty, self.author_proxy_third, self.book_proxy_fk], [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AuthorProxy? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) # otherapp should depend on thirdapp. self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')]) # Now, test the pk table and field name. self.assertEqual( changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table, 'thirdapp_authorproxy', ) self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr') def test_proxy_to_mti_with_fk_to_proxy_proxy(self): # First, test the pk table and field name. changes = self.get_changes( [], [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) self.assertEqual( changes['otherapp'][0].operations[0].fields[1][1].remote_field.model._meta.db_table, 'testapp_author', ) self.assertEqual(changes['otherapp'][0].operations[0].fields[1][1].remote_field.field_name, 'id') # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy, # a proxy of AuthorProxy. changes = self.get_changes( [self.author_empty, self.author_proxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], [self.author_empty, self.author_proxy_notproxy, self.author_proxy_proxy, self.book_proxy_proxy_fk], ) # Right number/type of migrations for the AuthorProxy model? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel']) # Right number/type of migrations for the Book model with a FK to # AAuthorProxyProxy? 
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField'])
        # otherapp should depend on testapp.
        self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')])
        # Now, test the pk table and field name.
        self.assertEqual(
            changes['otherapp'][0].operations[0].field.remote_field.model._meta.db_table,
            'testapp_authorproxy',
        )
        self.assertEqual(changes['otherapp'][0].operations[0].field.remote_field.field_name, 'author_ptr')

    def test_unmanaged_create(self):
        """The autodetector correctly deals with unmanaged models."""
        # First, we test adding an unmanaged model
        changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False})

    def test_unmanaged_delete(self):
        changes = self.get_changes([self.author_empty, self.author_unmanaged], [self.author_empty])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel'])

    def test_unmanaged_to_managed(self):
        # Now, we test turning an unmanaged model into a managed model
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={})

    def test_managed_to_unmanaged(self):
        # Now, we turn managed to unmanaged.
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False})

    def test_unmanaged_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on
        unmanaged models.
        """
        # First, we test the default pk field name
        changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
        # The field name the FK on the book model points to
        self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'id')
        # Now, we test the custom pk field name
        changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
        # The field name the FK on the book model points to
        self.assertEqual(changes['otherapp'][0].operations[0].fields[2][1].remote_field.field_name, 'pk_field')

    @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
    def test_swappable(self):
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes([self.custom_user], [self.custom_user, self.author_with_custom_user])
        # Right number/type of migrations?
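        # (Editorial note, hedged: the dependency is expected to be recorded
        # against the "__setting__" placeholder rather than a concrete app's
        # migration, so the generated migration doesn't hard-code the swapped
        # user model, as the dependency assertion below shows.)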
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field to_model = '%s.%s' % ( fk_field.remote_field.model._meta.app_label, fk_field.remote_field.model._meta.object_name, ) self.assertEqual(to_model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). 
changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ 'CreateModel', 'CreateModel', 'CreateModel', 'AddField', ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Contract') self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. """ changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name="Author", options={'order_with_respect_to': 'book'} ) self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_alter_model_managers(self): """ Changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_default_related_name_option(self): model_state = ModelState('app', 'model', [ ('id', models.AutoField(primary_key=True)), ], options={'default_related_name': 'related_name'}) changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'app', 0, 0, name='model', options={'default_related_name': 'related_name'}, ) altered_model_state = ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes([model_state], [altered_model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterModelOptions']) self.assertOperationAttributes(changes, 'app', 0, 0, name='model', options={}) @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_multiple_bases(self): """#23956 - Inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? 
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy")

    def test_pk_fk_included(self):
        """
        A relation used as the primary key is kept as part of CreateModel.
        """
        changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark")

    def test_first_dependency(self):
        """
        A dependency to an app with no migrations uses __first__.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")])

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_last_dependency(self):
        """
        A dependency to an app with existing migrations uses the last
        migration of that app.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = ["migrations"]
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book")
        self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")])

    def test_alter_fk_before_model_deletion(self):
        """
        ForeignKeys are altered _before_ the model they used to refer to is
        deleted.
        """
        changes = self.get_changes(
            [self.author_name, self.publisher_with_author],
            [self.aardvark_testapp, self.publisher_with_aardvark_author]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark")
        self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author")
        self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author")

    def test_fk_dependency_other_app(self):
        """
        #23100 - ForeignKeys correctly depend on other apps' models.
        """
        changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book])
        # Right number/type of migrations?
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_alter_field_to_fk_dependency_other_app(self): changes = self.get_changes( [self.author_empty, self.book_with_no_author_fk], [self.author_empty, self.book], ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', '__first__')]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - The dependency resolver knows to put all CreateModel before AddField and not become unsolvable. """ address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. 
""" with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @override_settings(AUTH_USER_MODEL='a.User') def test_swappable_circular_multi_mti(self): with isolate_lru_cache(apps.get_swappable_settings_name): parent = ModelState('a', 'Parent', [ ('user', models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)) ]) child = ModelState('a', 'Child', [], bases=('a.Parent',)) user = ModelState('a', 'User', [], bases=(AbstractBaseUser, 'a.Child')) changes = self.get_changes([], [parent, child, user]) self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ['CreateModel', 'CreateModel', 'CreateModel', 'AddField']) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) def test_mti_inheritance_model_removal(self): Animal = ModelState('app', 'Animal', [ ("id", models.AutoField(primary_key=True)), ]) Dog = ModelState('app', 'Dog', [], bases=('app.Animal',)) changes = self.get_changes([Animal, Dog], [Animal]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['DeleteModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='Dog')
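
# A minimal, hedged sketch (not part of the test suite above) of the pattern
# all of these tests exercise: build a "before" and an "after" ProjectState
# and diff them with the autodetector. The app and model names below are
# illustrative assumptions, not fixtures from this module.
#
#   from django.db import models
#   from django.db.migrations.autodetector import MigrationAutodetector
#   from django.db.migrations.state import ModelState, ProjectState
#
#   before = ProjectState()                # empty project: no models yet
#   after = ProjectState()
#   after.add_model(ModelState('testapp', 'Author', [
#       ('id', models.AutoField(primary_key=True)),
#   ]))
#   changes = MigrationAutodetector(before, after)._detect_changes()
#   # changes == {'testapp': [<Migration ...>]} containing one CreateModel,
#   # which is what assertNumberMigrations/assertOperationTypes verify above.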
import unittest
from io import StringIO
from unittest import mock
from unittest.suite import _DebugResult

from django.test import SimpleTestCase


class ErrorTestCase(SimpleTestCase):
    def raising_test(self):
        self._pre_setup.assert_called_once_with()
        raise Exception('debug() bubbles up exceptions before cleanup.')

    def simple_test(self):
        self._pre_setup.assert_called_once_with()

    @unittest.skip('Skip condition.')
    def skipped_test(self):
        pass


@mock.patch.object(ErrorTestCase, '_post_teardown')
@mock.patch.object(ErrorTestCase, '_pre_setup')
class DebugInvocationTests(SimpleTestCase):
    def get_runner(self):
        return unittest.TextTestRunner(stream=StringIO())

    def isolate_debug_test(self, test_suite, result):
        # Suite teardown needs to be manually called to isolate failures.
        test_suite._tearDownPreviousClass(None, result)
        test_suite._handleModuleTearDown(result)

    def test_run_cleanup(self, _pre_setup, _post_teardown):
        """Simple test run: catches errors and runs cleanup."""
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('raising_test'))
        result = self.get_runner()._makeResult()
        self.assertEqual(result.errors, [])
        test_suite.run(result)
        self.assertEqual(len(result.errors), 1)
        _, traceback = result.errors[0]
        self.assertIn('Exception: debug() bubbles up exceptions before cleanup.', traceback)
        _pre_setup.assert_called_once_with()
        _post_teardown.assert_called_once_with()

    def test_run_pre_setup_error(self, _pre_setup, _post_teardown):
        _pre_setup.side_effect = Exception('Exception in _pre_setup.')
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('simple_test'))
        result = self.get_runner()._makeResult()
        self.assertEqual(result.errors, [])
        test_suite.run(result)
        self.assertEqual(len(result.errors), 1)
        _, traceback = result.errors[0]
        self.assertIn('Exception: Exception in _pre_setup.', traceback)
        # pre-setup is called but not post-teardown.
        _pre_setup.assert_called_once_with()
        self.assertFalse(_post_teardown.called)

    def test_run_post_teardown_error(self, _pre_setup, _post_teardown):
        _post_teardown.side_effect = Exception('Exception in _post_teardown.')
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('simple_test'))
        result = self.get_runner()._makeResult()
        self.assertEqual(result.errors, [])
        test_suite.run(result)
        self.assertEqual(len(result.errors), 1)
        _, traceback = result.errors[0]
        self.assertIn('Exception: Exception in _post_teardown.', traceback)
        # pre-setup and post-teardown are called.
        _pre_setup.assert_called_once_with()
        _post_teardown.assert_called_once_with()

    def test_run_skipped_test_no_cleanup(self, _pre_setup, _post_teardown):
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('skipped_test'))
        try:
            test_suite.run(self.get_runner()._makeResult())
        except unittest.SkipTest:
            self.fail('SkipTest should not be raised at this stage.')
        self.assertFalse(_post_teardown.called)
        self.assertFalse(_pre_setup.called)

    def test_debug_cleanup(self, _pre_setup, _post_teardown):
        """Simple debug run without errors."""
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('simple_test'))
        test_suite.debug()
        _pre_setup.assert_called_once_with()
        _post_teardown.assert_called_once_with()

    def test_debug_bubbles_error(self, _pre_setup, _post_teardown):
        """debug() bubbles up exceptions before cleanup."""
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('raising_test'))
        msg = 'debug() bubbles up exceptions before cleanup.'
        with self.assertRaisesMessage(Exception, msg):
            # This is the same as test_suite.debug().
            result = _DebugResult()
            test_suite.run(result, debug=True)
        # pre-setup is called but not post-teardown.
        _pre_setup.assert_called_once_with()
        self.assertFalse(_post_teardown.called)
        self.isolate_debug_test(test_suite, result)

    def test_debug_bubbles_pre_setup_error(self, _pre_setup, _post_teardown):
        """debug() bubbles up exceptions during _pre_setup."""
        msg = 'Exception in _pre_setup.'
        _pre_setup.side_effect = Exception(msg)
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('simple_test'))
        with self.assertRaisesMessage(Exception, msg):
            # This is the same as test_suite.debug().
            result = _DebugResult()
            test_suite.run(result, debug=True)
        # pre-setup is called but not post-teardown.
        _pre_setup.assert_called_once_with()
        self.assertFalse(_post_teardown.called)
        self.isolate_debug_test(test_suite, result)

    def test_debug_bubbles_post_teardown_error(self, _pre_setup, _post_teardown):
        """debug() bubbles up exceptions during _post_teardown."""
        msg = 'Exception in _post_teardown.'
        _post_teardown.side_effect = Exception(msg)
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('simple_test'))
        with self.assertRaisesMessage(Exception, msg):
            # This is the same as test_suite.debug().
            result = _DebugResult()
            test_suite.run(result, debug=True)
        # pre-setup and post-teardown are called.
        _pre_setup.assert_called_once_with()
        _post_teardown.assert_called_once_with()
        self.isolate_debug_test(test_suite, result)

    def test_debug_skipped_test_no_cleanup(self, _pre_setup, _post_teardown):
        test_suite = unittest.TestSuite()
        test_suite.addTest(ErrorTestCase('skipped_test'))
        with self.assertRaisesMessage(unittest.SkipTest, 'Skip condition.'):
            # This is the same as test_suite.debug().
            result = _DebugResult()
            test_suite.run(result, debug=True)
        self.assertFalse(_post_teardown.called)
        self.assertFalse(_pre_setup.called)
        self.isolate_debug_test(test_suite, result)
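
# A hedged, standalone illustration (not part of this module) of the unittest
# behavior exercised above: TestSuite.run() records exceptions on the
# TestResult, while debug() lets them propagate to the caller. The Boom class
# below is an assumption for demonstration only.
#
#   import unittest
#
#   class Boom(unittest.TestCase):
#       def test_boom(self):
#           raise Exception('boom')
#
#   suite = unittest.TestSuite([Boom('test_boom')])
#   result = unittest.TestResult()
#   suite.run(result)    # nothing raised here; len(result.errors) == 1
#   # suite.debug()      # would raise Exception('boom') directly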
import decimal import enum import json import unittest import uuid from django import forms from django.core import checks, exceptions, serializers, validators from django.core.exceptions import FieldError from django.core.management import call_command from django.db import IntegrityError, connection, models from django.db.models.expressions import RawSQL from django.db.models.functions import Cast from django.test import TransactionTestCase, modify_settings, override_settings from django.test.utils import isolate_apps from django.utils import timezone from . import ( PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase, ) from .models import ( ArrayEnumModel, ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel, NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel, PostgreSQLModel, Tag, ) try: from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields.array import IndexTransform, SliceTransform from django.contrib.postgres.forms import ( SimpleArrayField, SplitArrayField, SplitArrayWidget, ) from psycopg2.extras import NumericRange except ImportError: pass class TestSaveLoad(PostgreSQLTestCase): def test_integer(self): instance = IntegerArrayModel(field=[1, 2, 3]) instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_char(self): instance = CharArrayModel(field=['hello', 'goodbye']) instance.save() loaded = CharArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_dates(self): instance = DateTimeArrayModel( datetimes=[timezone.now()], dates=[timezone.now().date()], times=[timezone.now().time()], ) instance.save() loaded = DateTimeArrayModel.objects.get() self.assertEqual(instance.datetimes, loaded.datetimes) self.assertEqual(instance.dates, loaded.dates) self.assertEqual(instance.times, loaded.times) def test_tuples(self): instance = IntegerArrayModel(field=(1,)) instance.save() loaded = IntegerArrayModel.objects.get() self.assertSequenceEqual(instance.field, loaded.field) def test_integers_passed_as_strings(self): # This checks that get_prep_value is deferred properly instance = IntegerArrayModel(field=['1']) instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(loaded.field, [1]) def test_default_null(self): instance = NullableIntegerArrayModel() instance.save() loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk) self.assertIsNone(loaded.field) self.assertEqual(instance.field, loaded.field) def test_null_handling(self): instance = NullableIntegerArrayModel(field=None) instance.save() loaded = NullableIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) instance = IntegerArrayModel(field=None) with self.assertRaises(IntegrityError): instance.save() def test_nested(self): instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]]) instance.save() loaded = NestedIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_other_array_types(self): instance = OtherTypesArrayModel( ips=['192.168.0.1', '::1'], uuids=[uuid.uuid4()], decimals=[decimal.Decimal(1.25), 1.75], tags=[Tag(1), Tag(2), Tag(3)], json=[{'a': 1}, {'b': 2}], int_ranges=[NumericRange(10, 20), NumericRange(30, 40)], bigint_ranges=[ NumericRange(7000000000, 10000000000), NumericRange(50000000000, 70000000000), ] ) instance.save() loaded = OtherTypesArrayModel.objects.get() self.assertEqual(instance.ips, loaded.ips) 
self.assertEqual(instance.uuids, loaded.uuids) self.assertEqual(instance.decimals, loaded.decimals) self.assertEqual(instance.tags, loaded.tags) self.assertEqual(instance.json, loaded.json) self.assertEqual(instance.int_ranges, loaded.int_ranges) self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges) def test_null_from_db_value_handling(self): instance = OtherTypesArrayModel.objects.create( ips=['192.168.0.1', '::1'], uuids=[uuid.uuid4()], decimals=[decimal.Decimal(1.25), 1.75], tags=None, ) instance.refresh_from_db() self.assertIsNone(instance.tags) self.assertEqual(instance.json, []) self.assertIsNone(instance.int_ranges) self.assertIsNone(instance.bigint_ranges) def test_model_set_on_base_field(self): instance = IntegerArrayModel() field = instance._meta.get_field('field') self.assertEqual(field.model, IntegerArrayModel) self.assertEqual(field.base_field.model, IntegerArrayModel) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = NullableIntegerArrayModel.objects.bulk_create([ NullableIntegerArrayModel(field=[1]), NullableIntegerArrayModel(field=[2]), NullableIntegerArrayModel(field=[2, 3]), NullableIntegerArrayModel(field=[20, 30, 40]), NullableIntegerArrayModel(field=None), ]) def test_empty_list(self): NullableIntegerArrayModel.objects.create(field=[]) obj = NullableIntegerArrayModel.objects.annotate( empty_array=models.Value([], output_field=ArrayField(models.IntegerField())), ).filter(field=models.F('empty_array')).get() self.assertEqual(obj.field, []) self.assertEqual(obj.empty_array, []) def test_exact(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__exact=[1]), self.objs[:1] ) def test_exact_charfield(self): instance = CharArrayModel.objects.create(field=['text']) self.assertSequenceEqual( CharArrayModel.objects.filter(field=['text']), [instance] ) def test_exact_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]), [instance] ) def test_isnull(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__isnull=True), self.objs[-1:] ) def test_gt(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__gt=[0]), self.objs[:4] ) def test_lt(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__lt=[2]), self.objs[:1] ) def test_in(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]), self.objs[:2] ) def test_in_subquery(self): IntegerArrayModel.objects.create(field=[2, 3]) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( field__in=IntegerArrayModel.objects.all().values_list('field', flat=True) ), self.objs[2:3] ) @unittest.expectedFailure def test_in_including_F_object(self): # This test asserts that Array objects passed to filters can be # constructed to contain F objects. This currently doesn't work as the # psycopg2 mogrify method that generates the ARRAY() syntax is # expecting literals, not column references (#27095). 
self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]), self.objs[:2] ) def test_in_as_F_object(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]), self.objs[:4] ) def test_contained_by(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]), self.objs[:2] ) @unittest.expectedFailure def test_contained_by_including_F_object(self): # This test asserts that Array objects passed to filters can be # constructed to contain F objects. This currently doesn't work as the # psycopg2 mogrify method that generates the ARRAY() syntax is # expecting literals, not column references (#27095). self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('id'), 2]), self.objs[:2] ) def test_contains(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contains=[2]), self.objs[1:3] ) def test_icontains(self): # Using the __icontains lookup with ArrayField is inefficient. instance = CharArrayModel.objects.create(field=['FoO']) self.assertSequenceEqual( CharArrayModel.objects.filter(field__icontains='foo'), [instance] ) def test_contains_charfield(self): # Regression for #22907 self.assertSequenceEqual( CharArrayModel.objects.filter(field__contains=['text']), [] ) def test_contained_by_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__contained_by=['text']), [] ) def test_overlap_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__overlap=['text']), [] ) def test_lookups_autofield_array(self): qs = NullableIntegerArrayModel.objects.filter( field__0__isnull=False, ).values('field__0').annotate( arrayagg=ArrayAgg('id'), ).order_by('field__0') tests = ( ('contained_by', [self.objs[1].pk, self.objs[2].pk, 0], [2]), ('contains', [self.objs[2].pk], [2]), ('exact', [self.objs[3].pk], [20]), ('overlap', [self.objs[1].pk, self.objs[3].pk], [2, 20]), ) for lookup, value, expected in tests: with self.subTest(lookup=lookup): self.assertSequenceEqual( qs.filter( **{'arrayagg__' + lookup: value}, ).values_list('field__0', flat=True), expected, ) def test_index(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0=2), self.objs[1:3] ) def test_index_chained(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0__lt=3), self.objs[0:3] ) def test_index_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0=1), [instance] ) @unittest.expectedFailure def test_index_used_on_nested_data(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), [instance] ) def test_index_transform_expression(self): expr = RawSQL("string_to_array(%s, ';')", ['1;2']) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( field__0=Cast( IndexTransform(1, models.IntegerField, expr), output_field=models.IntegerField(), ), ), self.objs[:1], ) def test_overlap(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]), self.objs[0:3] ) def test_len(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__len__lte=2), self.objs[0:3] ) def test_len_empty_array(self): obj = NullableIntegerArrayModel.objects.create(field=[]) self.assertSequenceEqual( 
NullableIntegerArrayModel.objects.filter(field__len=0), [obj] ) def test_slice(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_1=[2]), self.objs[1:3] ) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), self.objs[2:3] ) def test_order_by_slice(self): more_objs = ( NullableIntegerArrayModel.objects.create(field=[1, 637]), NullableIntegerArrayModel.objects.create(field=[2, 1]), NullableIntegerArrayModel.objects.create(field=[3, -98123]), NullableIntegerArrayModel.objects.create(field=[4, 2]), ) self.assertSequenceEqual( NullableIntegerArrayModel.objects.order_by('field__1'), [ more_objs[2], more_objs[1], more_objs[3], self.objs[2], self.objs[3], more_objs[0], self.objs[4], self.objs[1], self.objs[0], ] ) @unittest.expectedFailure def test_slice_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), [instance] ) def test_slice_transform_expression(self): expr = RawSQL("string_to_array(%s, ';')", ['9;2;3']) self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_2=SliceTransform(2, 3, expr)), self.objs[2:3], ) def test_usage_in_subquery(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( id__in=NullableIntegerArrayModel.objects.filter(field__len=3) ), [self.objs[3]] ) def test_enum_lookup(self): class TestEnum(enum.Enum): VALUE_1 = 'value_1' instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1]) self.assertSequenceEqual( ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]), [instance] ) def test_unsupported_lookup(self): msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted." with self.assertRaisesMessage(FieldError, msg): list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2])) msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted." 
with self.assertRaisesMessage(FieldError, msg): list(NullableIntegerArrayModel.objects.filter(field__0bar=[2])) def test_grouping_by_annotations_with_array_field_param(self): value = models.Value([1], output_field=ArrayField(models.IntegerField())) self.assertEqual( NullableIntegerArrayModel.objects.annotate( array_length=models.Func(value, 1, function='ARRAY_LENGTH'), ).values('array_length').annotate( count=models.Count('pk'), ).get()['array_length'], 1, ) class TestDateTimeExactQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): now = timezone.now() cls.datetimes = [now] cls.dates = [now.date()] cls.times = [now.time()] cls.objs = [ DateTimeArrayModel.objects.create(datetimes=cls.datetimes, dates=cls.dates, times=cls.times), ] def test_exact_datetimes(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(datetimes=self.datetimes), self.objs ) def test_exact_dates(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(dates=self.dates), self.objs ) def test_exact_times(self): self.assertSequenceEqual( DateTimeArrayModel.objects.filter(times=self.times), self.objs ) class TestOtherTypesExactQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.ips = ['192.168.0.1', '::1'] cls.uuids = [uuid.uuid4()] cls.decimals = [decimal.Decimal(1.25), 1.75] cls.tags = [Tag(1), Tag(2), Tag(3)] cls.objs = [ OtherTypesArrayModel.objects.create( ips=cls.ips, uuids=cls.uuids, decimals=cls.decimals, tags=cls.tags, ) ] def test_exact_ip_addresses(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(ips=self.ips), self.objs ) def test_exact_uuids(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(uuids=self.uuids), self.objs ) def test_exact_decimals(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(decimals=self.decimals), self.objs ) def test_exact_tags(self): self.assertSequenceEqual( OtherTypesArrayModel.objects.filter(tags=self.tags), self.objs ) @isolate_apps('postgres_tests') class TestChecks(PostgreSQLSimpleTestCase): def test_field_checks(self): class MyModel(PostgreSQLModel): field = ArrayField(models.CharField()) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) # The inner CharField is missing a max_length. self.assertEqual(errors[0].id, 'postgres.E001') self.assertIn('max_length', errors[0].msg) def test_invalid_base_fields(self): class MyModel(PostgreSQLModel): field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel')) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) self.assertEqual(errors[0].id, 'postgres.E002') def test_invalid_default(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=[]) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "ArrayField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." ), hint='Use a callable instead, e.g., use `list` instead of `[]`.', obj=MyModel._meta.get_field('field'), id='fields.E010', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=list) model = MyModel() self.assertEqual(model.check(), []) def test_valid_default_none(self): class MyModel(PostgreSQLModel): field = ArrayField(models.IntegerField(), default=None) model = MyModel() self.assertEqual(model.check(), []) def test_nested_field_checks(self): """ Nested ArrayFields are permitted. 
""" class MyModel(PostgreSQLModel): field = ArrayField(ArrayField(models.CharField())) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) # The inner CharField is missing a max_length. self.assertEqual(errors[0].id, 'postgres.E001') self.assertIn('max_length', errors[0].msg) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests") class TestMigrations(TransactionTestCase): available_apps = ['postgres_tests'] def test_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(type(new.base_field), type(field.base_field)) self.assertIsNot(new.base_field, field.base_field) def test_deconstruct_with_size(self): field = ArrayField(models.IntegerField(), size=3) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.size, field.size) def test_deconstruct_args(self): field = ArrayField(models.CharField(max_length=20)) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.base_field.max_length, field.base_field.max_length) def test_subclass_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField') field = ArrayFieldSubclass() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass') @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_default_migrations", }) def test_adding_field_with_default(self): # See #22962 table_name = 'postgres_tests_integerarraydefaultmodel' with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: self.assertIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_index_migrations", }) def test_adding_arrayfield_with_index(self): """ ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes. """ table_name = 'postgres_tests_chartextarrayindexmodel' call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: like_constraint_columns_list = [ v['columns'] for k, v in list(connection.introspection.get_constraints(cursor, table_name).items()) if k.endswith('_like') ] # Only the CharField should have a LIKE index. self.assertEqual(like_constraint_columns_list, [['char2']]) # All fields should have regular indexes. 
with connection.cursor() as cursor: indexes = [ c['columns'][0] for c in connection.introspection.get_constraints(cursor, table_name).values() if c['index'] and len(c['columns']) == 1 ] self.assertIn('char', indexes) self.assertIn('char2', indexes) self.assertIn('text', indexes) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) class TestSerialization(PostgreSQLSimpleTestCase): test_data = ( '[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]' ) def test_dumping(self): instance = IntegerArrayModel(field=[1, 2, None]) data = serializers.serialize('json', [instance]) self.assertEqual(json.loads(data), json.loads(self.test_data)) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.field, [1, 2, None]) class TestValidation(PostgreSQLSimpleTestCase): def test_unbounded(self): field = ArrayField(models.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, None], None) self.assertEqual(cm.exception.code, 'item_invalid') self.assertEqual( cm.exception.message % cm.exception.params, 'Item 2 in the array did not validate: This field cannot be null.' ) def test_blank_true(self): field = ArrayField(models.IntegerField(blank=True, null=True)) # This should not raise a validation error field.clean([1, None], None) def test_with_size(self): field = ArrayField(models.IntegerField(), size=3) field.clean([1, 2, 3], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, 2, 3, 4], None) self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.') def test_nested_array_mismatch(self): field = ArrayField(ArrayField(models.IntegerField())) field.clean([[1, 2], [3, 4]], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([[1, 2], [3, 4, 5]], None) self.assertEqual(cm.exception.code, 'nested_array_mismatch') self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.') def test_with_base_field_error_params(self): field = ArrayField(models.CharField(max_length=2)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['abc'], None) self.assertEqual(len(cm.exception.error_list), 1) exception = cm.exception.error_list[0] self.assertEqual( exception.message, 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).' ) self.assertEqual(exception.code, 'item_invalid') self.assertEqual(exception.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3}) def test_with_validators(self): field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)])) field.clean([1, 2], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([0], None) self.assertEqual(len(cm.exception.error_list), 1) exception = cm.exception.error_list[0] self.assertEqual( exception.message, 'Item 1 in the array did not validate: Ensure this value is greater than or equal to 1.' 
) self.assertEqual(exception.code, 'item_invalid') self.assertEqual(exception.params, {'nth': 1, 'value': 0, 'limit_value': 1, 'show_value': 0}) class TestSimpleFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = SimpleArrayField(forms.CharField()) value = field.clean('a,b,c') self.assertEqual(value, ['a', 'b', 'c']) def test_to_python_fail(self): field = SimpleArrayField(forms.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,9') self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a whole number.') def test_validate_fail(self): field = SimpleArrayField(forms.CharField(required=True)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,') self.assertEqual(cm.exception.messages[0], 'Item 3 in the array did not validate: This field is required.') def test_validate_fail_base_field_error_params(self): field = SimpleArrayField(forms.CharField(max_length=2)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('abc,c,defg') errors = cm.exception.error_list self.assertEqual(len(errors), 2) first_error = errors[0] self.assertEqual( first_error.message, 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).' ) self.assertEqual(first_error.code, 'item_invalid') self.assertEqual(first_error.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3}) second_error = errors[1] self.assertEqual( second_error.message, 'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).' ) self.assertEqual(second_error.code, 'item_invalid') self.assertEqual(second_error.params, {'nth': 3, 'value': 'defg', 'limit_value': 2, 'show_value': 4}) def test_validators_fail(self): field = SimpleArrayField(forms.RegexField('[a-e]{2}')) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,bc,de') self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a valid value.') def test_delimiter(self): field = SimpleArrayField(forms.CharField(), delimiter='|') value = field.clean('a|b|c') self.assertEqual(value, ['a', 'b', 'c']) def test_delimiter_with_nesting(self): field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|') value = field.clean('a,b|c,d') self.assertEqual(value, [['a', 'b'], ['c', 'd']]) def test_prepare_value(self): field = SimpleArrayField(forms.CharField()) value = field.prepare_value(['a', 'b', 'c']) self.assertEqual(value, 'a,b,c') def test_max_length(self): field = SimpleArrayField(forms.CharField(), max_length=2) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.') def test_min_length(self): field = SimpleArrayField(forms.CharField(), min_length=4) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.') def test_required(self): field = SimpleArrayField(forms.CharField(), required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('') self.assertEqual(cm.exception.messages[0], 'This field is required.') def test_model_field_formfield(self): model_field = ArrayField(models.CharField(max_length=27)) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertIsInstance(form_field.base_field, forms.CharField) 
self.assertEqual(form_field.base_field.max_length, 27) def test_model_field_formfield_size(self): model_field = ArrayField(models.CharField(max_length=27), size=4) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertEqual(form_field.max_length, 4) def test_model_field_choices(self): model_field = ArrayField(models.IntegerField(choices=((1, 'A'), (2, 'B')))) form_field = model_field.formfield() self.assertEqual(form_field.clean('1,2'), [1, 2]) def test_already_converted_value(self): field = SimpleArrayField(forms.CharField()) vals = ['a', 'b', 'c'] self.assertEqual(field.clean(vals), vals) def test_has_changed(self): field = SimpleArrayField(forms.IntegerField()) self.assertIs(field.has_changed([1, 2], [1, 2]), False) self.assertIs(field.has_changed([1, 2], '1,2'), False) self.assertIs(field.has_changed([1, 2], '1,2,3'), True) self.assertIs(field.has_changed([1, 2], 'a,b'), True) def test_has_changed_empty(self): field = SimpleArrayField(forms.CharField()) self.assertIs(field.has_changed(None, None), False) self.assertIs(field.has_changed(None, ''), False) self.assertIs(field.has_changed(None, []), False) self.assertIs(field.has_changed([], None), False) self.assertIs(field.has_changed([], ''), False) class TestSplitFormField(PostgreSQLSimpleTestCase): def test_valid(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'} form = SplitForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']}) def test_required(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), required=True, size=3) data = {'array_0': '', 'array_1': '', 'array_2': ''} form = SplitForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['This field is required.']}) def test_remove_trailing_nulls(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True) data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''} form = SplitForm(data) self.assertTrue(form.is_valid(), form.errors) self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']}) def test_remove_trailing_nulls_not_required(self): class SplitForm(forms.Form): array = SplitArrayField( forms.CharField(required=False), size=2, remove_trailing_nulls=True, required=False, ) data = {'array_0': '', 'array_1': ''} form = SplitForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {'array': []}) def test_required_field(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''} form = SplitForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['Item 3 in the array did not validate: This field is required.']}) def test_invalid_integer(self): msg = 'Item 2 in the array did not validate: Ensure this value is less than or equal to 100.' with self.assertRaisesMessage(exceptions.ValidationError, msg): SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101]) # To locate the widget's template. 
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'}) def test_rendering(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) self.assertHTMLEqual(str(SplitForm()), ''' <tr> <th><label for="id_array_0">Array:</label></th> <td> <input id="id_array_0" name="array_0" type="text" required> <input id="id_array_1" name="array_1" type="text" required> <input id="id_array_2" name="array_2" type="text" required> </td> </tr> ''') def test_invalid_char_length(self): field = SplitArrayField(forms.CharField(max_length=2), size=3) with self.assertRaises(exceptions.ValidationError) as cm: field.clean(['abc', 'c', 'defg']) self.assertEqual(cm.exception.messages, [ 'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).', 'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).', ]) def test_splitarraywidget_value_omitted_from_data(self): class Form(forms.ModelForm): field = SplitArrayField(forms.IntegerField(), required=False, size=2) class Meta: model = IntegerArrayModel fields = ('field',) form = Form({'field_0': '1', 'field_1': '2'}) self.assertEqual(form.errors, {}) obj = form.save(commit=False) self.assertEqual(obj.field, [1, 2]) def test_splitarrayfield_has_changed(self): class Form(forms.ModelForm): field = SplitArrayField(forms.IntegerField(), required=False, size=2) class Meta: model = IntegerArrayModel fields = ('field',) obj = IntegerArrayModel(field=[1, 2]) form = Form({'field_0': '1', 'field_1': '2'}, instance=obj) self.assertFalse(form.has_changed()) class TestSplitFormWidget(PostgreSQLWidgetTestCase): def test_get_context(self): self.assertEqual( SplitArrayWidget(forms.TextInput(), size=2).get_context('name', ['val1', 'val2']), { 'widget': { 'name': 'name', 'is_hidden': False, 'required': False, 'value': "['val1', 'val2']", 'attrs': {}, 'template_name': 'postgres/widgets/split_array.html', 'subwidgets': [ { 'name': 'name_0', 'is_hidden': False, 'required': False, 'value': 'val1', 'attrs': {}, 'template_name': 'django/forms/widgets/text.html', 'type': 'text', }, { 'name': 'name_1', 'is_hidden': False, 'required': False, 'value': 'val2', 'attrs': {}, 'template_name': 'django/forms/widgets/text.html', 'type': 'text', }, ] } } ) def test_render(self): self.check_html( SplitArrayWidget(forms.TextInput(), size=2), 'array', None, """ <input name="array_0" type="text"> <input name="array_1" type="text"> """ ) def test_render_attrs(self): self.check_html( SplitArrayWidget(forms.TextInput(), size=2), 'array', ['val1', 'val2'], attrs={'id': 'foo'}, html=( """ <input id="foo_0" name="array_0" type="text" value="val1"> <input id="foo_1" name="array_1" type="text" value="val2"> """ ) ) def test_value_omitted_from_data(self): widget = SplitArrayWidget(forms.TextInput(), size=2) self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True) self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False) self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False) self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
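# A hedged usage sketch, not part of the test suite above: it summarizes the
# ArrayField lookups these tests exercise against a hypothetical model `Post`
# with `tags = ArrayField(models.CharField(max_length=32))`. The function is
# never called here, so it only needs to be import-safe.
def _array_field_usage_sketch(Post):
    # Index transforms are 0-based in the ORM; ArrayField.get_transform()
    # adds 1 because PostgreSQL arrays are 1-indexed.
    first_is_django = Post.objects.filter(tags__0='django')
    # A slice transform `0_2` compiles to tags[1:2] on the PostgreSQL side.
    head = Post.objects.filter(tags__0_2=['a', 'b'])
    # Containment and overlap map to the @> and && array operators.
    has_django = Post.objects.filter(tags__contains=['django'])
    any_of = Post.objects.filter(tags__overlap=['django', 'python'])
    return first_is_django, head, has_django, any_of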
import datetime import operator import uuid from decimal import Decimal from django.core import checks, exceptions, serializers from django.core.serializers.json import DjangoJSONEncoder from django.db import connection from django.db.models import Count, F, OuterRef, Q, Subquery from django.db.models.expressions import RawSQL from django.db.models.functions import Cast from django.forms import CharField, Form, widgets from django.test.utils import CaptureQueriesContext, isolate_apps from django.utils.html import escape from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import JSONModel, PostgreSQLModel try: from django.contrib.postgres import forms from django.contrib.postgres.fields import JSONField from django.contrib.postgres.fields.jsonb import KeyTextTransform, KeyTransform except ImportError: pass class TestModelMetaOrdering(PostgreSQLSimpleTestCase): def test_ordering_by_json_field_value(self): class TestJSONModel(JSONModel): class Meta: ordering = ['field__value'] self.assertEqual(TestJSONModel.check(), []) class TestSaveLoad(PostgreSQLTestCase): def test_null(self): instance = JSONModel() instance.save() loaded = JSONModel.objects.get() self.assertIsNone(loaded.field) def test_empty_object(self): instance = JSONModel(field={}) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, {}) def test_empty_list(self): instance = JSONModel(field=[]) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, []) def test_boolean(self): instance = JSONModel(field=True) instance.save() loaded = JSONModel.objects.get() self.assertIs(loaded.field, True) def test_string(self): instance = JSONModel(field='why?') instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, 'why?') def test_number(self): instance = JSONModel(field=1) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, 1) def test_realistic_object(self): obj = { 'a': 'b', 'c': 1, 'd': ['e', {'f': 'g'}], 'h': True, 'i': False, 'j': None, } instance = JSONModel(field=obj) instance.save() loaded = JSONModel.objects.get() self.assertEqual(loaded.field, obj) def test_custom_encoding(self): """ JSONModel.field_custom has a custom DjangoJSONEncoder. 
""" some_uuid = uuid.uuid4() obj_before = { 'date': datetime.date(2016, 8, 12), 'datetime': datetime.datetime(2016, 8, 12, 13, 44, 47, 575981), 'decimal': Decimal('10.54'), 'uuid': some_uuid, } obj_after = { 'date': '2016-08-12', 'datetime': '2016-08-12T13:44:47.575', 'decimal': '10.54', 'uuid': str(some_uuid), } JSONModel.objects.create(field_custom=obj_before) loaded = JSONModel.objects.get() self.assertEqual(loaded.field_custom, obj_after) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = JSONModel.objects.bulk_create([ JSONModel(field=None), JSONModel(field=True), JSONModel(field=False), JSONModel(field='yes'), JSONModel(field=7), JSONModel(field=[]), JSONModel(field={}), JSONModel(field={ 'a': 'b', 'c': 1, }), JSONModel(field={ 'a': 'b', 'c': 1, 'd': ['e', {'f': 'g'}], 'h': True, 'i': False, 'j': None, 'k': {'l': 'm'}, }), JSONModel(field=[1, [2]]), JSONModel(field={ 'k': True, 'l': False, }), JSONModel(field={ 'foo': 'bar', 'baz': {'a': 'b', 'c': 'd'}, 'bar': ['foo', 'bar'], 'bax': {'foo': 'bar'}, }), ]) def test_exact(self): self.assertSequenceEqual( JSONModel.objects.filter(field__exact={}), [self.objs[6]] ) def test_exact_complex(self): self.assertSequenceEqual( JSONModel.objects.filter(field__exact={'a': 'b', 'c': 1}), [self.objs[7]] ) def test_isnull(self): self.assertSequenceEqual( JSONModel.objects.filter(field__isnull=True), [self.objs[0]] ) def test_ordering_by_transform(self): objs = [ JSONModel.objects.create(field={'ord': 93, 'name': 'bar'}), JSONModel.objects.create(field={'ord': 22.1, 'name': 'foo'}), JSONModel.objects.create(field={'ord': -1, 'name': 'baz'}), JSONModel.objects.create(field={'ord': 21.931902, 'name': 'spam'}), JSONModel.objects.create(field={'ord': -100291029, 'name': 'eggs'}), ] query = JSONModel.objects.filter(field__name__isnull=False).order_by('field__ord') self.assertSequenceEqual(query, [objs[4], objs[2], objs[3], objs[1], objs[0]]) def test_ordering_grouping_by_key_transform(self): base_qs = JSONModel.objects.filter(field__d__0__isnull=False) for qs in ( base_qs.order_by('field__d__0'), base_qs.annotate(key=KeyTransform('0', KeyTransform('d', 'field'))).order_by('key'), ): self.assertSequenceEqual(qs, [self.objs[8]]) qs = JSONModel.objects.filter(field__isnull=False) self.assertQuerysetEqual( qs.values('field__d__0').annotate(count=Count('field__d__0')).order_by('count'), [1, 10], operator.itemgetter('count'), ) self.assertQuerysetEqual( qs.filter(field__isnull=False).annotate( key=KeyTextTransform('f', KeyTransform('1', KeyTransform('d', 'field'))), ).values('key').annotate(count=Count('key')).order_by('count'), [(None, 0), ('g', 1)], operator.itemgetter('key', 'count'), ) def test_key_transform_raw_expression(self): expr = RawSQL('%s::jsonb', ['{"x": "bar"}']) self.assertSequenceEqual( JSONModel.objects.filter(field__foo=KeyTransform('x', expr)), [self.objs[-1]], ) def test_key_transform_expression(self): self.assertSequenceEqual( JSONModel.objects.filter(field__d__0__isnull=False).annotate( key=KeyTransform('d', 'field'), chain=KeyTransform('0', 'key'), expr=KeyTransform('0', Cast('key', JSONField())), ).filter(chain=F('expr')), [self.objs[8]], ) def test_nested_key_transform_raw_expression(self): expr = RawSQL('%s::jsonb', ['{"x": {"y": "bar"}}']) self.assertSequenceEqual( JSONModel.objects.filter(field__foo=KeyTransform('y', KeyTransform('x', expr))), [self.objs[-1]], ) def test_nested_key_transform_expression(self): self.assertSequenceEqual( JSONModel.objects.filter(field__d__0__isnull=False).annotate( 
key=KeyTransform('d', 'field'), chain=KeyTransform('f', KeyTransform('1', 'key')), expr=KeyTransform('f', KeyTransform('1', Cast('key', JSONField()))), ).filter(chain=F('expr')), [self.objs[8]], ) def test_deep_values(self): query = JSONModel.objects.values_list('field__k__l') self.assertSequenceEqual( query, [ (None,), (None,), (None,), (None,), (None,), (None,), (None,), (None,), ('m',), (None,), (None,), (None,), ] ) def test_deep_distinct(self): query = JSONModel.objects.distinct('field__k__l').values_list('field__k__l') self.assertSequenceEqual(query, [('m',), (None,)]) def test_isnull_key(self): # key__isnull works the same as has_key='key'. self.assertSequenceEqual( JSONModel.objects.filter(field__a__isnull=True), self.objs[:7] + self.objs[9:] ) self.assertSequenceEqual( JSONModel.objects.filter(field__a__isnull=False), [self.objs[7], self.objs[8]] ) def test_none_key(self): self.assertSequenceEqual(JSONModel.objects.filter(field__j=None), [self.objs[8]]) def test_none_key_exclude(self): obj = JSONModel.objects.create(field={'j': 1}) self.assertSequenceEqual(JSONModel.objects.exclude(field__j=None), [obj]) def test_isnull_key_or_none(self): obj = JSONModel.objects.create(field={'a': None}) self.assertSequenceEqual( JSONModel.objects.filter(Q(field__a__isnull=True) | Q(field__a=None)), self.objs[:7] + self.objs[9:] + [obj] ) def test_contains(self): self.assertSequenceEqual( JSONModel.objects.filter(field__contains={'a': 'b'}), [self.objs[7], self.objs[8]] ) def test_contained_by(self): self.assertSequenceEqual( JSONModel.objects.filter(field__contained_by={'a': 'b', 'c': 1, 'h': True}), [self.objs[6], self.objs[7]] ) def test_has_key(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_key='a'), [self.objs[7], self.objs[8]] ) def test_has_keys(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_keys=['a', 'c', 'h']), [self.objs[8]] ) def test_has_any_keys(self): self.assertSequenceEqual( JSONModel.objects.filter(field__has_any_keys=['c', 'l']), [self.objs[7], self.objs[8], self.objs[10]] ) def test_shallow_list_lookup(self): self.assertSequenceEqual( JSONModel.objects.filter(field__0=1), [self.objs[9]] ) def test_shallow_obj_lookup(self): self.assertSequenceEqual( JSONModel.objects.filter(field__a='b'), [self.objs[7], self.objs[8]] ) def test_obj_subquery_lookup(self): qs = JSONModel.objects.annotate( value=Subquery(JSONModel.objects.filter(pk=OuterRef('pk')).values('field')), ).filter(value__a='b') self.assertSequenceEqual(qs, [self.objs[7], self.objs[8]]) def test_deep_lookup_objs(self): self.assertSequenceEqual( JSONModel.objects.filter(field__k__l='m'), [self.objs[8]] ) def test_shallow_lookup_obj_target(self): self.assertSequenceEqual( JSONModel.objects.filter(field__k={'l': 'm'}), [self.objs[8]] ) def test_deep_lookup_array(self): self.assertSequenceEqual( JSONModel.objects.filter(field__1__0=2), [self.objs[9]] ) def test_deep_lookup_mixed(self): self.assertSequenceEqual( JSONModel.objects.filter(field__d__1__f='g'), [self.objs[8]] ) def test_deep_lookup_transform(self): self.assertSequenceEqual( JSONModel.objects.filter(field__c__gt=1), [] ) self.assertSequenceEqual( JSONModel.objects.filter(field__c__lt=5), [self.objs[7], self.objs[8]] ) def test_usage_in_subquery(self): self.assertSequenceEqual( JSONModel.objects.filter(id__in=JSONModel.objects.filter(field__c=1)), self.objs[7:9] ) def test_iexact(self): self.assertTrue(JSONModel.objects.filter(field__foo__iexact='BaR').exists()) 
self.assertFalse(JSONModel.objects.filter(field__foo__iexact='"BaR"').exists()) def test_icontains(self): self.assertFalse(JSONModel.objects.filter(field__foo__icontains='"bar"').exists()) def test_startswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__startswith='b').exists()) def test_istartswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__istartswith='B').exists()) def test_endswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__endswith='r').exists()) def test_iendswith(self): self.assertTrue(JSONModel.objects.filter(field__foo__iendswith='R').exists()) def test_regex(self): self.assertTrue(JSONModel.objects.filter(field__foo__regex=r'^bar$').exists()) def test_iregex(self): self.assertTrue(JSONModel.objects.filter(field__foo__iregex=r'^bAr$').exists()) def test_key_sql_injection(self): with CaptureQueriesContext(connection) as queries: self.assertFalse( JSONModel.objects.filter(**{ """field__test' = '"a"') OR 1 = 1 OR ('d""": 'x', }).exists() ) self.assertIn( """."field" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"' """, queries[0]['sql'], ) def test_lookups_with_key_transform(self): tests = ( ('field__d__contains', 'e'), ('field__baz__contained_by', {'a': 'b', 'c': 'd', 'e': 'f'}), ('field__baz__has_key', 'c'), ('field__baz__has_keys', ['a', 'c']), ('field__baz__has_any_keys', ['a', 'x']), ('field__contains', KeyTransform('bax', 'field')), ( 'field__contained_by', KeyTransform('x', RawSQL('%s::jsonb', ['{"x": {"a": "b", "c": 1, "d": "e"}}'])), ), ('field__has_key', KeyTextTransform('foo', 'field')), ) for lookup, value in tests: with self.subTest(lookup=lookup): self.assertTrue(JSONModel.objects.filter( **{lookup: value}, ).exists()) @isolate_apps('postgres_tests') class TestChecks(PostgreSQLSimpleTestCase): def test_invalid_default(self): class MyModel(PostgreSQLModel): field = JSONField(default={}) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "JSONField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." 
), hint='Use a callable instead, e.g., use `dict` instead of `{}`.', obj=MyModel._meta.get_field('field'), id='fields.E010', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = JSONField(default=dict) model = MyModel() self.assertEqual(model.check(), []) def test_valid_default_none(self): class MyModel(PostgreSQLModel): field = JSONField(default=None) model = MyModel() self.assertEqual(model.check(), []) class TestSerialization(PostgreSQLSimpleTestCase): test_data = ( '[{"fields": {"field": %s, "field_custom": null}, ' '"model": "postgres_tests.jsonmodel", "pk": null}]' ) test_values = ( # (Python value, serialized value), ({'a': 'b', 'c': None}, '{"a": "b", "c": null}'), ('abc', '"abc"'), ('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'), ) def test_dumping(self): for value, serialized in self.test_values: with self.subTest(value=value): instance = JSONModel(field=value) data = serializers.serialize('json', [instance]) self.assertJSONEqual(data, self.test_data % serialized) def test_loading(self): for value, serialized in self.test_values: with self.subTest(value=value): instance = list(serializers.deserialize('json', self.test_data % serialized))[0].object self.assertEqual(instance.field, value) class TestValidation(PostgreSQLSimpleTestCase): def test_not_serializable(self): field = JSONField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean(datetime.timedelta(days=1), None) self.assertEqual(cm.exception.code, 'invalid') self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.") def test_custom_encoder(self): with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."): field = JSONField(encoder=DjangoJSONEncoder()) field = JSONField(encoder=DjangoJSONEncoder) self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1)) class TestFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = forms.JSONField() value = field.clean('{"a": "b"}') self.assertEqual(value, {'a': 'b'}) def test_valid_empty(self): field = forms.JSONField(required=False) value = field.clean('') self.assertIsNone(value) def test_invalid(self): field = forms.JSONField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('{some badly formed: json}') self.assertEqual(cm.exception.messages[0], '“{some badly formed: json}” value must be valid JSON.') def test_formfield(self): model_field = JSONField() form_field = model_field.formfield() self.assertIsInstance(form_field, forms.JSONField) def test_formfield_disabled(self): class JsonForm(Form): name = CharField() jfield = forms.JSONField(disabled=True) form = JsonForm({'name': 'xyz', 'jfield': '["bar"]'}, initial={'jfield': ['foo']}) self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p()) def test_prepare_value(self): field = forms.JSONField() self.assertEqual(field.prepare_value({'a': 'b'}), '{"a": "b"}') self.assertEqual(field.prepare_value(None), 'null') self.assertEqual(field.prepare_value('foo'), '"foo"') def test_redisplay_wrong_input(self): """ When displaying a bound form (typically due to invalid input), the form should not overquote JSONField inputs. 
""" class JsonForm(Form): name = CharField(max_length=2) jfield = forms.JSONField() # JSONField input is fine, name is too long form = JsonForm({'name': 'xyz', 'jfield': '["foo"]'}) self.assertIn('[&quot;foo&quot;]</textarea>', form.as_p()) # This time, the JSONField input is wrong form = JsonForm({'name': 'xy', 'jfield': '{"foo"}'}) # Appears once in the textarea and once in the error message self.assertEqual(form.as_p().count(escape('{"foo"}')), 2) def test_widget(self): """The default widget of a JSONField is a Textarea.""" field = forms.JSONField() self.assertIsInstance(field.widget, widgets.Textarea) def test_custom_widget_kwarg(self): """The widget can be overridden with a kwarg.""" field = forms.JSONField(widget=widgets.Input) self.assertIsInstance(field.widget, widgets.Input) def test_custom_widget_attribute(self): """The widget can be overridden with an attribute.""" class CustomJSONField(forms.JSONField): widget = widgets.Input field = CustomJSONField() self.assertIsInstance(field.widget, widgets.Input) def test_already_converted_value(self): field = forms.JSONField(required=False) tests = [ '["a", "b", "c"]', '{"a": 1, "b": 2}', '1', '1.5', '"foo"', 'true', 'false', 'null', ] for json_string in tests: val = field.clean(json_string) self.assertEqual(field.clean(val), val) def test_has_changed(self): field = forms.JSONField() self.assertIs(field.has_changed({'a': True}, '{"a": 1}'), True) self.assertIs(field.has_changed({'a': 1, 'b': 2}, '{"b": 2, "a": 1}'), False)
import json from django.core import checks, exceptions, serializers from django.db import connection from django.db.models.expressions import OuterRef, RawSQL, Subquery from django.forms import Form from django.test.utils import CaptureQueriesContext, isolate_apps from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase from .models import HStoreModel, PostgreSQLModel try: from django.contrib.postgres import forms from django.contrib.postgres.fields import HStoreField from django.contrib.postgres.fields.hstore import KeyTransform from django.contrib.postgres.validators import KeysValidator except ImportError: pass class SimpleTests(PostgreSQLTestCase): def test_save_load_success(self): value = {'a': 'b'} instance = HStoreModel(field=value) instance.save() reloaded = HStoreModel.objects.get() self.assertEqual(reloaded.field, value) def test_null(self): instance = HStoreModel(field=None) instance.save() reloaded = HStoreModel.objects.get() self.assertIsNone(reloaded.field) def test_value_null(self): value = {'a': None} instance = HStoreModel(field=value) instance.save() reloaded = HStoreModel.objects.get() self.assertEqual(reloaded.field, value) def test_key_val_cast_to_string(self): value = {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'} expected_value = {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'} instance = HStoreModel.objects.create(field=value) instance = HStoreModel.objects.get() self.assertEqual(instance.field, expected_value) instance = HStoreModel.objects.get(field__a=1) self.assertEqual(instance.field, expected_value) instance = HStoreModel.objects.get(field__has_keys=[2, 'a', 'ï']) self.assertEqual(instance.field, expected_value) def test_array_field(self): value = [ {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'}, {'a': 1, 'b': 'B', 2: 'c', 'ï': 'ê'}, ] expected_value = [ {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'}, {'a': '1', 'b': 'B', '2': 'c', 'ï': 'ê'}, ] instance = HStoreModel.objects.create(array_field=value) instance.refresh_from_db() self.assertEqual(instance.array_field, expected_value) class TestQuerying(PostgreSQLTestCase): @classmethod def setUpTestData(cls): cls.objs = HStoreModel.objects.bulk_create([ HStoreModel(field={'a': 'b'}), HStoreModel(field={'a': 'b', 'c': 'd'}), HStoreModel(field={'c': 'd'}), HStoreModel(field={}), HStoreModel(field=None), ]) def test_exact(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__exact={'a': 'b'}), self.objs[:1] ) def test_contained_by(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__contained_by={'a': 'b', 'c': 'd'}), self.objs[:4] ) def test_contains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__contains={'a': 'b'}), self.objs[:2] ) def test_in_generator(self): def search(): yield {'a': 'b'} self.assertSequenceEqual( HStoreModel.objects.filter(field__in=search()), self.objs[:1] ) def test_has_key(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_key='c'), self.objs[1:3] ) def test_has_keys(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_keys=['a', 'c']), self.objs[1:2] ) def test_has_any_keys(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__has_any_keys=['a', 'c']), self.objs[:3] ) def test_key_transform(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__a='b'), self.objs[:2] ) def test_key_transform_raw_expression(self): expr = RawSQL('%s::hstore', ['x => b, y => c']) self.assertSequenceEqual( HStoreModel.objects.filter(field__a=KeyTransform('x', expr)), self.objs[:2] ) def test_keys(self): self.assertSequenceEqual( 
HStoreModel.objects.filter(field__keys=['a']), self.objs[:1] ) def test_values(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__values=['b']), self.objs[:1] ) def test_field_chaining(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__a__contains='b'), self.objs[:2] ) def test_order_by_field(self): more_objs = ( HStoreModel.objects.create(field={'g': '637'}), HStoreModel.objects.create(field={'g': '002'}), HStoreModel.objects.create(field={'g': '042'}), HStoreModel.objects.create(field={'g': '981'}), ) self.assertSequenceEqual( HStoreModel.objects.filter(field__has_key='g').order_by('field__g'), [more_objs[1], more_objs[2], more_objs[0], more_objs[3]] ) def test_keys_contains(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__keys__contains=['a']), self.objs[:2] ) def test_values_overlap(self): self.assertSequenceEqual( HStoreModel.objects.filter(field__values__overlap=['b', 'd']), self.objs[:3] ) def test_key_isnull(self): obj = HStoreModel.objects.create(field={'a': None}) self.assertSequenceEqual( HStoreModel.objects.filter(field__a__isnull=True), self.objs[2:5] + [obj] ) self.assertSequenceEqual( HStoreModel.objects.filter(field__a__isnull=False), self.objs[:2] ) def test_usage_in_subquery(self): self.assertSequenceEqual( HStoreModel.objects.filter(id__in=HStoreModel.objects.filter(field__a='b')), self.objs[:2] ) def test_key_sql_injection(self): with CaptureQueriesContext(connection) as queries: self.assertFalse( HStoreModel.objects.filter(**{ "field__test' = 'a') OR 1 = 1 OR ('d": 'x', }).exists() ) self.assertIn( """."field" -> 'test'' = ''a'') OR 1 = 1 OR (''d') = 'x' """, queries[0]['sql'], ) def test_obj_subquery_lookup(self): qs = HStoreModel.objects.annotate( value=Subquery(HStoreModel.objects.filter(pk=OuterRef('pk')).values('field')), ).filter(value__a='b') self.assertSequenceEqual(qs, self.objs[:2]) @isolate_apps('postgres_tests') class TestChecks(PostgreSQLSimpleTestCase): def test_invalid_default(self): class MyModel(PostgreSQLModel): field = HStoreField(default={}) model = MyModel() self.assertEqual(model.check(), [ checks.Warning( msg=( "HStoreField default should be a callable instead of an " "instance so that it's not shared between all field " "instances." 
), hint='Use a callable instead, e.g., use `dict` instead of `{}`.', obj=MyModel._meta.get_field('field'), id='fields.E010', ) ]) def test_valid_default(self): class MyModel(PostgreSQLModel): field = HStoreField(default=dict) self.assertEqual(MyModel().check(), []) class TestSerialization(PostgreSQLSimpleTestCase): test_data = json.dumps([{ 'model': 'postgres_tests.hstoremodel', 'pk': None, 'fields': { 'field': json.dumps({'a': 'b'}), 'array_field': json.dumps([ json.dumps({'a': 'b'}), json.dumps({'b': 'a'}), ]), }, }]) def test_dumping(self): instance = HStoreModel(field={'a': 'b'}, array_field=[{'a': 'b'}, {'b': 'a'}]) data = serializers.serialize('json', [instance]) self.assertEqual(json.loads(data), json.loads(self.test_data)) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.field, {'a': 'b'}) self.assertEqual(instance.array_field, [{'a': 'b'}, {'b': 'a'}]) def test_roundtrip_with_null(self): instance = HStoreModel(field={'a': 'b', 'c': None}) data = serializers.serialize('json', [instance]) new_instance = list(serializers.deserialize('json', data))[0].object self.assertEqual(instance.field, new_instance.field) class TestValidation(PostgreSQLSimpleTestCase): def test_not_a_string(self): field = HStoreField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean({'a': 1}, None) self.assertEqual(cm.exception.code, 'not_a_string') self.assertEqual(cm.exception.message % cm.exception.params, 'The value of “a” is not a string or null.') def test_none_allowed_as_value(self): field = HStoreField() self.assertEqual(field.clean({'a': None}, None), {'a': None}) class TestFormField(PostgreSQLSimpleTestCase): def test_valid(self): field = forms.HStoreField() value = field.clean('{"a": "b"}') self.assertEqual(value, {'a': 'b'}) def test_invalid_json(self): field = forms.HStoreField() with self.assertRaises(exceptions.ValidationError) as cm: field.clean('{"a": "b"') self.assertEqual(cm.exception.messages[0], 'Could not load JSON data.') self.assertEqual(cm.exception.code, 'invalid_json') def test_non_dict_json(self): field = forms.HStoreField() msg = 'Input must be a JSON dictionary.' 
with self.assertRaisesMessage(exceptions.ValidationError, msg) as cm: field.clean('["a", "b", 1]') self.assertEqual(cm.exception.code, 'invalid_format') def test_not_string_values(self): field = forms.HStoreField() value = field.clean('{"a": 1}') self.assertEqual(value, {'a': '1'}) def test_none_value(self): field = forms.HStoreField() value = field.clean('{"a": null}') self.assertEqual(value, {'a': None}) def test_empty(self): field = forms.HStoreField(required=False) value = field.clean('') self.assertEqual(value, {}) def test_model_field_formfield(self): model_field = HStoreField() form_field = model_field.formfield() self.assertIsInstance(form_field, forms.HStoreField) def test_field_has_changed(self): class HStoreFormTest(Form): f1 = forms.HStoreField() form_w_hstore = HStoreFormTest() self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}) self.assertTrue(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}, initial={'f1': '{"a": 1}'}) self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 2}'}, initial={'f1': '{"a": 1}'}) self.assertTrue(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 1}'}, initial={'f1': {"a": 1}}) self.assertFalse(form_w_hstore.has_changed()) form_w_hstore = HStoreFormTest({'f1': '{"a": 2}'}, initial={'f1': {"a": 1}}) self.assertTrue(form_w_hstore.has_changed()) class TestValidator(PostgreSQLSimpleTestCase): def test_simple_valid(self): validator = KeysValidator(keys=['a', 'b']) validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) def test_missing_keys(self): validator = KeysValidator(keys=['a', 'b']) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some keys were missing: b') self.assertEqual(cm.exception.code, 'missing_keys') def test_strict_valid(self): validator = KeysValidator(keys=['a', 'b'], strict=True) validator({'a': 'foo', 'b': 'bar'}) def test_extra_keys(self): validator = KeysValidator(keys=['a', 'b'], strict=True) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c') self.assertEqual(cm.exception.code, 'extra_keys') def test_custom_messages(self): messages = { 'missing_keys': 'Foobar', } validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages) with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Foobar') self.assertEqual(cm.exception.code, 'missing_keys') with self.assertRaises(exceptions.ValidationError) as cm: validator({'a': 'foo', 'b': 'bar', 'c': 'baz'}) self.assertEqual(cm.exception.messages[0], 'Some unknown keys were provided: c') self.assertEqual(cm.exception.code, 'extra_keys') def test_deconstruct(self): messages = { 'missing_keys': 'Foobar', } validator = KeysValidator(keys=['a', 'b'], strict=True, messages=messages) path, args, kwargs = validator.deconstruct() self.assertEqual(path, 'django.contrib.postgres.validators.KeysValidator') self.assertEqual(args, ()) self.assertEqual(kwargs, {'keys': ['a', 'b'], 'strict': True, 'messages': messages})
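# A hedged sketch, not part of the suite above: the HStoreField lookups these
# tests exercise, using the HStoreModel imported at the top of this module.
# It is never called, so it only needs to be import-safe.
def _hstore_lookup_sketch():
    # Keys and values are coerced to strings on save: {'a': 1} -> {'a': '1'}.
    by_key = HStoreModel.objects.filter(field__a='b')            # "field" -> 'a'
    has = HStoreModel.objects.filter(field__has_key='c')         # ? operator
    contains = HStoreModel.objects.filter(field__contains={'a': 'b'})  # @>
    # The keys/values transforms compile to akeys("field") / avals("field").
    keys = HStoreModel.objects.filter(field__keys__contains=['a'])
    return by_key, has, contains, keys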
import gzip import random import re import struct from io import BytesIO from urllib.parse import quote from django.conf import settings from django.core import mail from django.core.exceptions import PermissionDenied from django.http import ( FileResponse, HttpRequest, HttpResponse, HttpResponseNotFound, HttpResponsePermanentRedirect, HttpResponseRedirect, StreamingHttpResponse, ) from django.middleware.clickjacking import XFrameOptionsMiddleware from django.middleware.common import ( BrokenLinkEmailsMiddleware, CommonMiddleware, ) from django.middleware.gzip import GZipMiddleware from django.middleware.http import ConditionalGetMiddleware from django.test import RequestFactory, SimpleTestCase, override_settings int2byte = struct.Struct(">B").pack @override_settings(ROOT_URLCONF='middleware.urls') class CommonMiddlewareTest(SimpleTestCase): rf = RequestFactory() @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash(self): """ URLs with slashes should go unmolested. """ request = self.rf.get('/slash/') self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/noslash') self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown(self): """ APPEND_SLASH should not redirect to unknown resources. """ request = self.rf.get('/unknown') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. """ request = self.rf.get('/slash') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_querystring(self): """ APPEND_SLASH should preserve querystrings when redirecting. """ request = self.rf.get('/slash?test=1') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.url, '/slash/?test=1') @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_querystring_have_slash(self): """ APPEND_SLASH should append slash to path when redirecting a request with a querystring ending with slash. """ request = self.rf.get('/slash?test=slash/') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsInstance(r, HttpResponsePermanentRedirect) self.assertEqual(r.url, '/slash/?test=slash/') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG(self): """ While in debug mode, an exception is raised with a warning when a failed attempt is made to POST, PUT, or PATCH to an URL which would normally be redirected to a slashed version. """ msg = "maintaining %s data. 
Change your form to point to testserver/slash/" request = self.rf.get('/slash') request.method = 'POST' response = HttpResponseNotFound() with self.assertRaisesMessage(RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PUT' with self.assertRaisesMessage(RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) request = self.rf.get('/slash') request.method = 'PATCH' with self.assertRaisesMessage(RuntimeError, msg % request.method): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled(self): """ Disabling append slash functionality should leave slashless URLs alone. """ request = self.rf.get('/slash') response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted(self): """ URLs which require quoting should be redirected to their slash version. """ request = self.rf.get(quote('/needsquoting#')) response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/needsquoting%23/') @override_settings(APPEND_SLASH=True) def test_append_slash_leading_slashes(self): """ Paths starting with two slashes are escaped to prevent open redirects. If there's a URL pattern that allows paths to start with two slashes, a request with path //evil.com must not redirect to //evil.com/ (appended slash) which is a schemaless absolute URL. The browser would navigate to evil.com/. """ # Use 4 slashes because of RequestFactory behavior. request = self.rf.get('////evil.com/security') response = HttpResponseNotFound() r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/%2Fevil.com/security/') r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/%2Fevil.com/security/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www(self): request = self.rf.get('/path/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash(self): request = self.rf.get('/slash/') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless(self): request = self.rf.get('/slash') r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/slash/') # The following tests examine expected behavior given a custom URLconf that # overrides the default one through the request object. @override_settings(APPEND_SLASH=True) def test_append_slash_have_slash_custom_urlconf(self): """ URLs with slashes should go unmolested. 
""" request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_resource_custom_urlconf(self): """ Matches to explicit slashless URLs should go unmolested. """ request = self.rf.get('/customurlconf/noslash') request.urlconf = 'middleware.extra_urls' self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponse("Here's the text of the Web page.") self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_slashless_unknown_custom_urlconf(self): """ APPEND_SLASH should not redirect to unknown resources. """ request = self.rf.get('/customurlconf/unknown') request.urlconf = 'middleware.extra_urls' self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_redirect_custom_urlconf(self): """ APPEND_SLASH should redirect slashless URLs to a valid pattern. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/customurlconf/slash/') @override_settings(APPEND_SLASH=True, DEBUG=True) def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self): """ While in debug mode, an exception is raised with a warning when a failed attempt is made to POST to an URL which would normally be redirected to a slashed version. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' request.method = 'POST' response = HttpResponseNotFound() with self.assertRaisesMessage(RuntimeError, 'end in a slash'): CommonMiddleware().process_response(request, response) @override_settings(APPEND_SLASH=False) def test_append_slash_disabled_custom_urlconf(self): """ Disabling append slash functionality should leave slashless URLs alone. """ request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' self.assertIsNone(CommonMiddleware().process_request(request)) response = HttpResponseNotFound() self.assertEqual(CommonMiddleware().process_response(request, response), response) @override_settings(APPEND_SLASH=True) def test_append_slash_quoted_custom_urlconf(self): """ URLs which require quoting should be redirected to their slash version. 
""" request = self.rf.get(quote('/customurlconf/needsquoting#')) request.urlconf = 'middleware.extra_urls' response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertIsNotNone(r, "CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf") self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/customurlconf/needsquoting%23/') @override_settings(APPEND_SLASH=False, PREPEND_WWW=True) def test_prepend_www_custom_urlconf(self): request = self.rf.get('/customurlconf/path/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/path/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_have_slash_custom_urlconf(self): request = self.rf.get('/customurlconf/slash/') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') @override_settings(APPEND_SLASH=True, PREPEND_WWW=True) def test_prepend_www_append_slash_slashless_custom_urlconf(self): request = self.rf.get('/customurlconf/slash') request.urlconf = 'middleware.extra_urls' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, 'http://www.testserver/customurlconf/slash/') # Tests for the Content-Length header def test_content_length_header_added(self): response = HttpResponse('content') self.assertNotIn('Content-Length', response) response = CommonMiddleware().process_response(HttpRequest(), response) self.assertEqual(int(response['Content-Length']), len(response.content)) def test_content_length_header_not_added_for_streaming_response(self): response = StreamingHttpResponse('content') self.assertNotIn('Content-Length', response) response = CommonMiddleware().process_response(HttpRequest(), response) self.assertNotIn('Content-Length', response) def test_content_length_header_not_changed(self): response = HttpResponse() bad_content_length = len(response.content) + 10 response['Content-Length'] = bad_content_length response = CommonMiddleware().process_response(HttpRequest(), response) self.assertEqual(int(response['Content-Length']), bad_content_length) # Other tests @override_settings(DISALLOWED_USER_AGENTS=[re.compile(r'foo')]) def test_disallowed_user_agents(self): request = self.rf.get('/slash') request.META['HTTP_USER_AGENT'] = 'foo' with self.assertRaisesMessage(PermissionDenied, 'Forbidden user agent'): CommonMiddleware().process_request(request) def test_non_ascii_query_string_does_not_crash(self): """Regression test for #15152""" request = self.rf.get('/slash') request.META['QUERY_STRING'] = 'drink=café' r = CommonMiddleware().process_request(request) self.assertEqual(r.status_code, 301) def test_response_redirect_class(self): request = self.rf.get('/slash') response = HttpResponseNotFound() r = CommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 301) self.assertEqual(r.url, '/slash/') self.assertIsInstance(r, HttpResponsePermanentRedirect) def test_response_redirect_class_subclass(self): class MyCommonMiddleware(CommonMiddleware): response_redirect_class = HttpResponseRedirect request = self.rf.get('/slash') response = HttpResponseNotFound() r = MyCommonMiddleware().process_response(request, response) self.assertEqual(r.status_code, 302) self.assertEqual(r.url, 
'/slash/') self.assertIsInstance(r, HttpResponseRedirect) @override_settings( IGNORABLE_404_URLS=[re.compile(r'foo')], MANAGERS=[('PHD', '[email protected]')], ) class BrokenLinkEmailsMiddlewareTest(SimpleTestCase): rf = RequestFactory() def setUp(self): self.req = self.rf.get('/regular_url/that/does/not/exist') self.resp = self.client.get(self.req.path) def test_404_error_reporting(self): self.req.META['HTTP_REFERER'] = '/another/url/' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) self.assertIn('Broken', mail.outbox[0].subject) def test_404_error_reporting_no_referer(self): BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def test_404_error_reporting_ignored_url(self): self.req.path = self.req.path_info = 'foo_url/that/does/not/exist' BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) def test_custom_request_checker(self): class SubclassedMiddleware(BrokenLinkEmailsMiddleware): ignored_user_agent_patterns = (re.compile(r'Spider.*'), re.compile(r'Robot.*')) def is_ignorable_request(self, request, uri, domain, referer): '''Check user-agent in addition to normal checks.''' if super().is_ignorable_request(request, uri, domain, referer): return True user_agent = request.META['HTTP_USER_AGENT'] return any(pattern.search(user_agent) for pattern in self.ignored_user_agent_patterns) self.req.META['HTTP_REFERER'] = '/another/url/' self.req.META['HTTP_USER_AGENT'] = 'Spider machine 3.4' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) self.req.META['HTTP_USER_AGENT'] = 'My user agent' SubclassedMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 1) def test_referer_equal_to_requested_url(self): """ Some bots set the referer to the current URL to avoid being blocked by an referer check (#25302). """ self.req.META['HTTP_REFERER'] = self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) # URL with scheme and domain should also be ignored self.req.META['HTTP_REFERER'] = 'http://testserver%s' % self.req.path BrokenLinkEmailsMiddleware().process_response(self.req, self.resp) self.assertEqual(len(mail.outbox), 0) # URL with a different scheme should be ignored as well because bots # tend to use http:// in referers even when browsing HTTPS websites. 
        self.req.META['HTTP_X_PROTO'] = 'https'
        self.req.META['SERVER_PORT'] = 443
        with self.settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_PROTO', 'https')):
            BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 0)

    def test_referer_equal_to_requested_url_on_another_domain(self):
        self.req.META['HTTP_REFERER'] = 'http://anotherserver%s' % self.req.path
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)

    @override_settings(APPEND_SLASH=True)
    def test_referer_equal_to_requested_url_without_trailing_slash_when_append_slash_is_set(self):
        self.req.path = self.req.path_info = '/regular_url/that/does/not/exist/'
        self.req.META['HTTP_REFERER'] = self.req.path_info[:-1]
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 0)

    @override_settings(APPEND_SLASH=False)
    def test_referer_equal_to_requested_url_without_trailing_slash_when_append_slash_is_unset(self):
        self.req.path = self.req.path_info = '/regular_url/that/does/not/exist/'
        self.req.META['HTTP_REFERER'] = self.req.path_info[:-1]
        BrokenLinkEmailsMiddleware().process_response(self.req, self.resp)
        self.assertEqual(len(mail.outbox), 1)


@override_settings(ROOT_URLCONF='middleware.cond_get_urls')
class ConditionalGetMiddlewareTest(SimpleTestCase):
    request_factory = RequestFactory()

    def setUp(self):
        self.req = self.request_factory.get('/')
        self.resp = self.client.get(self.req.path_info)

    # Tests for the ETag header

    def test_middleware_calculates_etag(self):
        self.assertNotIn('ETag', self.resp)
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)
        self.assertNotEqual('', self.resp['ETag'])

    def test_middleware_wont_overwrite_etag(self):
        self.resp['ETag'] = 'eggs'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)
        self.assertEqual('eggs', self.resp['ETag'])

    def test_no_etag_streaming_response(self):
        res = StreamingHttpResponse(['content'])
        self.assertFalse(ConditionalGetMiddleware().process_response(self.req, res).has_header('ETag'))

    def test_no_etag_response_empty_content(self):
        res = HttpResponse()
        self.assertFalse(
            ConditionalGetMiddleware().process_response(self.req, res).has_header('ETag')
        )

    def test_no_etag_no_store_cache(self):
        self.resp['Cache-Control'] = 'No-Cache, No-Store, Max-age=0'
        self.assertFalse(ConditionalGetMiddleware().process_response(self.req, self.resp).has_header('ETag'))

    def test_etag_extended_cache_control(self):
        self.resp['Cache-Control'] = 'my-directive="my-no-store"'
        self.assertTrue(ConditionalGetMiddleware().process_response(self.req, self.resp).has_header('ETag'))

    def test_if_none_match_and_no_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_no_if_none_match_and_etag(self):
        self.resp['ETag'] = 'eggs'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_none_match_and_same_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = '"spam"'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_none_match_and_different_etag(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = 'spam'
        self.resp['ETag'] = 'eggs'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_none_match_and_redirect(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
        self.resp['Location'] = '/'
        self.resp.status_code = 301
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 301)

    def test_if_none_match_and_client_error(self):
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = 'spam'
        self.resp.status_code = 400
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 400)

    # Tests for the Last-Modified header

    def test_if_modified_since_and_no_last_modified(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_no_if_modified_since_and_last_modified(self):
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_modified_since_and_same_last_modified(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_modified_since_and_last_modified_in_the_past(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 304)

    def test_if_modified_since_and_last_modified_in_the_future(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:41:44 GMT'
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 200)

    def test_if_modified_since_and_redirect(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp['Location'] = '/'
        self.resp.status_code = 301
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 301)

    def test_if_modified_since_and_client_error(self):
        self.req.META['HTTP_IF_MODIFIED_SINCE'] = 'Sat, 12 Feb 2011 17:38:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp.status_code = 400
        self.resp = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.resp.status_code, 400)

    def test_not_modified_headers(self):
        """
        The 304 Not Modified response should include only the headers required
        by section 4.1 of RFC 7232, Last-Modified, and the cookies.
        """
        self.req.META['HTTP_IF_NONE_MATCH'] = self.resp['ETag'] = '"spam"'
        self.resp['Date'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp['Last-Modified'] = 'Sat, 12 Feb 2011 17:35:44 GMT'
        self.resp['Expires'] = 'Sun, 13 Feb 2011 17:35:44 GMT'
        self.resp['Vary'] = 'Cookie'
        self.resp['Cache-Control'] = 'public'
        self.resp['Content-Location'] = '/alt'
        self.resp['Content-Language'] = 'en'  # shouldn't be preserved
        self.resp.set_cookie('key', 'value')

        new_response = ConditionalGetMiddleware().process_response(self.req, self.resp)
        self.assertEqual(new_response.status_code, 304)
        for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'):
            self.assertEqual(new_response[header], self.resp[header])
        self.assertEqual(new_response.cookies, self.resp.cookies)
        self.assertNotIn('Content-Language', new_response)

    def test_no_unsafe(self):
        """
        ConditionalGetMiddleware shouldn't return a conditional response on an
        unsafe request. A response has already been generated by the time
        ConditionalGetMiddleware is called, so it's too late to return a 412
        Precondition Failed.
        """
        get_response = ConditionalGetMiddleware().process_response(self.req, self.resp)
        etag = get_response['ETag']
        put_request = self.request_factory.put('/', HTTP_IF_MATCH=etag)
        put_response = HttpResponse(status=200)
        conditional_get_response = ConditionalGetMiddleware().process_response(put_request, put_response)
        self.assertEqual(conditional_get_response.status_code, 200)  # should never be a 412

    def test_no_head(self):
        """
        ConditionalGetMiddleware shouldn't compute and return an ETag on a
        HEAD request since it can't do so accurately without access to the
        response body of the corresponding GET.
        """
        request = self.request_factory.head('/')
        response = HttpResponse(status=200)
        conditional_get_response = ConditionalGetMiddleware().process_response(request, response)
        self.assertNotIn('ETag', conditional_get_response)
""" with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response['X-Frame-Options'] = 'SAMEORIGIN' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response['X-Frame-Options'] = 'DENY' r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'DENY') def test_response_exempt(self): """ If the response has an xframe_options_exempt attribute set to False then it still sets the header, but if it's set to True then it doesn't. """ with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): response = HttpResponse() response.xframe_options_exempt = False r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') response = HttpResponse() response.xframe_options_exempt = True r = XFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertIsNone(r.get('X-Frame-Options')) def test_is_extendable(self): """ The XFrameOptionsMiddleware method that determines the X-Frame-Options header value can be overridden based on something in the request or response. """ class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware): # This is just an example for testing purposes... def get_xframe_options_value(self, request, response): if getattr(request, 'sameorigin', False): return 'SAMEORIGIN' if getattr(response, 'sameorigin', False): return 'SAMEORIGIN' return 'DENY' with override_settings(X_FRAME_OPTIONS='DENY'): response = HttpResponse() response.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), response) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') request = HttpRequest() request.sameorigin = True r = OtherXFrameOptionsMiddleware().process_response(request, HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN') with override_settings(X_FRAME_OPTIONS='SAMEORIGIN'): r = OtherXFrameOptionsMiddleware().process_response(HttpRequest(), HttpResponse()) self.assertEqual(r['X-Frame-Options'], 'DENY') class GZipMiddlewareTest(SimpleTestCase): """ Tests the GZipMiddleware. """ short_string = b"This string is too short to be worth compressing." 
class GZipMiddlewareTest(SimpleTestCase):
    """
    Tests the GZipMiddleware.
    """
    short_string = b"This string is too short to be worth compressing."
    compressible_string = b'a' * 500
    incompressible_string = b''.join(int2byte(random.randint(0, 255)) for _ in range(500))
    sequence = [b'a' * 500, b'b' * 200, b'a' * 300]
    sequence_unicode = ['a' * 500, 'é' * 200, 'a' * 300]
    request_factory = RequestFactory()

    def setUp(self):
        self.req = self.request_factory.get('/')
        self.req.META['HTTP_ACCEPT_ENCODING'] = 'gzip, deflate'
        self.req.META['HTTP_USER_AGENT'] = 'Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1'
        self.resp = HttpResponse()
        self.resp.status_code = 200
        self.resp.content = self.compressible_string
        self.resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp = StreamingHttpResponse(self.sequence)
        self.stream_resp['Content-Type'] = 'text/html; charset=UTF-8'
        self.stream_resp_unicode = StreamingHttpResponse(self.sequence_unicode)
        self.stream_resp_unicode['Content-Type'] = 'text/html; charset=UTF-8'

    @staticmethod
    def decompress(gzipped_string):
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
            return f.read()

    @staticmethod
    def get_mtime(gzipped_string):
        with gzip.GzipFile(mode='rb', fileobj=BytesIO(gzipped_string)) as f:
            f.read()  # must read the data before accessing the header
            return f.mtime

    def test_compress_response(self):
        """
        Compression is performed on responses with compressible content.
        """
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertEqual(r.get('Content-Length'), str(len(r.content)))

    def test_compress_streaming_response(self):
        """
        Compression is performed on responses with streaming content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp)
        self.assertEqual(self.decompress(b''.join(r)), b''.join(self.sequence))
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_streaming_response_unicode(self):
        """
        Compression is performed on responses with streaming Unicode content.
        """
        r = GZipMiddleware().process_response(self.req, self.stream_resp_unicode)
        self.assertEqual(
            self.decompress(b''.join(r)),
            b''.join(x.encode() for x in self.sequence_unicode)
        )
        self.assertEqual(r.get('Content-Encoding'), 'gzip')
        self.assertFalse(r.has_header('Content-Length'))

    def test_compress_file_response(self):
        """
        Compression is performed on FileResponse.
        """
        with open(__file__, 'rb') as file1:
            file_resp = FileResponse(file1)
            file_resp['Content-Type'] = 'text/html; charset=UTF-8'
            r = GZipMiddleware().process_response(self.req, file_resp)
            with open(__file__, 'rb') as file2:
                self.assertEqual(self.decompress(b''.join(r)), file2.read())
            self.assertEqual(r.get('Content-Encoding'), 'gzip')
            self.assertIsNot(r.file_to_stream, file1)

    def test_compress_non_200_response(self):
        """
        Compression is performed on responses with a status other than 200
        (#10762).
        """
        self.resp.status_code = 404
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(self.decompress(r.content), self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'gzip')

    def test_no_compress_short_response(self):
        """
        Compression isn't performed on responses with short content.
        """
        self.resp.content = self.short_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.short_string)
        self.assertIsNone(r.get('Content-Encoding'))

    def test_no_compress_compressed_response(self):
        """
        Compression isn't performed on responses that are already compressed.
        """
        self.resp['Content-Encoding'] = 'deflate'
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.compressible_string)
        self.assertEqual(r.get('Content-Encoding'), 'deflate')

    def test_no_compress_incompressible_response(self):
        """
        Compression isn't performed on responses with incompressible content.
        """
        self.resp.content = self.incompressible_string
        r = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r.content, self.incompressible_string)
        self.assertIsNone(r.get('Content-Encoding'))

    def test_compress_deterministic(self):
        """
        Compression results are the same for the same content and don't
        include a modification time (since that would make the results
        of compression non-deterministic and prevent
        ConditionalGetMiddleware from recognizing conditional matches
        on gzipped content).
        """
        r1 = GZipMiddleware().process_response(self.req, self.resp)
        r2 = GZipMiddleware().process_response(self.req, self.resp)
        self.assertEqual(r1.content, r2.content)
        self.assertEqual(self.get_mtime(r1.content), 0)
        self.assertEqual(self.get_mtime(r2.content), 0)


class ETagGZipMiddlewareTest(SimpleTestCase):
    """
    ETags are handled properly by GZipMiddleware.
    """
    rf = RequestFactory()
    compressible_string = b'a' * 500

    def test_strong_etag_modified(self):
        """
        GZipMiddleware makes a strong ETag weak.
        """
        request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
        response = HttpResponse(self.compressible_string)
        response['ETag'] = '"eggs"'
        gzip_response = GZipMiddleware().process_response(request, response)
        self.assertEqual(gzip_response['ETag'], 'W/"eggs"')

    def test_weak_etag_not_modified(self):
        """
        GZipMiddleware doesn't modify a weak ETag.
        """
        request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
        response = HttpResponse(self.compressible_string)
        response['ETag'] = 'W/"eggs"'
        gzip_response = GZipMiddleware().process_response(request, response)
        self.assertEqual(gzip_response['ETag'], 'W/"eggs"')

    def test_etag_match(self):
        """
        GZipMiddleware allows 304 Not Modified responses.
        """
        request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate')
        response = GZipMiddleware().process_response(
            request,
            ConditionalGetMiddleware().process_response(request, HttpResponse(self.compressible_string))
        )
        gzip_etag = response['ETag']
        next_request = self.rf.get('/', HTTP_ACCEPT_ENCODING='gzip, deflate', HTTP_IF_NONE_MATCH=gzip_etag)
        next_response = ConditionalGetMiddleware().process_response(
            next_request,
            HttpResponse(self.compressible_string)
        )
        self.assertEqual(next_response.status_code, 304)
4374094792ac3db0e6f78a5e8bcf00db1e0b0e51f33c62588d1b8f71fc011330
""" Regression tests for the Test Client, especially the customized assertions. """ import itertools import os from django.contrib.auth.models import User from django.contrib.auth.signals import user_logged_in, user_logged_out from django.http import HttpResponse from django.template import ( Context, RequestContext, TemplateSyntaxError, engines, ) from django.template.response import SimpleTemplateResponse from django.test import ( Client, SimpleTestCase, TestCase, modify_settings, override_settings, ) from django.test.client import RedirectCycleError, RequestFactory, encode_file from django.test.utils import ContextList from django.urls import NoReverseMatch, reverse from django.utils.translation import gettext_lazy from .models import CustomUser from .views import CustomTestException class TestDataMixin: @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password') cls.staff = User.objects.create_user(username='staff', password='password', is_staff=True) @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertContainsTests(SimpleTestCase): def test_contains(self): "Responses can be inspected for content, including counting repeated substrings" response = self.client.get('/no_template_view/') self.assertNotContains(response, 'never') self.assertContains(response, 'never', 0) self.assertContains(response, 'once') self.assertContains(response, 'once', 1) self.assertContains(response, 'twice') self.assertContains(response, 'twice', 2) try: self.assertContains(response, 'text', status_code=999) except AssertionError as e: self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertContains(response, 'text', status_code=999, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'text', status_code=999) except AssertionError as e: self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'text', status_code=999, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e)) try: self.assertNotContains(response, 'once') except AssertionError as e: self.assertIn("Response should not contain 'once'", str(e)) try: self.assertNotContains(response, 'once', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Response should not contain 'once'", str(e)) try: self.assertContains(response, 'never', 1) except AssertionError as e: self.assertIn("Found 0 instances of 'never' in response (expected 1)", str(e)) try: self.assertContains(response, 'never', 1, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 0 instances of 'never' in response (expected 1)", str(e)) try: self.assertContains(response, 'once', 0) except AssertionError as e: self.assertIn("Found 1 instances of 'once' in response (expected 0)", str(e)) try: self.assertContains(response, 'once', 0, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 1 instances of 'once' in response (expected 0)", str(e)) try: self.assertContains(response, 'once', 2) except AssertionError as e: self.assertIn("Found 1 instances of 'once' in response (expected 2)", str(e)) try: self.assertContains(response, 'once', 2, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 1 instances of 'once' in response (expected 2)", 
str(e)) try: self.assertContains(response, 'twice', 1) except AssertionError as e: self.assertIn("Found 2 instances of 'twice' in response (expected 1)", str(e)) try: self.assertContains(response, 'twice', 1, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 2 instances of 'twice' in response (expected 1)", str(e)) try: self.assertContains(response, 'thrice') except AssertionError as e: self.assertIn("Couldn't find 'thrice' in response", str(e)) try: self.assertContains(response, 'thrice', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Couldn't find 'thrice' in response", str(e)) try: self.assertContains(response, 'thrice', 3) except AssertionError as e: self.assertIn("Found 0 instances of 'thrice' in response (expected 3)", str(e)) try: self.assertContains(response, 'thrice', 3, msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Found 0 instances of 'thrice' in response (expected 3)", str(e)) def test_unicode_contains(self): "Unicode characters can be found in template context" # Regression test for #10183 r = self.client.get('/check_unicode/') self.assertContains(r, 'さかき') self.assertContains(r, b'\xe5\xb3\xa0'.decode()) def test_unicode_not_contains(self): "Unicode characters can be searched for, and not found in template context" # Regression test for #10183 r = self.client.get('/check_unicode/') self.assertNotContains(r, 'はたけ') self.assertNotContains(r, b'\xe3\x81\xaf\xe3\x81\x9f\xe3\x81\x91'.decode()) def test_binary_contains(self): r = self.client.get('/check_binary/') self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e') with self.assertRaises(AssertionError): self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e', count=2) def test_binary_not_contains(self): r = self.client.get('/check_binary/') self.assertNotContains(r, b'%ODF-1.4\r\n%\x93\x8c\x8b\x9e') with self.assertRaises(AssertionError): self.assertNotContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e') def test_nontext_contains(self): r = self.client.get('/no_template_view/') self.assertContains(r, gettext_lazy('once')) def test_nontext_not_contains(self): r = self.client.get('/no_template_view/') self.assertNotContains(r, gettext_lazy('never')) def test_assert_contains_renders_template_response(self): """ An unrendered SimpleTemplateResponse may be used in assertContains(). """ template = engines['django'].from_string('Hello') response = SimpleTemplateResponse(template) self.assertContains(response, 'Hello') def test_assert_contains_using_non_template_response(self): """ auto-rendering does not affect responses that aren't instances (or subclasses) of SimpleTemplateResponse. Refs #15826. """ response = HttpResponse('Hello') self.assertContains(response, 'Hello') def test_assert_not_contains_renders_template_response(self): """ An unrendered SimpleTemplateResponse may be used in assertNotContains(). """ template = engines['django'].from_string('Hello') response = SimpleTemplateResponse(template) self.assertNotContains(response, 'Bye') def test_assert_not_contains_using_non_template_response(self): """ auto-rendering does not affect responses that aren't instances (or subclasses) of SimpleTemplateResponse. 
""" response = HttpResponse('Hello') self.assertNotContains(response, 'Bye') @override_settings(ROOT_URLCONF='test_client_regress.urls') class AssertTemplateUsedTests(TestDataMixin, TestCase): def test_no_context(self): "Template usage assertions work then templates aren't in use" response = self.client.get('/no_template_view/') # The no template case doesn't mess with the template assertions self.assertTemplateNotUsed(response, 'GET Template') try: self.assertTemplateUsed(response, 'GET Template') except AssertionError as e: self.assertIn("No templates used to render the response", str(e)) try: self.assertTemplateUsed(response, 'GET Template', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: No templates used to render the response", str(e)) with self.assertRaises(AssertionError) as context: self.assertTemplateUsed(response, 'GET Template', count=2) self.assertIn( "No templates used to render the response", str(context.exception)) def test_single_context(self): "Template assertions work when there is a single context" response = self.client.get('/post_view/', {}) try: self.assertTemplateNotUsed(response, 'Empty GET Template') except AssertionError as e: self.assertIn("Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e)) try: self.assertTemplateNotUsed(response, 'Empty GET Template', msg_prefix='abc') except AssertionError as e: self.assertIn("abc: Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e)) try: self.assertTemplateUsed(response, 'Empty POST Template') except AssertionError as e: self.assertIn( "Template 'Empty POST Template' was not a template used to " "render the response. Actual template(s) used: Empty GET Template", str(e) ) try: self.assertTemplateUsed(response, 'Empty POST Template', msg_prefix='abc') except AssertionError as e: self.assertIn( "abc: Template 'Empty POST Template' was not a template used " "to render the response. Actual template(s) used: Empty GET Template", str(e) ) with self.assertRaises(AssertionError) as context: self.assertTemplateUsed(response, 'Empty GET Template', count=2) self.assertIn( "Template 'Empty GET Template' was expected to be rendered 2 " "time(s) but was actually rendered 1 time(s).", str(context.exception)) with self.assertRaises(AssertionError) as context: self.assertTemplateUsed( response, 'Empty GET Template', msg_prefix='abc', count=2) self.assertIn( "abc: Template 'Empty GET Template' was expected to be rendered 2 " "time(s) but was actually rendered 1 time(s).", str(context.exception)) def test_multiple_context(self): "Template assertions work when there are multiple contexts" post_data = { 'text': 'Hello World', 'email': '[email protected]', 'value': 37, 'single': 'b', 'multi': ('b', 'c', 'e') } response = self.client.post('/form_view_with_template/', post_data) self.assertContains(response, 'POST data OK') try: self.assertTemplateNotUsed(response, "form_view.html") except AssertionError as e: self.assertIn("Template 'form_view.html' was used unexpectedly in rendering the response", str(e)) try: self.assertTemplateNotUsed(response, 'base.html') except AssertionError as e: self.assertIn("Template 'base.html' was used unexpectedly in rendering the response", str(e)) try: self.assertTemplateUsed(response, "Valid POST Template") except AssertionError as e: self.assertIn( "Template 'Valid POST Template' was not a template used to " "render the response. 
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertTemplateUsedTests(TestDataMixin, TestCase):
    def test_no_context(self):
        "Template usage assertions work when templates aren't in use"
        response = self.client.get('/no_template_view/')

        # The no template case doesn't mess with the template assertions
        self.assertTemplateNotUsed(response, 'GET Template')

        try:
            self.assertTemplateUsed(response, 'GET Template')
        except AssertionError as e:
            self.assertIn("No templates used to render the response", str(e))

        try:
            self.assertTemplateUsed(response, 'GET Template', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: No templates used to render the response", str(e))

        with self.assertRaises(AssertionError) as context:
            self.assertTemplateUsed(response, 'GET Template', count=2)
        self.assertIn(
            "No templates used to render the response",
            str(context.exception))

    def test_single_context(self):
        "Template assertions work when there is a single context"
        response = self.client.get('/post_view/', {})
        try:
            self.assertTemplateNotUsed(response, 'Empty GET Template')
        except AssertionError as e:
            self.assertIn("Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e))

        try:
            self.assertTemplateNotUsed(response, 'Empty GET Template', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e))

        try:
            self.assertTemplateUsed(response, 'Empty POST Template')
        except AssertionError as e:
            self.assertIn(
                "Template 'Empty POST Template' was not a template used to "
                "render the response. Actual template(s) used: Empty GET Template",
                str(e)
            )

        try:
            self.assertTemplateUsed(response, 'Empty POST Template', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn(
                "abc: Template 'Empty POST Template' was not a template used "
                "to render the response. Actual template(s) used: Empty GET Template",
                str(e)
            )

        with self.assertRaises(AssertionError) as context:
            self.assertTemplateUsed(response, 'Empty GET Template', count=2)
        self.assertIn(
            "Template 'Empty GET Template' was expected to be rendered 2 "
            "time(s) but was actually rendered 1 time(s).",
            str(context.exception))

        with self.assertRaises(AssertionError) as context:
            self.assertTemplateUsed(
                response, 'Empty GET Template', msg_prefix='abc', count=2)
        self.assertIn(
            "abc: Template 'Empty GET Template' was expected to be rendered 2 "
            "time(s) but was actually rendered 1 time(s).",
            str(context.exception))

    def test_multiple_context(self):
        "Template assertions work when there are multiple contexts"
        post_data = {
            'text': 'Hello World',
            'email': '[email protected]',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view_with_template/', post_data)
        self.assertContains(response, 'POST data OK')
        try:
            self.assertTemplateNotUsed(response, "form_view.html")
        except AssertionError as e:
            self.assertIn("Template 'form_view.html' was used unexpectedly in rendering the response", str(e))

        try:
            self.assertTemplateNotUsed(response, 'base.html')
        except AssertionError as e:
            self.assertIn("Template 'base.html' was used unexpectedly in rendering the response", str(e))

        try:
            self.assertTemplateUsed(response, "Valid POST Template")
        except AssertionError as e:
            self.assertIn(
                "Template 'Valid POST Template' was not a template used to "
                "render the response. Actual template(s) used: form_view.html, base.html",
                str(e)
            )

        with self.assertRaises(AssertionError) as context:
            self.assertTemplateUsed(response, 'base.html', count=2)
        self.assertIn(
            "Template 'base.html' was expected to be rendered 2 "
            "time(s) but was actually rendered 1 time(s).",
            str(context.exception))

    def test_template_rendered_multiple_times(self):
        """Template assertions work when a template is rendered multiple times."""
        response = self.client.get('/render_template_multiple_times/')
        self.assertTemplateUsed(response, 'base.html', count=2)
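# Illustrative sketch (not part of the original suite): assertTemplateUsed()
# also works as a context manager around any code that renders templates.
def _template_used_context_manager_sketch(testcase):
    with testcase.assertTemplateUsed('base.html'):
        testcase.client.get('/render_template_multiple_times/')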
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertRedirectsTests(SimpleTestCase):
    def test_redirect_page(self):
        "An assertion is raised if the original page couldn't be retrieved as expected"
        # This page will redirect with code 301, not 302
        response = self.client.get('/permanent_redirect_view/')
        try:
            self.assertRedirects(response, '/get_view/')
        except AssertionError as e:
            self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))

        try:
            self.assertRedirects(response, '/get_view/', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))

    def test_lost_query(self):
        "An assertion is raised if the redirect location doesn't preserve GET parameters"
        response = self.client.get('/redirect_view/', {'var': 'value'})
        try:
            self.assertRedirects(response, '/get_view/')
        except AssertionError as e:
            self.assertIn("Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e))

        try:
            self.assertRedirects(response, '/get_view/', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e))

    def test_incorrect_target(self):
        "An assertion is raised if the response redirects to another target"
        response = self.client.get('/permanent_redirect_view/')
        try:
            # Should redirect to get_view
            self.assertRedirects(response, '/some_view/')
        except AssertionError as e:
            self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))

    def test_target_page(self):
        "An assertion is raised if the response redirect target cannot be retrieved as expected"
        response = self.client.get('/double_redirect_view/')
        try:
            # The redirect target responds with a 301 code, not 200
            self.assertRedirects(response, 'http://testserver/permanent_redirect_view/')
        except AssertionError as e:
            self.assertIn(
                "Couldn't retrieve redirection page '/permanent_redirect_view/': "
                "response code was 301 (expected 200)",
                str(e)
            )

        try:
            # The redirect target responds with a 301 code, not 200
            self.assertRedirects(response, 'http://testserver/permanent_redirect_view/', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn(
                "abc: Couldn't retrieve redirection page '/permanent_redirect_view/': "
                "response code was 301 (expected 200)",
                str(e)
            )

    def test_redirect_chain(self):
        "You can follow a redirect chain of multiple redirects"
        response = self.client.get('/redirects/further/more/', {}, follow=True)
        self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200)

        self.assertEqual(len(response.redirect_chain), 1)
        self.assertEqual(response.redirect_chain[0], ('/no_template_view/', 302))

    def test_multiple_redirect_chain(self):
        "You can follow a redirect chain of multiple redirects"
        response = self.client.get('/redirects/', {}, follow=True)
        self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200)

        self.assertEqual(len(response.redirect_chain), 3)
        self.assertEqual(response.redirect_chain[0], ('/redirects/further/', 302))
        self.assertEqual(response.redirect_chain[1], ('/redirects/further/more/', 302))
        self.assertEqual(response.redirect_chain[2], ('/no_template_view/', 302))

    def test_redirect_chain_to_non_existent(self):
        "You can follow a chain to a nonexistent view."
        response = self.client.get('/redirect_to_non_existent_view2/', {}, follow=True)
        self.assertRedirects(response, '/non_existent_view/', status_code=302, target_status_code=404)

    def test_redirect_chain_to_self(self):
        "Redirections to self are caught and escaped"
        with self.assertRaises(RedirectCycleError) as context:
            self.client.get('/redirect_to_self/', {}, follow=True)
        response = context.exception.last_response
        # The chain of redirects stops once the cycle is detected.
        self.assertRedirects(response, '/redirect_to_self/', status_code=302, target_status_code=302)
        self.assertEqual(len(response.redirect_chain), 2)

    def test_redirect_to_self_with_changing_query(self):
        "Redirections don't loop forever even if query is changing"
        with self.assertRaises(RedirectCycleError):
            self.client.get('/redirect_to_self_with_changing_query_view/', {'counter': '0'}, follow=True)

    def test_circular_redirect(self):
        "Circular redirect chains are caught and escaped"
        with self.assertRaises(RedirectCycleError) as context:
            self.client.get('/circular_redirect_1/', {}, follow=True)
        response = context.exception.last_response
        # The chain of redirects will get back to the starting point, but stop there.
        self.assertRedirects(response, '/circular_redirect_2/', status_code=302, target_status_code=302)
        self.assertEqual(len(response.redirect_chain), 4)

    def test_redirect_chain_post(self):
        "A redirect chain will be followed from an initial POST request"
        response = self.client.post('/redirects/', {'nothing': 'to_send'}, follow=True)
        self.assertRedirects(response, '/no_template_view/', 302, 200)
        self.assertEqual(len(response.redirect_chain), 3)

    def test_redirect_chain_head(self):
        "A redirect chain will be followed from an initial HEAD request"
        response = self.client.head('/redirects/', {'nothing': 'to_send'}, follow=True)
        self.assertRedirects(response, '/no_template_view/', 302, 200)
        self.assertEqual(len(response.redirect_chain), 3)

    def test_redirect_chain_options(self):
        "A redirect chain will be followed from an initial OPTIONS request"
        response = self.client.options('/redirects/', follow=True)
        self.assertRedirects(response, '/no_template_view/', 302, 200)
        self.assertEqual(len(response.redirect_chain), 3)

    def test_redirect_chain_put(self):
        "A redirect chain will be followed from an initial PUT request"
        response = self.client.put('/redirects/', follow=True)
        self.assertRedirects(response, '/no_template_view/', 302, 200)
        self.assertEqual(len(response.redirect_chain), 3)

    def test_redirect_chain_delete(self):
        "A redirect chain will be followed from an initial DELETE request"
        response = self.client.delete('/redirects/', follow=True)
        self.assertRedirects(response, '/no_template_view/', 302, 200)
        self.assertEqual(len(response.redirect_chain), 3)

    @modify_settings(ALLOWED_HOSTS={'append': 'otherserver'})
    def test_redirect_to_different_host(self):
        "The test client will preserve scheme, host and port changes"
        response = self.client.get('/redirect_other_host/', follow=True)
        self.assertRedirects(
            response, 'https://otherserver:8443/no_template_view/',
            status_code=302, target_status_code=200
        )
        # We can't use is_secure() or get_host() because response.request is a
        # dictionary, not an HttpRequest.
        self.assertEqual(response.request.get('wsgi.url_scheme'), 'https')
        self.assertEqual(response.request.get('SERVER_NAME'), 'otherserver')
        self.assertEqual(response.request.get('SERVER_PORT'), '8443')
        # assertRedirects() can follow redirect to 'otherserver' too.
        response = self.client.get('/redirect_other_host/', follow=False)
        self.assertRedirects(
            response, 'https://otherserver:8443/no_template_view/',
            status_code=302, target_status_code=200
        )

    def test_redirect_chain_on_non_redirect_page(self):
        "An assertion is raised if the original page couldn't be retrieved as expected"
        # This view responds with code 200, so there is no redirect to follow.
        response = self.client.get('/get_view/', follow=True)
        try:
            self.assertRedirects(response, '/get_view/')
        except AssertionError as e:
            self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))

        try:
            self.assertRedirects(response, '/get_view/', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))

    def test_redirect_on_non_redirect_page(self):
        "An assertion is raised if the original page couldn't be retrieved as expected"
        # This view responds with code 200, so there is no redirect to assert on.
        response = self.client.get('/get_view/')
        try:
            self.assertRedirects(response, '/get_view/')
        except AssertionError as e:
            self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))

        try:
            self.assertRedirects(response, '/get_view/', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))

    def test_redirect_scheme(self):
        "An assertion is raised if the response doesn't have the scheme specified in expected_url"

        # For all possible True/False combinations of follow and secure
        for follow, secure in itertools.product([True, False], repeat=2):
            # always redirects to https
            response = self.client.get('/https_redirect_view/', follow=follow, secure=secure)
            # the goal scheme is https
            self.assertRedirects(response, 'https://testserver/secure_view/', status_code=302)
            with self.assertRaises(AssertionError):
                self.assertRedirects(response, 'http://testserver/secure_view/', status_code=302)

    def test_redirect_fetch_redirect_response(self):
        """Preserve extra headers of requests made with django.test.Client."""
        methods = (
            'get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace',
        )
        for method in methods:
            with self.subTest(method=method):
                req_method = getattr(self.client, method)
                response = req_method(
                    '/redirect_based_on_extra_headers_1/',
                    follow=False,
                    HTTP_REDIRECT='val',
                )
                self.assertRedirects(
                    response,
                    '/redirect_based_on_extra_headers_2/',
                    fetch_redirect_response=True,
                    status_code=302,
                    target_status_code=302,
                )
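# Illustrative sketch (not part of the original suite): when a redirect
# target lives outside the URLconf under test, fetch_redirect_response=False
# tells assertRedirects() not to retrieve it.
def _external_redirect_sketch(testcase):
    response = testcase.client.get('/redirect_other_host/')
    testcase.assertRedirects(
        response, 'https://otherserver:8443/no_template_view/',
        fetch_redirect_response=False,
    )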
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertFormErrorTests(SimpleTestCase):
    def test_unknown_form(self):
        "An assertion is raised if the form name is unknown"
        post_data = {
            'text': 'Hello World',
            'email': 'not an email address',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")

        try:
            self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.')
        except AssertionError as e:
            self.assertIn("The form 'wrong_form' was not used to render the response", str(e))
        try:
            self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: The form 'wrong_form' was not used to render the response", str(e))

    def test_unknown_field(self):
        "An assertion is raised if the field name is unknown"
        post_data = {
            'text': 'Hello World',
            'email': 'not an email address',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")

        try:
            self.assertFormError(response, 'form', 'some_field', 'Some error.')
        except AssertionError as e:
            self.assertIn("The form 'form' in context 0 does not contain the field 'some_field'", str(e))
        try:
            self.assertFormError(response, 'form', 'some_field', 'Some error.', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: The form 'form' in context 0 does not contain the field 'some_field'", str(e))

    def test_noerror_field(self):
        "An assertion is raised if the field doesn't have any errors"
        post_data = {
            'text': 'Hello World',
            'email': 'not an email address',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")

        try:
            self.assertFormError(response, 'form', 'value', 'Some error.')
        except AssertionError as e:
            self.assertIn("The field 'value' on form 'form' in context 0 contains no errors", str(e))
        try:
            self.assertFormError(response, 'form', 'value', 'Some error.', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn("abc: The field 'value' on form 'form' in context 0 contains no errors", str(e))

    def test_unknown_error(self):
        "An assertion is raised if the field doesn't contain the provided error"
        post_data = {
            'text': 'Hello World',
            'email': 'not an email address',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")

        try:
            self.assertFormError(response, 'form', 'email', 'Some error.')
        except AssertionError as e:
            self.assertIn(
                "The field 'email' on form 'form' in context 0 does not "
                "contain the error 'Some error.' (actual errors: "
                "['Enter a valid email address.'])", str(e)
            )
        try:
            self.assertFormError(response, 'form', 'email', 'Some error.', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn(
                "abc: The field 'email' on form 'form' in context 0 does "
                "not contain the error 'Some error.' (actual errors: "
                "['Enter a valid email address.'])", str(e)
            )

    def test_unknown_nonfield_error(self):
        """
        An assertion is raised if the form's non-field errors don't contain
        the provided error.
        """
        post_data = {
            'text': 'Hello World',
            'email': 'not an email address',
            'value': 37,
            'single': 'b',
            'multi': ('b', 'c', 'e')
        }
        response = self.client.post('/form_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")

        try:
            self.assertFormError(response, 'form', None, 'Some error.')
        except AssertionError as e:
            self.assertIn(
                "The form 'form' in context 0 does not contain the non-field "
                "error 'Some error.' (actual errors: none)", str(e)
            )
        try:
            self.assertFormError(response, 'form', None, 'Some error.', msg_prefix='abc')
        except AssertionError as e:
            self.assertIn(
                "abc: The form 'form' in context 0 does not contain the "
                "non-field error 'Some error.' (actual errors: none)", str(e)
            )
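# Illustrative sketch (not part of the original suite): the passing case the
# negative tests above imply — asserting the validation error the form
# actually produces for an invalid email address.
def _form_error_positive_sketch(testcase):
    response = testcase.client.post('/form_view/', {
        'text': 'Hello World',
        'email': 'not an email address',
        'value': 37,
        'single': 'b',
        'multi': ('b', 'c', 'e'),
    })
    testcase.assertFormError(response, 'form', 'email', 'Enter a valid email address.')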
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertFormsetErrorTests(SimpleTestCase):
    msg_prefixes = [("", {}), ("abc: ", {"msg_prefix": "abc"})]

    def setUp(self):
        """Makes response object for testing field and non-field errors"""
        # For testing field and non-field errors
        self.response_form_errors = self.getResponse({
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',

            'form-0-text': 'Raise non-field error',
            'form-0-email': 'not an email address',
            'form-0-value': 37,
            'form-0-single': 'b',
            'form-0-multi': ('b', 'c', 'e'),

            'form-1-text': 'Hello World',
            'form-1-email': '[email protected]',
            'form-1-value': 37,
            'form-1-single': 'b',
            'form-1-multi': ('b', 'c', 'e'),
        })

        # For testing non-form errors
        self.response_nonform_errors = self.getResponse({
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '2',

            'form-0-text': 'Hello World',
            'form-0-email': '[email protected]',
            'form-0-value': 37,
            'form-0-single': 'b',
            'form-0-multi': ('b', 'c', 'e'),

            'form-1-text': 'Hello World',
            'form-1-email': '[email protected]',
            'form-1-value': 37,
            'form-1-single': 'b',
            'form-1-multi': ('b', 'c', 'e'),
        })

    def getResponse(self, post_data):
        response = self.client.post('/formset_view/', post_data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "Invalid POST Template")
        return response

    def test_unknown_formset(self):
        "An assertion is raised if the formset name is unknown"
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + "The formset 'wrong_formset' was not used to render the response"
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(
                    self.response_form_errors,
                    'wrong_formset', 0, 'Some_field', 'Some error.', **kwargs
                )

    def test_unknown_field(self):
        "An assertion is raised if the field name is unknown"
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + "The formset 'my_formset', form 0 in context 0 does not contain the field 'Some_field'"
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(
                    self.response_form_errors,
                    'my_formset', 0, 'Some_field', 'Some error.', **kwargs
                )

    def test_no_error_field(self):
        "An assertion is raised if the field doesn't have any errors"
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + "The field 'value' on formset 'my_formset', form 1 in context 0 contains no errors"
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(self.response_form_errors, 'my_formset', 1, 'value', 'Some error.', **kwargs)

    def test_unknown_error(self):
        "An assertion is raised if the field doesn't contain the specified error"
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + (
                "The field 'email' on formset 'my_formset', form 0 "
                "in context 0 does not contain the error 'Some error.' "
                "(actual errors: ['Enter a valid email address.'])"
            )
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', 'Some error.', **kwargs)

    def test_field_error(self):
        "No assertion is raised if the field contains the provided error"
        error_msg = ['Enter a valid email address.']
        for prefix, kwargs in self.msg_prefixes:
            self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', error_msg, **kwargs)

    def test_no_nonfield_error(self):
        "An assertion is raised if the formset's non-field errors don't contain any errors."
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + "The formset 'my_formset', form 1 in context 0 does not contain any non-field errors."
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(self.response_form_errors, 'my_formset', 1, None, 'Some error.', **kwargs)

    def test_unknown_nonfield_error(self):
        "An assertion is raised if the formset's non-field errors don't contain the provided error."
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + (
                "The formset 'my_formset', form 0 in context 0 does not "
                "contain the non-field error 'Some error.' (actual errors: "
                "['Non-field error.'])"
            )
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Some error.', **kwargs)

    def test_nonfield_error(self):
        "No assertion is raised if the formset's non-field errors contain the provided error."
        for prefix, kwargs in self.msg_prefixes:
            self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Non-field error.', **kwargs)

    def test_no_nonform_error(self):
        "An assertion is raised if the formset's non-form errors don't contain any errors."
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + "The formset 'my_formset' in context 0 does not contain any non-form errors."
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(self.response_form_errors, 'my_formset', None, None, 'Some error.', **kwargs)

    def test_unknown_nonform_error(self):
        "An assertion is raised if the formset's non-form errors don't contain the provided error."
        for prefix, kwargs in self.msg_prefixes:
            msg = prefix + (
                "The formset 'my_formset' in context 0 does not contain the "
                "non-form error 'Some error.' (actual errors: ['Forms in a set "
                "must have distinct email addresses.'])"
            )
            with self.assertRaisesMessage(AssertionError, msg):
                self.assertFormsetError(
                    self.response_nonform_errors,
                    'my_formset', None, None, 'Some error.', **kwargs
                )

    def test_nonform_error(self):
        "No assertion is raised if the formset's non-form errors contain the provided error."
        msg = 'Forms in a set must have distinct email addresses.'
        for prefix, kwargs in self.msg_prefixes:
            self.assertFormsetError(self.response_nonform_errors, 'my_formset', None, None, msg, **kwargs)


@override_settings(ROOT_URLCONF='test_client_regress.urls')
class LoginTests(TestDataMixin, TestCase):

    def test_login_different_client(self):
        "Using a different test client doesn't violate authentication"
        # Create a second client, and log in.
        c = Client()
        login = c.login(username='testclient', password='password')
        self.assertTrue(login, 'Could not log in')

        # Get a redirection page with the second client.
        response = c.get("/login_protected_redirect_view/")

        # At this point, self.client isn't logged in.
        # assertRedirects uses the original client, not the default client.
        self.assertRedirects(response, "/get_view/")
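# Illustrative sketch (not part of the original suite): Client.force_login()
# skips the authentication backend entirely, which is faster when a test only
# needs a logged-in user rather than the login machinery itself.
def _force_login_sketch(testcase):
    user = User.objects.create_user(username='sketchuser', password='irrelevant')
    testcase.client.force_login(user)
    response = testcase.client.get('/login_protected_view/')
    assert response.context['user'].username == 'sketchuser'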
response = self.client.get("/login_protected_view/") self.assertEqual(response.status_code, 200) self.assertEqual(response.context['user'].username, 'testclient') @override_settings(ROOT_URLCONF='test_client_regress.urls',) class URLEscapingTests(SimpleTestCase): def test_simple_argument_get(self): "Get a view that has a simple string argument" response = self.client.get(reverse('arg_view', args=['Slartibartfast'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Howdy, Slartibartfast') def test_argument_with_space_get(self): "Get a view that has a string argument that requires escaping" response = self.client.get(reverse('arg_view', args=['Arthur Dent'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Hi, Arthur') def test_simple_argument_post(self): "Post for a view that has a simple string argument" response = self.client.post(reverse('arg_view', args=['Slartibartfast'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Howdy, Slartibartfast') def test_argument_with_space_post(self): "Post for a view that has a string argument that requires escaping" response = self.client.post(reverse('arg_view', args=['Arthur Dent'])) self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'Hi, Arthur') @override_settings(ROOT_URLCONF='test_client_regress.urls') class ExceptionTests(TestDataMixin, TestCase): def test_exception_cleared(self): "#5836 - A stale user exception isn't re-raised by the test client." login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') with self.assertRaises(CustomTestException): self.client.get("/staff_only/") # At this point, an exception has been raised, and should be cleared. # This next operation should be successful; if it isn't we have a problem. login = self.client.login(username='staff', password='password') self.assertTrue(login, 'Could not log in') self.client.get("/staff_only/") @override_settings(ROOT_URLCONF='test_client_regress.urls') class TemplateExceptionTests(SimpleTestCase): @override_settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(os.path.dirname(__file__), 'bad_templates')], }]) def test_bad_404_template(self): "Errors found when rendering 404 error templates are re-raised" with self.assertRaises(TemplateSyntaxError): self.client.get("/no_such_view/") # We need two different tests to check URLconf substitution - one to check # it was changed, and another one (without self.urls) to check it was reverted on # teardown. This pair of tests relies upon the alphabetical ordering of test execution. @override_settings(ROOT_URLCONF='test_client_regress.urls') class UrlconfSubstitutionTests(SimpleTestCase): def test_urlconf_was_changed(self): "TestCase can enforce a custom URLconf on a per-test basis" url = reverse('arg_view', args=['somename']) self.assertEqual(url, '/arg_view/somename/') # This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the # name is to ensure alphabetical ordering. class zzUrlconfSubstitutionTests(SimpleTestCase): def test_urlconf_was_reverted(self): """URLconf is reverted to original value after modification in a TestCase This will not find a match as the default ROOT_URLCONF is empty. 
""" with self.assertRaises(NoReverseMatch): reverse('arg_view', args=['somename']) @override_settings(ROOT_URLCONF='test_client_regress.urls') class ContextTests(TestDataMixin, TestCase): def test_single_context(self): "Context variables can be retrieved from a single context" response = self.client.get("/request_data/", data={'foo': 'whiz'}) self.assertIsInstance(response.context, RequestContext) self.assertIn('get-foo', response.context) self.assertEqual(response.context['get-foo'], 'whiz') self.assertEqual(response.context['data'], 'sausage') with self.assertRaisesMessage(KeyError, 'does-not-exist'): response.context['does-not-exist'] def test_inherited_context(self): "Context variables can be retrieved from a list of contexts" response = self.client.get("/request_data_extended/", data={'foo': 'whiz'}) self.assertEqual(response.context.__class__, ContextList) self.assertEqual(len(response.context), 2) self.assertIn('get-foo', response.context) self.assertEqual(response.context['get-foo'], 'whiz') self.assertEqual(response.context['data'], 'bacon') with self.assertRaises(KeyError) as cm: response.context['does-not-exist'] self.assertEqual(cm.exception.args[0], 'does-not-exist') def test_contextlist_keys(self): c1 = Context() c1.update({'hello': 'world', 'goodbye': 'john'}) c1.update({'hello': 'dolly', 'dolly': 'parton'}) c2 = Context() c2.update({'goodbye': 'world', 'python': 'rocks'}) c2.update({'goodbye': 'dolly'}) k = ContextList([c1, c2]) # None, True and False are builtins of BaseContext, and present # in every Context without needing to be added. self.assertEqual({'None', 'True', 'False', 'hello', 'goodbye', 'python', 'dolly'}, k.keys()) def test_contextlist_get(self): c1 = Context({'hello': 'world', 'goodbye': 'john'}) c2 = Context({'goodbye': 'world', 'python': 'rocks'}) k = ContextList([c1, c2]) self.assertEqual(k.get('hello'), 'world') self.assertEqual(k.get('goodbye'), 'john') self.assertEqual(k.get('python'), 'rocks') self.assertEqual(k.get('nonexistent', 'default'), 'default') def test_15368(self): # Need to insert a context processor that assumes certain things about # the request instance. This triggers a bug caused by some ways of # copying RequestContext. with self.settings(TEMPLATES=[{ 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'test_client_regress.context_processors.special', ], }, }]): response = self.client.get("/request_context_view/") self.assertContains(response, 'Path: /request_context_view/') def test_nested_requests(self): """ response.context is not lost when view call another view. """ response = self.client.get("/nested_view/") self.assertIsInstance(response.context, RequestContext) self.assertEqual(response.context['nested'], 'yes') @override_settings(ROOT_URLCONF='test_client_regress.urls') class SessionTests(TestDataMixin, TestCase): def test_session(self): "The session isn't lost if a user logs in" # The session doesn't exist to start. response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'NO') # This request sets a session variable. 
response = self.client.get('/set_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'set_session') # The session has been modified response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'YES') # Log in login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') # Session should still contain the modified value response = self.client.get('/check_session/') self.assertEqual(response.status_code, 200) self.assertEqual(response.content, b'YES') def test_session_initiated(self): session = self.client.session session['session_var'] = 'foo' session.save() response = self.client.get('/check_session/') self.assertEqual(response.content, b'foo') def test_logout(self): """Logout should work whether the user is logged in or not (#9978).""" self.client.logout() login = self.client.login(username='testclient', password='password') self.assertTrue(login, 'Could not log in') self.client.logout() self.client.logout() def test_logout_with_user(self): """Logout should send user_logged_out signal if user was logged in.""" def listener(*args, **kwargs): listener.executed = True self.assertEqual(kwargs['sender'], User) listener.executed = False user_logged_out.connect(listener) self.client.login(username='testclient', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) @override_settings(AUTH_USER_MODEL='test_client_regress.CustomUser') def test_logout_with_custom_user(self): """Logout should send user_logged_out signal if custom user was logged in.""" def listener(*args, **kwargs): self.assertEqual(kwargs['sender'], CustomUser) listener.executed = True listener.executed = False u = CustomUser.custom_objects.create(email='[email protected]') u.set_password('password') u.save() user_logged_out.connect(listener) self.client.login(username='[email protected]', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) @override_settings(AUTHENTICATION_BACKENDS=( 'django.contrib.auth.backends.ModelBackend', 'test_client_regress.auth_backends.CustomUserBackend')) def test_logout_with_custom_auth_backend(self): "Request a logout after logging in with custom authentication backend" def listener(*args, **kwargs): self.assertEqual(kwargs['sender'], CustomUser) listener.executed = True listener.executed = False u = CustomUser.custom_objects.create(email='[email protected]') u.set_password('password') u.save() user_logged_out.connect(listener) self.client.login(username='[email protected]', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) def test_logout_without_user(self): """Logout should send signal even if user not authenticated.""" def listener(user, *args, **kwargs): listener.user = user listener.executed = True listener.executed = False user_logged_out.connect(listener) self.client.login(username='incorrect', password='password') self.client.logout() user_logged_out.disconnect(listener) self.assertTrue(listener.executed) self.assertIsNone(listener.user) def test_login_with_user(self): """Login should send user_logged_in signal on successful login.""" def listener(*args, **kwargs): listener.executed = True listener.executed = False user_logged_in.connect(listener) self.client.login(username='testclient', password='password') user_logged_out.disconnect(listener) 
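# Illustrative sketch (not part of the original suite): the likely shape of
# the 'special' context processor referenced in test_15368 above. The real
# body lives in test_client_regress/context_processors.py; this is only a
# guess at its minimal form.
def _special_context_processor_sketch(request):
    # A context processor is just a callable taking the request and
    # returning a dict merged into every RequestContext.
    return {'path': request.path}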
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class SessionTests(TestDataMixin, TestCase):

    def test_session(self):
        "The session isn't lost if a user logs in"
        # The session doesn't exist to start.
        response = self.client.get('/check_session/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'NO')

        # This request sets a session variable.
        response = self.client.get('/set_session/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'set_session')

        # The session has been modified
        response = self.client.get('/check_session/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'YES')

        # Log in
        login = self.client.login(username='testclient', password='password')
        self.assertTrue(login, 'Could not log in')

        # Session should still contain the modified value
        response = self.client.get('/check_session/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'YES')

    def test_session_initiated(self):
        session = self.client.session
        session['session_var'] = 'foo'
        session.save()

        response = self.client.get('/check_session/')
        self.assertEqual(response.content, b'foo')

    def test_logout(self):
        """Logout should work whether the user is logged in or not (#9978)."""
        self.client.logout()
        login = self.client.login(username='testclient', password='password')
        self.assertTrue(login, 'Could not log in')
        self.client.logout()
        self.client.logout()

    def test_logout_with_user(self):
        """Logout should send user_logged_out signal if user was logged in."""
        def listener(*args, **kwargs):
            listener.executed = True
            self.assertEqual(kwargs['sender'], User)
        listener.executed = False

        user_logged_out.connect(listener)
        self.client.login(username='testclient', password='password')
        self.client.logout()
        user_logged_out.disconnect(listener)
        self.assertTrue(listener.executed)

    @override_settings(AUTH_USER_MODEL='test_client_regress.CustomUser')
    def test_logout_with_custom_user(self):
        """Logout should send user_logged_out signal if custom user was logged in."""
        def listener(*args, **kwargs):
            self.assertEqual(kwargs['sender'], CustomUser)
            listener.executed = True
        listener.executed = False
        u = CustomUser.custom_objects.create(email='[email protected]')
        u.set_password('password')
        u.save()

        user_logged_out.connect(listener)
        self.client.login(username='[email protected]', password='password')
        self.client.logout()
        user_logged_out.disconnect(listener)
        self.assertTrue(listener.executed)

    @override_settings(AUTHENTICATION_BACKENDS=(
        'django.contrib.auth.backends.ModelBackend',
        'test_client_regress.auth_backends.CustomUserBackend'))
    def test_logout_with_custom_auth_backend(self):
        "Request a logout after logging in with custom authentication backend"
        def listener(*args, **kwargs):
            self.assertEqual(kwargs['sender'], CustomUser)
            listener.executed = True
        listener.executed = False
        u = CustomUser.custom_objects.create(email='[email protected]')
        u.set_password('password')
        u.save()

        user_logged_out.connect(listener)
        self.client.login(username='[email protected]', password='password')
        self.client.logout()
        user_logged_out.disconnect(listener)
        self.assertTrue(listener.executed)

    def test_logout_without_user(self):
        """Logout should send signal even if user not authenticated."""
        def listener(user, *args, **kwargs):
            listener.user = user
            listener.executed = True
        listener.executed = False

        user_logged_out.connect(listener)
        self.client.login(username='incorrect', password='password')
        self.client.logout()
        user_logged_out.disconnect(listener)

        self.assertTrue(listener.executed)
        self.assertIsNone(listener.user)

    def test_login_with_user(self):
        """Login should send user_logged_in signal on successful login."""
        def listener(*args, **kwargs):
            listener.executed = True
        listener.executed = False

        user_logged_in.connect(listener)
        self.client.login(username='testclient', password='password')
        user_logged_in.disconnect(listener)

        self.assertTrue(listener.executed)

    def test_login_without_signal(self):
        """Login shouldn't send signal if user wasn't logged in"""
        def listener(*args, **kwargs):
            listener.executed = True
        listener.executed = False

        user_logged_in.connect(listener)
        self.client.login(username='incorrect', password='password')
        user_logged_in.disconnect(listener)

        self.assertFalse(listener.executed)


@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestMethodTests(SimpleTestCase):
    def test_get(self):
        "Request a view via request method GET"
        response = self.client.get('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: GET')

    def test_post(self):
        "Request a view via request method POST"
        response = self.client.post('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: POST')

    def test_head(self):
        "Request a view via request method HEAD"
        response = self.client.head('/request_methods/')
        self.assertEqual(response.status_code, 200)
        # A HEAD request doesn't return any content.
        self.assertNotEqual(response.content, b'request method: HEAD')
        self.assertEqual(response.content, b'')

    def test_options(self):
        "Request a view via request method OPTIONS"
        response = self.client.options('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: OPTIONS')

    def test_put(self):
        "Request a view via request method PUT"
        response = self.client.put('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: PUT')

    def test_delete(self):
        "Request a view via request method DELETE"
        response = self.client.delete('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: DELETE')

    def test_patch(self):
        "Request a view via request method PATCH"
        response = self.client.patch('/request_methods/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: PATCH')


@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestMethodStringDataTests(SimpleTestCase):
    def test_post(self):
        "Request a view with string data via request method POST"
        # Regression test for #11371
        data = '{"test": "json"}'
        response = self.client.post('/request_methods/', data=data, content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: POST')

    def test_put(self):
        "Request a view with string data via request method PUT"
        # Regression test for #11371
        data = '{"test": "json"}'
        response = self.client.put('/request_methods/', data=data, content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: PUT')

    def test_patch(self):
        "Request a view with string data via request method PATCH"
        # Regression test for #17797
        data = '{"test": "json"}'
        response = self.client.patch('/request_methods/', data=data, content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'request method: PATCH')

    def test_empty_string_data(self):
        "Request a view with empty string data via request method GET/POST/HEAD"
        # Regression test for #21740
        response = self.client.get('/body/', data='', content_type='application/json')
        self.assertEqual(response.content, b'')
        response = self.client.post('/body/', data='', content_type='application/json')
        self.assertEqual(response.content, b'')
        response = self.client.head('/body/', data='', content_type='application/json')
        self.assertEqual(response.content, b'')

    def test_json_bytes(self):
        response = self.client.post('/body/', data=b"{'value': 37}", content_type='application/json')
        self.assertEqual(response.content, b"{'value': 37}")

    def test_json(self):
        response = self.client.get('/json_response/')
        self.assertEqual(response.json(), {'key': 'value'})

    def test_json_charset(self):
        response = self.client.get('/json_response_latin1/')
        self.assertEqual(response.charset, 'latin1')
        self.assertEqual(response.json(), {'a': 'Å'})

    def test_json_structured_suffixes(self):
        valid_types = (
            'application/vnd.api+json',
            'application/vnd.api.foo+json',
            'application/json; charset=utf-8',
            'application/activity+json',
            'application/activity+json; charset=utf-8',
        )
        for content_type in valid_types:
            response = self.client.get('/json_response/', {'content_type': content_type})
            self.assertEqual(response['Content-Type'], content_type)
            self.assertEqual(response.json(), {'key': 'value'})

    def test_json_multiple_access(self):
        response = self.client.get('/json_response/')
        self.assertIs(response.json(), response.json())

    def test_json_wrong_header(self):
        response = self.client.get('/body/')
        msg = 'Content-Type header is "text/html; charset=utf-8", not "application/json"'
        with self.assertRaisesMessage(ValueError, msg):
            self.assertEqual(response.json(), {'key': 'value'})


@override_settings(ROOT_URLCONF='test_client_regress.urls')
class QueryStringTests(SimpleTestCase):
    def test_get_like_requests(self):
        for method_name in ('get', 'head'):
            # A GET-like request can pass a query string as data (#10571)
            method = getattr(self.client, method_name)
            response = method("/request_data/", data={'foo': 'whiz'})
            self.assertEqual(response.context['get-foo'], 'whiz')

            # A GET-like request can pass a query string as part of the URL
            response = method("/request_data/?foo=whiz")
            self.assertEqual(response.context['get-foo'], 'whiz')

            # Data provided in the URL to a GET-like request is overridden by actual form data
            response = method("/request_data/?foo=whiz", data={'foo': 'bang'})
            self.assertEqual(response.context['get-foo'], 'bang')

            response = method("/request_data/?foo=whiz", data={'bar': 'bang'})
            self.assertIsNone(response.context['get-foo'])
            self.assertEqual(response.context['get-bar'], 'bang')

    def test_post_like_requests(self):
        # A POST-like request can pass a query string as data
        response = self.client.post("/request_data/", data={'foo': 'whiz'})
        self.assertIsNone(response.context['get-foo'])
        self.assertEqual(response.context['post-foo'], 'whiz')

        # A POST-like request can pass a query string as part of the URL
        response = self.client.post("/request_data/?foo=whiz")
        self.assertEqual(response.context['get-foo'], 'whiz')
        self.assertIsNone(response.context['post-foo'])

        # POST data provided in the URL augments actual form data
        response = self.client.post("/request_data/?foo=whiz", data={'foo': 'bang'})
        self.assertEqual(response.context['get-foo'], 'whiz')
        self.assertEqual(response.context['post-foo'], 'bang')

        response = self.client.post("/request_data/?foo=whiz", data={'bar': 'bang'})
        self.assertEqual(response.context['get-foo'], 'whiz')
        self.assertIsNone(response.context['get-bar'])
        self.assertIsNone(response.context['post-foo'])
        self.assertEqual(response.context['post-bar'], 'bang')
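# Illustrative summary sketch (not part of the original suite) of the
# precedence rules exercised above: for GET-like methods, `data` replaces the
# URL query string entirely, while for POST the URL query and the form body
# travel separately (request.GET vs. request.POST).
def _query_precedence_sketch(client):
    response = client.get('/request_data/?foo=whiz', data={'bar': 'bang'})
    assert response.context['get-foo'] is None   # URL query was replaced
    assert response.context['get-bar'] == 'bang'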
test_simple_payload(self): """Simple ASCII-only text can be POSTed.""" text = 'English: mountain pass' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain') self.assertEqual(response.content, text.encode()) def test_utf8_payload(self): """Non-ASCII data encoded as UTF-8 can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-8') self.assertEqual(response.content, text.encode()) def test_utf16_payload(self): """Non-ASCII data encoded as UTF-16 can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-16') self.assertEqual(response.content, text.encode('utf-16')) def test_non_utf_payload(self): """Non-ASCII data encoded with a non-UTF-based encoding can be POSTed.""" text = 'dog: собака' response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=koi8-r') self.assertEqual(response.content, text.encode('koi8-r')) class DummyFile: def __init__(self, filename): self.name = filename def read(self): return b'TEST_FILE_CONTENT' class UploadedFileEncodingTest(SimpleTestCase): def test_file_encoding(self): encoded_file = encode_file('TEST_BOUNDARY', 'TEST_KEY', DummyFile('test_name.bin')) self.assertEqual(b'--TEST_BOUNDARY', encoded_file[0]) self.assertEqual(b'Content-Disposition: form-data; name="TEST_KEY"; filename="test_name.bin"', encoded_file[1]) self.assertEqual(b'TEST_FILE_CONTENT', encoded_file[-1]) def test_guesses_content_type_on_file_encoding(self): self.assertEqual(b'Content-Type: application/octet-stream', encode_file('IGNORE', 'IGNORE', DummyFile("file.bin"))[2]) self.assertEqual(b'Content-Type: text/plain', encode_file('IGNORE', 'IGNORE', DummyFile("file.txt"))[2]) self.assertIn(encode_file('IGNORE', 'IGNORE', DummyFile("file.zip"))[2], ( b'Content-Type: application/x-compress', b'Content-Type: application/x-zip', b'Content-Type: application/x-zip-compressed', b'Content-Type: application/zip',)) self.assertEqual(b'Content-Type: application/octet-stream', encode_file('IGNORE', 'IGNORE', DummyFile("file.unknown"))[2]) @override_settings(ROOT_URLCONF='test_client_regress.urls',) class RequestHeadersTest(SimpleTestCase): def test_client_headers(self): "A test client can receive custom headers" response = self.client.get("/check_headers/", HTTP_X_ARG_CHECK='Testing 123') self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") self.assertEqual(response.status_code, 200) def test_client_headers_redirect(self): "Test client headers are preserved through redirects" response = self.client.get("/check_headers_redirect/", follow=True, HTTP_X_ARG_CHECK='Testing 123') self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123") self.assertRedirects(response, '/check_headers/', status_code=302, target_status_code=200) @override_settings(ROOT_URLCONF='test_client_regress.urls') class ReadLimitedStreamTest(SimpleTestCase): """ HttpRequest.body, HttpRequest.read(), and HttpRequest.read(BUFFER) have proper LimitedStream behavior.
Refs #14753, #15785 """ def test_body_from_empty_request(self): """HttpRequest.body on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/body/").content, b'') def test_read_from_empty_request(self): """HttpRequest.read() on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/read_all/").content, b'') def test_read_numbytes_from_empty_request(self): """HttpRequest.read(LARGE_BUFFER) on a test client GET request should return the empty string.""" self.assertEqual(self.client.get("/read_buffer/").content, b'') def test_read_from_nonempty_request(self): """HttpRequest.read() on a test client PUT request with some payload should return that payload.""" payload = b'foobar' self.assertEqual(self.client.put("/read_all/", data=payload, content_type='text/plain').content, payload) def test_read_numbytes_from_nonempty_request(self): """HttpRequest.read(LARGE_BUFFER) on a test client PUT request with some payload should return that payload.""" payload = b'foobar' self.assertEqual(self.client.put("/read_buffer/", data=payload, content_type='text/plain').content, payload) @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestFactoryStateTest(SimpleTestCase): """Regression tests for #15929.""" # These tests are checking that certain middleware don't change certain # global state. Alternatively, from the point of view of a test, they are # ensuring test isolation behavior. So, unusually, it doesn't make sense to # run the tests individually, and if any are failing it is confusing to run # them with any other set of tests. def common_test_that_should_always_pass(self): request = RequestFactory().get('/') request.session = {} self.assertFalse(hasattr(request, 'user')) def test_request(self): self.common_test_that_should_always_pass() def test_request_after_client(self): # apart from the next line the three tests are identical self.client.get('/') self.common_test_that_should_always_pass() def test_request_after_client_2(self): # This test is executed after the previous one self.common_test_that_should_always_pass() @override_settings(ROOT_URLCONF='test_client_regress.urls') class RequestFactoryEnvironmentTests(SimpleTestCase): """ Regression tests for #8551 and #17067: ensure that environment variables are set correctly in RequestFactory. """ def test_should_set_correct_env_variables(self): request = RequestFactory().get('/path/') self.assertEqual(request.META.get('REMOTE_ADDR'), '127.0.0.1') self.assertEqual(request.META.get('SERVER_NAME'), 'testserver') self.assertEqual(request.META.get('SERVER_PORT'), '80') self.assertEqual(request.META.get('SERVER_PROTOCOL'), 'HTTP/1.1') self.assertEqual(request.META.get('SCRIPT_NAME') + request.META.get('PATH_INFO'), '/path/') def test_cookies(self): factory = RequestFactory() factory.cookies.load('A="B"; C="D"; Path=/; Version=1') request = factory.get('/') self.assertEqual(request.META['HTTP_COOKIE'], 'A="B"; C="D"')
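# A minimal sketch (not part of the original suite) of the pattern the
# RequestFactory tests above exercise: the factory builds a bare WSGI request
# with the default environ (REMOTE_ADDR, SERVER_NAME, SERVER_PORT, ...), so a
# view function can be called directly, bypassing the URL resolver and
# middleware. `echo_method` is a throwaway view defined here for illustration;
# it mirrors views.request_methods_view. Assumes Django settings are
# configured, as they are when the test suite runs.
from django.http import HttpResponse
from django.test import RequestFactory


def echo_method(request):
    # Hypothetical stand-in for views.request_methods_view.
    return HttpResponse('request method: %s' % request.method)


request = RequestFactory().get('/request_methods/')
response = echo_method(request)
assert response.content == b'request method: GET'
assert request.META['REMOTE_ADDR'] == '127.0.0.1'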
from django.urls import include, path from django.views.generic import RedirectView from . import views urlpatterns = [ path('', include('test_client.urls')), path('no_template_view/', views.no_template_view), path('staff_only/', views.staff_only_view), path('get_view/', views.get_view), path('request_data/', views.request_data), path('request_data_extended/', views.request_data, {'template': 'extended.html', 'data': 'bacon'}), path('arg_view/<name>/', views.view_with_argument, name='arg_view'), path('nested_view/', views.nested_view, name='nested_view'), path('login_protected_redirect_view/', views.login_protected_redirect_view), path('redirects/', RedirectView.as_view(url='/redirects/further/')), path('redirects/further/', RedirectView.as_view(url='/redirects/further/more/')), path('redirects/further/more/', RedirectView.as_view(url='/no_template_view/')), path('redirect_to_non_existent_view/', RedirectView.as_view(url='/non_existent_view/')), path('redirect_to_non_existent_view2/', RedirectView.as_view(url='/redirect_to_non_existent_view/')), path('redirect_to_self/', RedirectView.as_view(url='/redirect_to_self/')), path('redirect_to_self_with_changing_query_view/', views.redirect_to_self_with_changing_query_view), path('circular_redirect_1/', RedirectView.as_view(url='/circular_redirect_2/')), path('circular_redirect_2/', RedirectView.as_view(url='/circular_redirect_3/')), path('circular_redirect_3/', RedirectView.as_view(url='/circular_redirect_1/')), path('redirect_other_host/', RedirectView.as_view(url='https://otherserver:8443/no_template_view/')), path('redirect_based_on_extra_headers_1/', views.redirect_based_on_extra_headers_1_view), path('redirect_based_on_extra_headers_2/', views.redirect_based_on_extra_headers_2_view), path('set_session/', views.set_session_view), path('check_session/', views.check_session_view), path('request_methods/', views.request_methods_view), path('check_unicode/', views.return_unicode), path('check_binary/', views.return_undecodable_binary), path('json_response/', views.return_json_response), path('json_response_latin1/', views.return_json_response_latin1), path('parse_encoded_text/', views.return_text_file), path('check_headers/', views.check_headers), path('check_headers_redirect/', RedirectView.as_view(url='/check_headers/')), path('body/', views.body), path('read_all/', views.read_all), path('read_buffer/', views.read_buffer), path('request_context_view/', views.request_context_view), path('render_template_multiple_times/', views.render_template_multiple_times), ]
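# A minimal sketch (not part of the original urlconf) of the escaping
# behavior that the 'arg_view' route exercises: reverse() percent-encodes the
# space in the argument, and the resolver decodes it again before calling
# views.view_with_argument. Assumes this module is the active ROOT_URLCONF
# (the tests above set ROOT_URLCONF='test_client_regress.urls') and that it
# runs after URL configuration is loaded, e.g. inside a test.
from django.urls import reverse

url = reverse('arg_view', args=['Arthur Dent'])
assert url == '/arg_view/Arthur%20Dent/'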
from urllib.parse import urlencode from django.conf import settings from django.contrib.auth.decorators import login_required from django.http import HttpResponse, HttpResponseRedirect, JsonResponse from django.shortcuts import render from django.template.loader import render_to_string from django.test import Client from django.test.client import CONTENT_TYPE_RE class CustomTestException(Exception): pass def no_template_view(request): "A simple view that expects a GET request and returns a plain HttpResponse without using a template" return HttpResponse("No template used. Sample content: twice once twice. Content ends.") def staff_only_view(request): "A view that can only be visited by staff. Non-staff members get an exception" if request.user.is_staff: return HttpResponse() else: raise CustomTestException() @login_required def get_view(request): "A simple login protected view" return HttpResponse("Hello world") def request_data(request, template='base.html', data='sausage'): "A simple view that returns the request data in the context" return render(request, template, { 'get-foo': request.GET.get('foo'), 'get-bar': request.GET.get('bar'), 'post-foo': request.POST.get('foo'), 'post-bar': request.POST.get('bar'), 'data': data, }) def view_with_argument(request, name): """A view that takes a string argument The purpose of this view is to check that if a space is provided in the argument, the test framework unescapes the %20 before passing the value to the view. """ if name == 'Arthur Dent': return HttpResponse('Hi, Arthur') else: return HttpResponse('Howdy, %s' % name) def nested_view(request): """ A view that uses the test client to call another view. """ c = Client() c.get("/no_template_view/") return render(request, 'base.html', {'nested': 'yes'}) @login_required def login_protected_redirect_view(request): "A view that redirects all requests to the GET view" return HttpResponseRedirect('/get_view/') def redirect_to_self_with_changing_query_view(request): query = request.GET.copy() query['counter'] += '0' return HttpResponseRedirect('/redirect_to_self_with_changing_query_view/?%s' % urlencode(query)) def set_session_view(request): "A view that sets a session variable" request.session['session_var'] = 'YES' return HttpResponse('set_session') def check_session_view(request): "A view that reads a session variable" return HttpResponse(request.session.get('session_var', 'NO')) def request_methods_view(request): "A view that responds with the request method" return HttpResponse('request method: %s' % request.method) def return_unicode(request): return render(request, 'unicode.html') def return_undecodable_binary(request): return HttpResponse( b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e ReportLab Generated PDF document http://www.reportlab.com' ) def return_json_response(request): content_type = request.GET.get('content_type') kwargs = {'content_type': content_type} if content_type else {} return JsonResponse({'key': 'value'}, **kwargs) def return_json_response_latin1(request): return HttpResponse(b'{"a":"\xc5"}', content_type='application/json; charset=latin1') def return_text_file(request): "A view that parses and returns text as a file."
match = CONTENT_TYPE_RE.match(request.META['CONTENT_TYPE']) if match: charset = match.group(1) else: charset = settings.DEFAULT_CHARSET return HttpResponse(request.body, status=200, content_type='text/plain; charset=%s' % charset) def check_headers(request): "A view that responds with the value of the X-ARG-CHECK header" return HttpResponse('HTTP_X_ARG_CHECK: %s' % request.META.get('HTTP_X_ARG_CHECK', 'Undefined')) def body(request): "A view that is requested with GET and accesses request.body. Refs #14753." return HttpResponse(request.body) def read_all(request): "A view that accesses request.read()." return HttpResponse(request.read()) def read_buffer(request): "A view that accesses request.read(LARGE_BUFFER)." return HttpResponse(request.read(99999)) def request_context_view(request): # Special attribute that won't be present on a plain HttpRequest request.special_path = request.path return render(request, 'request_context.html') def render_template_multiple_times(request): """A view that renders a template multiple times.""" return HttpResponse( render_to_string('base.html') + render_to_string('base.html')) def redirect_based_on_extra_headers_1_view(request): if 'HTTP_REDIRECT' in request.META: return HttpResponseRedirect('/redirect_based_on_extra_headers_2/') return HttpResponse() def redirect_based_on_extra_headers_2_view(request): if 'HTTP_REDIRECT' in request.META: return HttpResponseRedirect('/redirects/further/more/') return HttpResponse()
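# A minimal sketch (not part of the original views) of the charset extraction
# used by return_text_file() above: CONTENT_TYPE_RE, imported at the top of
# this module from django.test.client, captures the charset parameter of a
# Content-Type header; when it doesn't match, the view falls back to
# settings.DEFAULT_CHARSET. Re-imported here so the snippet is self-contained;
# behavior as of the Django version this suite targets.
from django.test.client import CONTENT_TYPE_RE

match = CONTENT_TYPE_RE.match('text/plain; charset=koi8-r')
assert match is not None and match.group(1) == 'koi8-r'
# Without a charset parameter there is no match at all.
assert CONTENT_TYPE_RE.match('text/plain') is None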
# Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. import copy import io import os import pickle import re import shutil import tempfile import threading import time import unittest from unittest import mock from django.conf import settings from django.core import management, signals from django.core.cache import ( DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches, ) from django.core.cache.utils import make_template_fragment_key from django.db import close_old_connections, connection, connections from django.http import ( HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse, ) from django.middleware.cache import ( CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.middleware.csrf import CsrfViewMiddleware from django.template import engines from django.template.context_processors import csrf from django.template.response import TemplateResponse from django.test import ( RequestFactory, SimpleTestCase, TestCase, TransactionTestCase, override_settings, ) from django.test.signals import setting_changed from django.utils import timezone, translation from django.utils.cache import ( get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers, ) from django.views.decorators.cache import cache_control, cache_page from .models import Poll, expensive_calculation # functions/classes for complex data type tests def f(): return 42 class C: def m(n): return 24 class Unpicklable: def __getstate__(self): raise pickle.PickleError() KEY_ERRORS_WITH_MEMCACHED_MSG = ( 'Cache key contains characters that will cause errors if used with ' 'memcached: %r' ) @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class DummyCacheTests(SimpleTestCase): # The Dummy cache backend doesn't really behave like a test backend, # so it has its own test case. 
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertTrue(result) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Nonexistent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_get_many_invalid_key(self): with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'): cache.get_many(['key with spaces']) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertIsNone(cache.get("key1")) cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertFalse(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr('answer') with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr('answer') with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_touch(self): """Dummy cache can't do touch().""" self.assertIs(cache.touch('whatever'), False) def test_data_types(self): "All data types are ignored equally by the dummy cache" stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertIsNone(cache.get("stuff")) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertIsNone(cache.get("expire2")) self.assertFalse(cache.has_key("expire3")) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" self.assertEqual(cache.set_many({'a': 1, 'b': 2}), []) self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), []) def test_set_many_invalid_key(self): with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'): cache.set_many({'key with spaces': 'foo'}) def 
test_delete_many(self): "delete_many does nothing for the dummy cache backend" cache.delete_many(['a', 'b']) def test_delete_many_invalid_key(self): with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'): cache.delete_many({'key with spaces': 'foo'}) def test_clear(self): "clear does nothing for the dummy cache backend" cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.incr_version('answer') with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" cache.set('answer', 42) with self.assertRaises(ValueError): cache.decr_version('answer') with self.assertRaises(ValueError): cache.decr_version('does_not_exist') def test_get_or_set(self): self.assertEqual(cache.get_or_set('mykey', 'default'), 'default') self.assertEqual(cache.get_or_set('mykey', None), None) def test_get_or_set_callable(self): def my_callable(): return 'default' self.assertEqual(cache.get_or_set('mykey', my_callable), 'default') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default') def custom_key_func(key, key_prefix, version): "A customized cache key function" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, exclude=None, **params): # `base` is used to pull in the memcached config from the original settings, # `exclude` is a set of cache names denoting which `_caches_setting_base` keys # should be omitted. # `params` are test-specific overrides and `_caches_setting_base` is the # base config for the tests.
# This results in the following search order: # params -> _caches_setting_base -> base base = base or {} exclude = exclude or set() setting = {k: base.copy() for k in _caches_setting_base if k not in exclude} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests: # A common set of tests to apply to all cache backends factory = RequestFactory() def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_default_used_when_none_is_set(self): """If None is cached, get() returns it instead of the default.""" cache.set('key_default_none', None) self.assertIsNone(cache.get('key_default_none', default='default')) def test_add(self): # A key can be added to a cache cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertFalse(result) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for cache key conflicts between caches that share a backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertFalse(caches['prefix'].has_key('somekey')) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): """Nonexistent cache keys return as None/default.""" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(cache.get("key1"), "spam") cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertTrue(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) cache.set("no_expiry", "here", None) self.assertTrue(cache.has_key("no_expiry")) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) with self.assertRaises(ValueError): cache.incr('does_not_exist') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) with self.assertRaises(ValueError): cache.decr('does_not_exist') def test_close(self): self.assertTrue(hasattr(cache, 'close')) cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list':
[1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertEqual(cache.get("expire2"), "newvalue") self.assertFalse(cache.has_key("expire3")) def test_touch(self): # cache.touch() updates the timeout. cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1', timeout=4), True) time.sleep(2) self.assertTrue(cache.has_key('expire1')) time.sleep(3) self.assertFalse(cache.has_key('expire1')) # cache.touch() works without the timeout argument. 
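# In that case the backend's default timeout applies (300 seconds unless TIMEOUT is overridden), so the key survives the sleep below.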
cache.set('expire1', 'very quickly', timeout=1) self.assertIs(cache.touch('expire1'), True) time.sleep(2) self.assertTrue(cache.has_key('expire1')) self.assertIs(cache.touch('nonexistent'), False) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): with self.subTest(key=key): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): with self.subTest(key=key): cache.delete(key) cache.add(key, value) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): cache.delete(key) cache.set_many(stuff) for (key, value) in stuff.items(): with self.subTest(key=key): self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add cache.add('binary1-add', compressed_value) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_returns_empty_list_on_success(self): """set_many() returns an empty list when all keys are inserted.""" failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'}) self.assertEqual(failing_keys, []) def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'}) cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set_many({'key1': 'spam', 'key2': 'eggs'}) cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): """ Follow memcached's convention where a timeout greater than 30 days is treated as an absolute expiration timestamp instead of a relative offset (#12399).
""" cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): """ Passing in None into timeout results in a value that is cached forever """ cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', None) self.assertEqual(cache.get('key2'), 'ham') added = cache.add('key1', 'new eggs', None) self.assertIs(added, False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') cache.set('key5', 'belgian fries', timeout=1) cache.touch('key5', timeout=None) time.sleep(2) self.assertEqual(cache.get('key5'), 'belgian fries') def test_zero_timeout(self): """ Passing in zero into timeout results in a value that is not cached """ cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) cache.add('key2', 'ham', 0) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) cache.set('key5', 'belgian fries', timeout=5) cache.touch('key5', timeout=0) self.assertIsNone(cache.get('key5')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache, initial_count, final_count): # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count += 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test(caches['cull'], 50, 29) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 19) def _perform_invalid_key_test(self, key, expected_warning): """ All the builtin backends (except memcached, see below) should warn on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. """ # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func try: with self.assertWarnsMessage(CacheKeyWarning, expected_warning): cache.set(key, 'value') finally: cache.key_func = old_func def test_invalid_key_characters(self): # memcached doesn't allow whitespace or control characters in keys. key = 'key with spaces and 清' self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key) def test_invalid_key_length(self): # memcached limits key length to 250. 
key = ('a' * 250) + '清' expected_warning = ( 'Cache key will cause errors if used with memcached: ' '%r (longer than %s)' % (key, 250) ) self._perform_invalid_key_test(key, expected_warning) def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 cache.add('answer1', 42, version=2) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=2) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=1) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 caches['v2'].add('answer2', 42) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37, version=1) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 caches['v2'].add('answer3', 42, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertTrue(cache.has_key('answer1')) self.assertTrue(cache.has_key('answer1', version=1)) self.assertFalse(cache.has_key('answer1', version=2)) 
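# The 'v2' alias defaults to version 2, so the key written at version 1 through the default cache is invisible there unless version=1 is passed explicitly.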
self.assertFalse(caches['v2'].has_key('answer1')) self.assertTrue(caches['v2'].has_key('answer1', version=1)) self.assertFalse(caches['v2'].has_key('answer1', version=2)) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.delete('answer1') self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.delete('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].delete('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].delete('answer4', version=1) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.incr('answer1') self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) cache.decr('answer1') self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.incr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) cache.decr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].incr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) caches['v2'].decr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].incr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) caches['v2'].decr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) 
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) with self.assertRaises(ValueError): cache.incr_version('does_not_exist') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) with self.assertRaises(ValueError): cache.decr_version('does_not_exist', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other 
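# (custom_key and custom_key2 reference the same key function, once by callable and once by dotted path, so they share keys with each other but not with the default cache.)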
cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpicklable_object(self): update_middleware = UpdateCacheMiddleware() update_middleware.cache = cache fetch_middleware = FetchFromCacheMiddleware() fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) response = HttpResponse() content = 'Testing cookie serialization.' response.content = content response.set_cookie('foo', 'bar') update_middleware.process_response(request, response) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) update_middleware.process_response(request, get_cache_data) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): # Shouldn't fail silently if trying to cache an unpicklable type. with self.assertRaises(pickle.PickleError): cache.add('unpicklable', Unpicklable()) def test_set_fail_on_pickleerror(self): with self.assertRaises(pickle.PickleError): cache.set('unpicklable', Unpicklable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) self.assertEqual(cache.get_or_set('null', None), None) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value') def test_get_or_set_callable_returning_none(self): self.assertIsNone(cache.get_or_set('mykey', lambda: None)) # Previous get_or_set() doesn't store None in the cache. self.assertEqual(cache.get('mykey', 'default'), 'default') def test_get_or_set_version(self): msg = "get_or_set() missing 1 required positional argument: 'default'" cache.get_or_set('brian', 1979, version=2) with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian') with self.assertRaisesMessage(TypeError, msg): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) def test_get_or_set_racing(self): with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add: # Simulate cache.add() failing to add a value. In that case, the # default value should be returned. 
cache_add.return_value = False self.assertEqual(cache.get_or_set('key', 'default'), 'default') @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Spaces are used in the table name to ensure quoting/escaping is working LOCATION='test cache table' )) class DBCacheTests(BaseCacheTests, TransactionTestCase): available_apps = ['cache'] def setUp(self): # The super call needs to happen first for the settings override. super().setUp() self.create_table() def tearDown(self): # The super call needs to happen first because it uses the database. super().tearDown() self.drop_table() def create_table(self): management.call_command('createcachetable', verbosity=0) def drop_table(self): with connection.cursor() as cursor: table_name = connection.ops.quote_name('test cache table') cursor.execute('DROP TABLE %s' % table_name) def test_get_many_num_queries(self): cache.set_many({'a': 1, 'b': 2}) cache.set('expired', 'expired', 0.01) with self.assertNumQueries(1): self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2}) time.sleep(0.02) with self.assertNumQueries(2): self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2}) def test_delete_many_num_queries(self): cache.set_many({'a': 1, 'b': 2, 'c': 3}) with self.assertNumQueries(1): cache.delete_many(['a', 'b', 'c']) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 18) def test_second_call_doesnt_crash(self): out = io.StringIO() management.call_command('createcachetable', stdout=out) self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Use another table name to avoid the 'table already exists' message. LOCATION='createcachetable_dry_run_mode' )) def test_createcachetable_dry_run_mode(self): out = io.StringIO() management.call_command('createcachetable', dry_run=True, stdout=out) output = out.getvalue() self.assertTrue(output.startswith("CREATE TABLE")) def test_createcachetable_with_table_argument(self): """ Delete and recreate cache table with legacy behavior (explicitly specifying the table name).
""" self.drop_table() out = io.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter: """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): databases = {'default', 'other'} @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0) class PicklingSideEffect: def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): self.locked = self.cache._lock.locked() return {} limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 9}, )) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super().setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") cache.add('add', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] cache.incr(key) self.assertEqual(expire, cache._expire_info[_key]) cache.decr(key) self.assertEqual(expire, cache._expire_info[_key]) @limit_locmem_entries def test_lru_get(self): """get() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) @limit_locmem_entries def test_lru_set(self): """set() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(3, 9): cache.set(key, key, timeout=None) cache.set(9, 9, timeout=None) for key in range(3, 10): self.assertEqual(cache.get(key), key) for key in range(3): self.assertIsNone(cache.get(key)) @limit_locmem_entries def test_lru_incr(self): """incr() moves cache keys.""" for key in range(9): cache.set(key, key, timeout=None) for key in range(6): cache.incr(key) cache.set(9, 9, timeout=None) for key in range(6): self.assertEqual(cache.get(key), key + 1) for key in range(6, 9): self.assertIsNone(cache.get(key)) self.assertEqual(cache.get(9), 9) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. configured_caches = {} for _cache_params in settings.CACHES.values(): configured_caches[_cache_params['BACKEND']] = _cache_params MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache') PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache') # The memcached backends don't support cull-related options like `MAX_ENTRIES`. memcached_excluded_caches = {'cull', 'zero_cull'} class BaseMemcachedTests(BaseCacheTests): # By default it's assumed that the client doesn't clean up connections # properly, in which case the backend must do so after each request. 
should_disconnect_on_close = True def test_location_multiple_servers(self): locations = [ ['server1.tld', 'server2:11211'], 'server1.tld;server2:11211', 'server1.tld,server2:11211', ] for location in locations: with self.subTest(location=location): params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location} with self.settings(CACHES={'default': params}): self.assertEqual(cache._servers, ['server1.tld', 'server2:11211']) def test_invalid_key_characters(self): """ On memcached, we don't introduce a duplicate key validation step (for speed reasons); we just let the memcached API library raise its own exception on bad keys. Refs #6447. In order to be memcached-API-library agnostic, we only assert that a generic exception of some kind is raised. """ # memcached does not allow whitespace or control characters in keys # when using the ascii protocol. with self.assertRaises(Exception): cache.set('key with spaces', 'value') def test_invalid_key_length(self): # memcached limits key length to 250 with self.assertRaises(Exception): cache.set('a' * 251, 'value') def test_default_never_expiring_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, TIMEOUT=None)): cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') def test_default_far_future_timeout(self): # Regression test for #22845 with self.settings(CACHES=caches_setting_for_tests( base=self.base_params, exclude=memcached_excluded_caches, # 60*60*24*365, 1 year TIMEOUT=31536000)): cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_cull(self): # Culling isn't implemented; memcached deals with it. pass def test_zero_cull(self): # Culling isn't implemented; memcached deals with it. pass def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. # pylibmc doesn't appear to expose SERVER_MAX_VALUE_LENGTH, so this falls # back to the default value exposed by python-memcached. max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576) cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) try: cache.set('small_value', large_value) except Exception: # Some clients (e.g. pylibmc) raise when the value is too large, # while others (e.g. python-memcached) intentionally return True # indicating success. This test is primarily checking that the key # was deleted, so the return/exception behavior for the set() # itself is not important. pass # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) def test_close(self): # For clients that don't manage their connections properly, the # connection is closed when the request is complete.
signals.request_finished.disconnect(close_old_connections) try: with mock.patch.object(cache._lib.Client, 'disconnect_all', autospec=True) as mock_disconnect: signals.request_finished.send(self.__class__) self.assertIs(mock_disconnect.called, self.should_disconnect_on_close) finally: signals.request_finished.connect(close_old_connections) def test_set_many_returns_failing_keys(self): def fail_set_multi(mapping, *args, **kwargs): return mapping.keys() with mock.patch('%s.Client.set_multi' % self.client_library_name, side_effect=fail_set_multi): failing_keys = cache.set_many({'key': 'value'}) self.assertEqual(failing_keys, ['key']) @unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, )) class MemcachedCacheTests(BaseMemcachedTests, TestCase): base_params = MemcachedCache_params client_library_name = 'memcache' def test_memcached_uses_highest_pickle_version(self): # Regression test for #19810 for cache_key in settings.CACHES: with self.subTest(cache_key=cache_key): self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) @override_settings(CACHES=caches_setting_for_tests( base=MemcachedCache_params, exclude=memcached_excluded_caches, OPTIONS={'server_max_value_length': 9999}, )) def test_memcached_options(self): self.assertEqual(cache._cache.server_max_value_length, 9999) def test_default_used_when_none_is_set(self): """ python-memcached doesn't support default in get() so this test overrides the one in BaseCacheTests. """ cache.set('key_default_none', None) self.assertEqual(cache.get('key_default_none', default='default'), 'default') @unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured") @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, )) class PyLibMCCacheTests(BaseMemcachedTests, TestCase): base_params = PyLibMCCache_params client_library_name = 'pylibmc' # libmemcached manages its own connections. should_disconnect_on_close = False # By default, pylibmc/libmemcached don't verify keys client-side and so # this test triggers a server-side bug that causes later tests to fail # (#19914). The `verify_keys` behavior option could be set to True (which # would avoid triggering the server-side bug), however this test would # still fail due to https://github.com/lericson/pylibmc/issues/219. @unittest.skip("triggers a memcached-server bug, causing subsequent tests to fail") def test_invalid_key_characters(self): pass @override_settings(CACHES=caches_setting_for_tests( base=PyLibMCCache_params, exclude=memcached_excluded_caches, OPTIONS={ 'binary': True, 'behaviors': {'tcp_nodelay': True}, }, )) def test_pylibmc_options(self): self.assertTrue(cache._cache.binary) self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super().setUp() self.dirname = tempfile.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
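        # Point every configured alias at the temporary directory, then
        # fire setting_changed so the already-initialized cache connections
        # pick up the new LOCATION.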
        for cache_params in settings.CACHES.values():
            cache_params.update({'LOCATION': self.dirname})
        setting_changed.send(self.__class__, setting='CACHES', enter=False)

    def tearDown(self):
        super().tearDown()
        # Call parent first, as cache.clear() may recreate cache base directory
        shutil.rmtree(self.dirname)

    def test_ignores_non_cache_files(self):
        fname = os.path.join(self.dirname, 'not-a-cache-file')
        with open(fname, 'w'):
            os.utime(fname, None)
        cache.clear()
        self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non-cache files')
        os.remove(fname)

    def test_clear_does_not_remove_cache_dir(self):
        cache.clear()
        self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir')

    def test_creates_cache_dir_if_nonexistent(self):
        os.rmdir(self.dirname)
        cache.set('foo', 'bar')
        self.assertTrue(os.path.exists(self.dirname))

    def test_get_ignores_enoent(self):
        cache.set('foo', 'bar')
        os.unlink(cache._key_to_file('foo'))
        # Returns the default instead of erroring.
        self.assertEqual(cache.get('foo', 'baz'), 'baz')

    def test_get_does_not_ignore_non_filenotfound_exceptions(self):
        with mock.patch('builtins.open', side_effect=OSError):
            with self.assertRaises(OSError):
                cache.get('foo')

    def test_empty_cache_file_considered_expired(self):
        cache_file = cache._key_to_file('foo')
        with open(cache_file, 'wb') as fh:
            fh.write(b'')
        with open(cache_file, 'rb') as fh:
            self.assertIs(cache._is_expired(fh), True)


@override_settings(CACHES={
    'default': {
        'BACKEND': 'cache.liberal_backend.CacheClass',
    },
})
class CustomCacheKeyValidationTests(SimpleTestCase):
    """
    Tests for the ability to mixin a custom ``validate_key`` method to
    a custom cache backend that otherwise inherits from a builtin
    backend, and override the default key validation. Refs #6447.
    """
    def test_custom_key_validation(self):
        # this key is both longer than 250 characters, and has spaces
        key = 'some key with spaces' * 15
        val = 'a value'
        cache.set(key, val)
        self.assertEqual(cache.get(key), val)


@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'cache.closeable_cache.CacheClass',
        }
    }
)
class CacheClosingTests(SimpleTestCase):

    def test_close(self):
        self.assertFalse(cache.closed)
        signals.request_finished.send(self.__class__)
        self.assertTrue(cache.closed)


DEFAULT_MEMORY_CACHES_SETTINGS = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None


class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
    """
    Settings having Cache arguments with a TIMEOUT=None create Caches that will
    set non-expiring keys.
    """
    def setUp(self):
        # The 5 minute (300 seconds) default expiration time for keys is
        # defined in the implementation of the initializer method of the
        # BaseCache type.
        self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout

    def tearDown(self):
        del self.DEFAULT_TIMEOUT

    def test_default_expiration_time_for_keys_is_5_minutes(self):
        """The default expiration time of a cache key is 5 minutes.

        This value is defined in
        django.core.cache.backends.base.BaseCache.__init__().
        """
        self.assertEqual(300, self.DEFAULT_TIMEOUT)

    def test_caches_with_unset_timeout_has_correct_default_timeout(self):
        """Caches that have the TIMEOUT parameter undefined in the default
        settings will use the default 5 minute timeout.
""" cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ALLOWED_HOSTS=['.example.com'], ) class CacheUtils(SimpleTestCase): """TestCase for django.utils.cache functions.""" host = 'www.example.com' path = '/cache/test/' factory = RequestFactory(HTTP_HOST=host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = True if not update_cache else update_cache return request def _set_cache(self, request, msg): response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('*', ('Accept-Language', 'Cookie'), '*'), ('Accept-Language, Cookie', ('*',), '*'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): response = HttpResponse() if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. 
learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e' ) def test_cache_key_varies_by_url(self): """ get_cache_key keys differ by fully-qualified URL instead of path """ request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') learn_cache_key(request1, HttpResponse()) request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com') learn_cache_key(request2, HttpResponse()) self.assertNotEqual(get_cache_key(request1), get_cache_key(request2)) def test_learn_cache_key(self): request = self.factory.head(self.path) response = HttpResponse() response['Vary'] = 'Pony' # Make sure that the Vary header is added to the key hash learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_patch_cache_control(self): tests = ( # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts (None, {'private': True}, {'private'}), ('', {'private': True}, {'private'}), # no-cache. 
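            # A string value adds that field to any existing no-cache list,
            # while True collapses the list to a bare no-cache directive, as
            # the rows below show.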
('', {'no_cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('', {'no-cache': 'Set-Cookie'}, {'no-cache=Set-Cookie'}), ('no-cache=Set-Cookie', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie,no-cache=Link', {'no_cache': True}, {'no-cache'}), ('no-cache=Set-Cookie', {'no_cache': 'Link'}, {'no-cache=Set-Cookie', 'no-cache=Link'}), ( 'no-cache=Set-Cookie,no-cache=Link', {'no_cache': 'Custom'}, {'no-cache=Set-Cookie', 'no-cache=Link', 'no-cache=Custom'}, ), # Test whether private/public attributes are mutually exclusive ('private', {'private': True}, {'private'}), ('private', {'public': True}, {'public'}), ('public', {'public': True}, {'public'}), ('public', {'private': True}, {'private'}), ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}), ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ) cc_delim_re = re.compile(r'\s*,\s*') for initial_cc, newheaders, expected_cc in tests: with self.subTest(initial_cc=initial_cc, newheaders=newheaders): response = HttpResponse() if initial_cc is not None: response['Cache-Control'] = initial_cc patch_cache_control(response, **newheaders) parts = set(cc_delim_re.split(response['Cache-Control'])) self.assertEqual(parts, expected_cc) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix', }, }, ) class PrefixedCacheUtils(CacheUtils): pass @override_settings( CACHE_MIDDLEWARE_SECONDS=60, CACHE_MIDDLEWARE_KEY_PREFIX='test', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) class CacheHEADTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def _set_cache(self, request, msg): response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) def test_head_caches_correctly(self): test_content = 'test content' request = self.factory.head(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) def test_head_with_cached_get(self): test_content = 'test content' request = self.factory.get(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, LANGUAGES=[ ('en', 'English'), ('es', 'Spanish'), ], ) class CacheI18nTest(SimpleTestCase): path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") key2 = get_cache_key(request) 
self.assertEqual(key, key2) def check_accept_language_vary(self, accept_language, vary, reference_key): request = self.factory.get(self.path) request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = vary key = learn_cache_key(request, response) key2 = get_cache_key(request) self.assertEqual(key, reference_key) self.assertEqual(key2, reference_key) @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation_accept_language(self): lang = translation.get_language() self.assertEqual(lang, 'en') request = self.factory.get(self.path) request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = 'accept-encoding' key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") self.check_accept_language_vary( 'en-us', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'en-US', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'en-US,en;q=0.8', 'accept-encoding, accept-language, cookie', key ) self.check_accept_language_vary( 'en-US,en;q=0.8,ko;q=0.6', 'accept-language, cookie, accept-encoding', key ) self.check_accept_language_vary( 'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ', 'accept-encoding, cookie, accept-language', key ) self.check_accept_language_vary( 'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4', 'accept-language, accept-encoding, cookie', key ) self.check_accept_language_vary( 'ko;q=1.0,en;q=0.5', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'ko, en', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'ko-KR, en-US', 'accept-encoding, accept-language, cookie', key ) @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False) def test_cache_key_i18n_formatting(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when formatting is active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True) def test_cache_key_i18n_timezone(self): request = self.factory.get(self.path) tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False) def test_cache_key_no_i18n(self): request = self.factory.get(self.path) lang = translation.get_language() tz = timezone.get_current_timezone_name() response = HttpResponse() key = learn_cache_key(request, response) self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active") self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active") @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_I18N=True, ) def test_middleware(self): def set_cache(request, lang, msg): translation.activate(lang) response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) # cache with non empty request.GET request = 
self.factory.get(self.path, {'foo': 'bar', 'other': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) # first access, cache must return None self.assertIsNone(get_cache_data) response = HttpResponse() content = 'Check for cache with QUERY_STRING' response.content = content UpdateCacheMiddleware().process_response(request, response) get_cache_data = FetchFromCacheMiddleware().process_request(request) # cache must return content self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) # different QUERY_STRING, cache must be empty request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) # i18n tests en_message = "Hello world!" es_message = "Hola mundo!" request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware().process_request(request) # The cache can be recovered self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, en_message.encode()) # change the session language and set content request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'es', es_message) # change again the language translation.activate('en') # retrieve the content from cache get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertEqual(get_cache_data.content, en_message.encode()) # change again the language translation.activate('es') get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertEqual(get_cache_data.content, es_message.encode()) # reset the language translation.deactivate() @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, ) def test_middleware_doesnt_cache_streaming_response(self): request = self.factory.get(self.path) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) content = ['Check for cache with streaming content.'] response = StreamingHttpResponse(content) UpdateCacheMiddleware().process_response(request, response) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix' }, }, ) class PrefixedCacheI18nTest(CacheI18nTest): pass def hello_world_view(request, value): return HttpResponse('Hello World %s' % value) def csrf_view(request): return HttpResponse(csrf(request)['csrf_token']) @override_settings( CACHE_MIDDLEWARE_ALIAS='other', CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix', CACHE_MIDDLEWARE_SECONDS=30, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other', 'TIMEOUT': '1', }, }, ) class CacheMiddlewareTest(SimpleTestCase): factory = RequestFactory() def setUp(self): self.default_cache = caches['default'] self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() self.other_cache.clear() super().tearDown() def test_constructor(self): """ Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as Middleware vs. usage of CacheMiddleware as view decorator and setting attributes appropriately. 
""" # If no arguments are passed in construction, it's being used as middleware. middleware = CacheMiddleware() # Now test object attributes against values defined in setUp above self.assertEqual(middleware.cache_timeout, 30) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') # If arguments are being passed in construction, it's being used as a decorator. # First, test with "defaults": as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None) self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30 self.assertEqual(as_view_decorator.key_prefix, '') # Value of DEFAULT_CACHE_ALIAS from django.core.cache self.assertEqual(as_view_decorator.cache_alias, 'default') # Next, test with custom values: as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo') self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60) self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo') self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other') def test_middleware(self): middleware = CacheMiddleware() prefix_middleware = CacheMiddleware(key_prefix='prefix1') timeout_middleware = CacheMiddleware(cache_timeout=1) request = self.factory.get('/view/') # Put the request through the request middleware result = middleware.process_request(request) self.assertIsNone(result) response = hello_world_view(request, '1') # Now put the response through the response middleware response = middleware.process_response(request, response) # Repeating the request should result in a cache hit result = middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') # The same request through a different middleware won't hit result = prefix_middleware.process_request(request) self.assertIsNone(result) # The same request with a timeout _will_ hit result = timeout_middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') def test_view_decorator(self): # decorate the same view with different cache decorators default_view = cache_page(3)(hello_world_view) default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view) explicit_default_view = cache_page(3, cache='default')(hello_world_view) explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view) other_view = cache_page(1, cache='other')(hello_world_view) other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view) request = self.factory.get('/view/') # Request the view once response = default_view(request, '1') self.assertEqual(response.content, b'Hello World 1') # Request again -- hit the cache response = default_view(request, '2') self.assertEqual(response.content, b'Hello World 1') # Requesting the same view with the explicit cache should yield the same result response = explicit_default_view(request, '3') self.assertEqual(response.content, b'Hello World 1') # Requesting with a prefix will hit a different cache key response = explicit_default_with_prefix_view(request, '4') self.assertEqual(response.content, b'Hello World 4') # Hitting the same view again gives a cache hit response = explicit_default_with_prefix_view(request, '5') self.assertEqual(response.content, b'Hello World 4') # And going back to the implicit cache will hit the same cache response = default_with_prefix_view(request, '6') 
self.assertEqual(response.content, b'Hello World 4') # Requesting from an alternate cache won't hit cache response = other_view(request, '7') self.assertEqual(response.content, b'Hello World 7') # But a repeated hit will hit cache response = other_view(request, '8') self.assertEqual(response.content, b'Hello World 7') # And prefixing the alternate cache yields yet another cache entry response = other_with_prefix_view(request, '9') self.assertEqual(response.content, b'Hello World 9') # But if we wait a couple of seconds... time.sleep(2) # ... the default cache will still hit caches['default'] response = default_view(request, '11') self.assertEqual(response.content, b'Hello World 1') # ... the default cache with a prefix will still hit response = default_with_prefix_view(request, '12') self.assertEqual(response.content, b'Hello World 4') # ... the explicit default cache will still hit response = explicit_default_view(request, '13') self.assertEqual(response.content, b'Hello World 1') # ... the explicit default cache with a prefix will still hit response = explicit_default_with_prefix_view(request, '14') self.assertEqual(response.content, b'Hello World 4') # .. but a rapidly expiring cache won't hit response = other_view(request, '15') self.assertEqual(response.content, b'Hello World 15') # .. even if it has a prefix response = other_with_prefix_view(request, '16') self.assertEqual(response.content, b'Hello World 16') def test_cached_control_private_not_cached(self): """Responses with 'Cache-Control: private' are not cached.""" view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view)) request = self.factory.get('/view/') response = view_with_private_cache(request, '1') self.assertEqual(response.content, b'Hello World 1') response = view_with_private_cache(request, '2') self.assertEqual(response.content, b'Hello World 2') def test_sensitive_cookie_not_cached(self): """ Django must prevent caching of responses that set a user-specific (and maybe security sensitive) cookie in response to a cookie-less request. """ csrf_middleware = CsrfViewMiddleware() cache_middleware = CacheMiddleware() request = self.factory.get('/view/') self.assertIsNone(cache_middleware.process_request(request)) csrf_middleware.process_view(request, csrf_view, (), {}) response = csrf_view(request) response = csrf_middleware.process_response(request, response) response = cache_middleware.process_response(request, response) # Inserting a CSRF cookie in a cookie-less request prevented caching. self.assertIsNone(cache_middleware.process_request(request)) def test_304_response_has_http_caching_headers_but_not_cached(self): original_view = mock.Mock(return_value=HttpResponseNotModified()) view = cache_page(2)(original_view) request = self.factory.get('/view/') # The view shouldn't be cached on the second call. view(request).close() response = view(request) response.close() self.assertEqual(original_view.call_count, 2) self.assertIsInstance(response, HttpResponseNotModified) self.assertIn('Cache-Control', response) self.assertIn('Expires', response) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class TestWithTemplateResponse(SimpleTestCase): """ Tests various headers w/ TemplateResponse. 
Most are probably redundant since they manipulate the same object anyway but the ETag header is 'special' because it relies on the content being complete (which is not necessarily always the case with a TemplateResponse) """ path = '/cache/test/' factory = RequestFactory() def tearDown(self): cache.clear() def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: with self.subTest(initial_vary=initial_vary, newheaders=newheaders): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) key_prefix = 'localprefix' # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) # A specified key_prefix is taken into account. learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # The querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e' ) class TestMakeTemplateFragmentKey(SimpleTestCase): def test_without_vary_on(self): key = make_template_fragment_key('a.fragment') self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e') def test_with_one_vary_on(self): key = make_template_fragment_key('foo', ['abc']) self.assertEqual(key, 'template.cache.foo.493e283d571a73056196f1a68efd0f66') def test_with_many_vary_on(self): key = make_template_fragment_key('bar', ['abc', 'def']) self.assertEqual(key, 'template.cache.bar.17c1a507a0cb58384f4c639067a93520') def test_proper_escaping(self): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.06c8ae8e8c430b69fb0a6443504153dc') def test_with_ints_vary_on(self): key = make_template_fragment_key('foo', [1, 2, 3, 4, 5]) self.assertEqual(key, 'template.cache.foo.7ae8fd2e0d25d651c683bdeebdb29461') def test_with_unicode_vary_on(self): key = make_template_fragment_key('foo', ['42º', '😀']) self.assertEqual(key, 'template.cache.foo.7ced1c94e543668590ba39b3c08b0237') def test_long_vary_on(self): key = make_template_fragment_key('foo', ['x' * 10000]) self.assertEqual(key, 'template.cache.foo.3670b349b5124aa56bdb50678b02b23a') class CacheHandlerTest(SimpleTestCase): def test_same_instance(self): """ Attempting to retrieve the same alias should yield the same instance. """ cache1 = caches['default'] cache2 = caches['default'] self.assertIs(cache1, cache2) def test_per_thread(self): """ Requesting the same alias from separate threads should yield separate instances. """ c = [] def runner(): c.append(caches['default']) for x in range(2): t = threading.Thread(target=runner) t.start() t.join() self.assertIsNot(c[0], c[1])
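
# Illustrative sketch, not part of the test suite: the keys asserted in
# TestMakeTemplateFragmentKey are the same keys the {% cache %} template
# tag computes, so a cached fragment can be invalidated by hand. The
# fragment name and vary-on value below are hypothetical.
def _invalidate_sidebar_fragment(username):
    # Recompute the fragment key the template tag would use, then drop it.
    key = make_template_fragment_key('sidebar', [username])
    cache.delete(key)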
9fa5752a01ed3a5a8c6d721d6c14bc80142ed02670bce1df046e06322f0c7755
from unittest import mock from django.test import SimpleTestCase from django.utils.functional import cached_property, classproperty, lazy class FunctionalTests(SimpleTestCase): def test_lazy(self): t = lazy(lambda: tuple(range(3)), list, tuple) for a, b in zip(t(), range(3)): self.assertEqual(a, b) def test_lazy_base_class(self): """lazy also finds base class methods in the proxy object""" class Base: def base_method(self): pass class Klazz(Base): pass t = lazy(lambda: Klazz(), Klazz)() self.assertIn('base_method', dir(t)) def test_lazy_base_class_override(self): """lazy finds the correct (overridden) method implementation""" class Base: def method(self): return 'Base' class Klazz(Base): def method(self): return 'Klazz' t = lazy(lambda: Klazz(), Base)() self.assertEqual(t.method(), 'Klazz') def test_lazy_object_to_string(self): class Klazz: def __str__(self): return "Î am ā Ǩlâzz." def __bytes__(self): return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz." t = lazy(lambda: Klazz(), Klazz)() self.assertEqual(str(t), "Î am ā Ǩlâzz.") self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.") def assertCachedPropertyWorks(self, attr, Class): with self.subTest(attr=attr): def get(source): return getattr(source, attr) obj = Class() class SubClass(Class): pass subobj = SubClass() # Docstring is preserved. self.assertEqual(get(Class).__doc__, 'Here is the docstring...') self.assertEqual(get(SubClass).__doc__, 'Here is the docstring...') # It's cached. self.assertEqual(get(obj), get(obj)) self.assertEqual(get(subobj), get(subobj)) # The correct value is returned. self.assertEqual(get(obj)[0], 1) self.assertEqual(get(subobj)[0], 1) # State isn't shared between instances. obj2 = Class() subobj2 = SubClass() self.assertNotEqual(get(obj), get(obj2)) self.assertNotEqual(get(subobj), get(subobj2)) # It behaves like a property when there's no instance. self.assertIsInstance(get(Class), cached_property) self.assertIsInstance(get(SubClass), cached_property) # 'other_value' doesn't become a property. self.assertTrue(callable(obj.other_value)) self.assertTrue(callable(subobj.other_value)) def test_cached_property(self): """cached_property caches its value and behaves like a property.""" class Class: @cached_property def value(self): """Here is the docstring...""" return 1, object() @cached_property def __foo__(self): """Here is the docstring...""" return 1, object() def other_value(self): """Here is the docstring...""" return 1, object() other = cached_property(other_value, name='other') attrs = ['value', 'other', '__foo__'] for attr in attrs: self.assertCachedPropertyWorks(attr, Class) def test_cached_property_auto_name(self): """ cached_property caches its value and behaves like a property on mangled methods or when the name kwarg isn't set. """ class Class: @cached_property def __value(self): """Here is the docstring...""" return 1, object() def other_value(self): """Here is the docstring...""" return 1, object() other = cached_property(other_value) other2 = cached_property(other_value, name='different_name') attrs = ['_Class__value', 'other'] for attr in attrs: self.assertCachedPropertyWorks(attr, Class) # An explicit name is ignored. 
obj = Class() obj.other2 self.assertFalse(hasattr(obj, 'different_name')) def test_cached_property_reuse_different_names(self): """Disallow this case because the decorated function wouldn't be cached.""" with self.assertRaises(RuntimeError) as ctx: class ReusedCachedProperty: @cached_property def a(self): pass b = a self.assertEqual( str(ctx.exception.__context__), str(TypeError( "Cannot assign the same cached_property to two different " "names ('a' and 'b')." )) ) def test_cached_property_reuse_same_name(self): """ Reusing a cached_property on different classes under the same name is allowed. """ counter = 0 @cached_property def _cp(_self): nonlocal counter counter += 1 return counter class A: cp = _cp class B: cp = _cp a = A() b = B() self.assertEqual(a.cp, 1) self.assertEqual(b.cp, 2) self.assertEqual(a.cp, 1) def test_cached_property_set_name_not_called(self): cp = cached_property(lambda s: None) class Foo: pass Foo.cp = cp msg = 'Cannot use cached_property instance without calling __set_name__() on it.' with self.assertRaisesMessage(TypeError, msg): Foo().cp def test_lazy_equality(self): """ == and != work correctly for Promises. """ lazy_a = lazy(lambda: 4, int) lazy_b = lazy(lambda: 4, int) lazy_c = lazy(lambda: 5, int) self.assertEqual(lazy_a(), lazy_b()) self.assertNotEqual(lazy_b(), lazy_c()) def test_lazy_repr_text(self): original_object = 'Lazy translation text' lazy_obj = lazy(lambda: original_object, str) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_repr_int(self): original_object = 15 lazy_obj = lazy(lambda: original_object, int) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_repr_bytes(self): original_object = b'J\xc3\xbcst a str\xc3\xadng' lazy_obj = lazy(lambda: original_object, bytes) self.assertEqual(repr(original_object), repr(lazy_obj())) def test_lazy_class_preparation_caching(self): # lazy() should prepare the proxy class only once i.e. the first time # it's used. lazified = lazy(lambda: 0, int) __proxy__ = lazified().__class__ with mock.patch.object(__proxy__, '__prepare_class__') as mocked: lazified() mocked.assert_not_called() def test_classproperty_getter(self): class Foo: foo_attr = 123 def __init__(self): self.foo_attr = 456 @classproperty def foo(cls): return cls.foo_attr class Bar: bar = classproperty() @bar.getter def bar(cls): return 123 self.assertEqual(Foo.foo, 123) self.assertEqual(Foo().foo, 123) self.assertEqual(Bar.bar, 123) self.assertEqual(Bar().bar, 123) def test_classproperty_override_getter(self): class Foo: @classproperty def foo(cls): return 123 @foo.getter def foo(cls): return 456 self.assertEqual(Foo.foo, 456) self.assertEqual(Foo().foo, 456)
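
# Illustrative sketch, not a test: cached_property stores the computed
# value in the instance's __dict__ under the property's name, which is
# why the tests above observe one computation per instance. Deleting the
# cached attribute invalidates it. This example class is hypothetical.
class _CachedPropertyExample:
    @cached_property
    def value(self):
        # Computed at most once per instance, then served from
        # self.__dict__['value'].
        return object()

    def reset(self):
        # Drop the cached value (if any); the next access recomputes it.
        self.__dict__.pop('value', None)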
e40637bff581e57a2c906476a3c369e1bfa9e3da948dc87263facafa41ab8e4b
from django.http import HttpResponse from django.template import engines from django.template.response import TemplateResponse from django.test import RequestFactory, SimpleTestCase from django.utils.decorators import decorator_from_middleware class ProcessViewMiddleware: def process_view(self, request, view_func, view_args, view_kwargs): pass process_view_dec = decorator_from_middleware(ProcessViewMiddleware) @process_view_dec def process_view(request): return HttpResponse() class ClassProcessView: def __call__(self, request): return HttpResponse() class_process_view = process_view_dec(ClassProcessView()) class FullMiddleware: def process_request(self, request): request.process_request_reached = True def process_view(self, request, view_func, view_args, view_kwargs): request.process_view_reached = True def process_template_response(self, request, response): request.process_template_response_reached = True return response def process_response(self, request, response): # This should never receive unrendered content. request.process_response_content = response.content request.process_response_reached = True return response full_dec = decorator_from_middleware(FullMiddleware) class DecoratorFromMiddlewareTests(SimpleTestCase): """ Tests for view decorators created using ``django.utils.decorators.decorator_from_middleware``. """ rf = RequestFactory() def test_process_view_middleware(self): """ Test a middleware that implements process_view. """ process_view(self.rf.get('/')) def test_callable_process_view_middleware(self): """ Test a middleware that implements process_view, operating on a callable class. """ class_process_view(self.rf.get('/')) def test_full_dec_normal(self): """ All methods of middleware are called for normal HttpResponses """ @full_dec def normal_view(request): template = engines['django'].from_string("Hello world") return HttpResponse(template.render()) request = self.rf.get('/') normal_view(request) self.assertTrue(getattr(request, 'process_request_reached', False)) self.assertTrue(getattr(request, 'process_view_reached', False)) # process_template_response must not be called for HttpResponse self.assertFalse(getattr(request, 'process_template_response_reached', False)) self.assertTrue(getattr(request, 'process_response_reached', False)) def test_full_dec_templateresponse(self): """ All methods of middleware are called for TemplateResponses in the right sequence. """ @full_dec def template_response_view(request): template = engines['django'].from_string("Hello world") return TemplateResponse(request, template) request = self.rf.get('/') response = template_response_view(request) self.assertTrue(getattr(request, 'process_request_reached', False)) self.assertTrue(getattr(request, 'process_view_reached', False)) self.assertTrue(getattr(request, 'process_template_response_reached', False)) # response must not be rendered yet. self.assertFalse(response._is_rendered) # process_response must not be called until after response is rendered, # otherwise some decorators like csrf_protect and gzip_page will not # work correctly. See #16004 self.assertFalse(getattr(request, 'process_response_reached', False)) response.render() self.assertTrue(getattr(request, 'process_response_reached', False)) # process_response saw the rendered content self.assertEqual(request.process_response_content, b"Hello world")
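
# Illustrative sketch, not a test: decorator_from_middleware is also how
# per-view decorators such as csrf_protect and gzip_page are built from
# middleware classes, which is why the rendering-order guarantee checked
# above matters. The view below is hypothetical.
sample_full_dec = decorator_from_middleware(FullMiddleware)


@sample_full_dec
def sample_view(request):
    # Runs with FullMiddleware's process_request/process_view applied
    # before it and process_response applied after rendering.
    return HttpResponse('ok')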
759d3ae11a21177220906922f66a1cfc6ccbb0e6ed66d33274fc9ebdc729a2c6
import datetime import itertools import unittest from copy import copy from unittest import mock from django.core.management.color import no_style from django.db import ( DatabaseError, IntegrityError, OperationalError, connection, ) from django.db.models import Index, Model, Q from django.db.models.constraints import CheckConstraint, UniqueConstraint from django.db.models.deletion import CASCADE, PROTECT from django.db.models.fields import ( AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField, CharField, DateField, DateTimeField, IntegerField, PositiveIntegerField, SlugField, SmallAutoField, SmallIntegerField, TextField, TimeField, UUIDField, ) from django.db.models.fields.related import ( ForeignKey, ForeignObject, ManyToManyField, OneToOneField, ) from django.db.transaction import TransactionManagementError, atomic from django.db.utils import DataError from django.test import ( TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import CaptureQueriesContext, isolate_apps from django.utils import timezone from .fields import ( CustomManyToManyField, InheritedManyToManyField, MediumBlobField, ) from .models import ( Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName, AuthorWithIndexedNameAndBirthday, AuthorWithUniqueName, AuthorWithUniqueNameAndBirthday, Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O, BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests for the schema-alteration code. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. self.local_models = [] # isolated_local_models contains models that are in test methods # decorated with @isolate_apps. 
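        # Unlike local_models, these aren't registered in the test app
        # cache, so tearDown() drops their tables explicitly with
        # editor.delete_model().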
        self.isolated_local_models = []

    def tearDown(self):
        # Delete any tables made for our models
        self.delete_tables()
        new_apps.clear_cache()
        for model in new_apps.get_models():
            model._meta._expire_cache()
        if 'schema' in new_apps.all_models:
            for model in self.local_models:
                for many_to_many in model._meta.many_to_many:
                    through = many_to_many.remote_field.through
                    if through and through._meta.auto_created:
                        del new_apps.all_models['schema'][through._meta.model_name]
                del new_apps.all_models['schema'][model._meta.model_name]
        if self.isolated_local_models:
            with connection.schema_editor() as editor:
                for model in self.isolated_local_models:
                    editor.delete_model(model)

    def delete_tables(self):
        "Deletes all model tables for our models for a clean test environment"
        converter = connection.introspection.identifier_converter
        with connection.schema_editor() as editor:
            connection.disable_constraint_checking()
            table_names = connection.introspection.table_names()
            for model in itertools.chain(SchemaTests.models, self.local_models):
                tbl = converter(model._meta.db_table)
                if tbl in table_names:
                    editor.delete_model(model)
                    table_names.remove(tbl)
            connection.enable_constraint_checking()

    def column_classes(self, model):
        with connection.cursor() as cursor:
            columns = {
                d[0]: (connection.introspection.get_field_type(d[1], d), d)
                for d in connection.introspection.get_table_description(
                    cursor,
                    model._meta.db_table,
                )
            }
        # SQLite has a different format for field_type
        for name, (type, desc) in columns.items():
            if isinstance(type, tuple):
                columns[name] = (type[0], desc)
        # SQLite also doesn't error properly
        if not columns:
            raise DatabaseError("Table does not exist (empty pragma)")
        return columns

    def get_primary_key(self, table):
        with connection.cursor() as cursor:
            return connection.introspection.get_primary_key_column(cursor, table)

    def get_indexes(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table).values()
                if c['index'] and len(c['columns']) == 1
            ]

    def get_uniques(self, table):
        with connection.cursor() as cursor:
            return [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table).values()
                if c['unique'] and len(c['columns']) == 1
            ]

    def get_constraints(self, table):
        """
        Get the constraints on a table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def get_constraints_for_column(self, model, column_name):
        constraints = self.get_constraints(model._meta.db_table)
        constraints_for_column = []
        for name, details in constraints.items():
            if details['columns'] == [column_name]:
                constraints_for_column.append(name)
        return sorted(constraints_for_column)

    def check_added_field_default(self, schema_editor, model, field, field_name, expected_default, cast_function=None):
        with connection.cursor() as cursor:
            schema_editor.add_field(model, field)
            cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
            database_default = cursor.fetchall()[0][0]
            if cast_function and type(database_default) is not type(expected_default):
                database_default = cast_function(database_default)
            self.assertEqual(database_default, expected_default)

    def get_constraints_count(self, table, column, fk_to):
        """
        Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
        number of foreign keys, unique constraints, and indexes on
        `table`.`column`.
The `fk_to` argument is a 2-tuple specifying the expected foreign key relationship's (table, column). """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints(cursor, table) counts = {'fks': 0, 'uniques': 0, 'indexes': 0} for c in constraints.values(): if c['columns'] == [column]: if c['foreign_key'] == fk_to: counts['fks'] += 1 if c['unique']: counts['uniques'] += 1 elif c['index']: counts['indexes'] += 1 return counts def assertIndexOrder(self, table, index, order): constraints = self.get_constraints(table) self.assertIn(index, constraints) index_orders = constraints[index]['orders'] self.assertTrue(all(val == expected for val, expected in zip(index_orders, order))) def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'): """ Fail if the FK constraint on `model.Meta.db_table`.`column` to `expected_fk_table`.id doesn't exist. """ constraints = self.get_constraints(model._meta.db_table) constraint_fk = None for details in constraints.values(): if details['columns'] == [column] and details['foreign_key']: constraint_fk = details['foreign_key'] break self.assertEqual(constraint_fk, (expected_fk_table, field)) def assertForeignKeyNotExists(self, model, column, expected_fk_table): with self.assertRaises(AssertionError): self.assertForeignKeyExists(model, column, expected_fk_table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ with connection.schema_editor() as editor: # Create the table editor.create_model(Author) # The table is there list(Author.objects.all()) # Clean up that table editor.delete_model(Author) # No deferred SQL should be left over. self.assertEqual(editor.deferred_sql, []) # The table is gone with self.assertRaises(DatabaseError): list(Author.objects.all()) @skipUnlessDBFeature('supports_foreign_keys') def test_fk(self): "Creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) self.assertForeignKeyExists(Book, 'author_id', 'schema_tag') @skipUnlessDBFeature('can_create_inline_fk') def test_inline_fk(self): # Create some tables. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) editor.create_model(Note) self.assertForeignKeyNotExists(Note, 'book_id', 'schema_book') # Add a foreign key from one to the other. with connection.schema_editor() as editor: new_field = ForeignKey(Book, CASCADE) new_field.set_attributes_from_name('book') editor.add_field(Note, new_field) self.assertForeignKeyExists(Note, 'book_id', 'schema_book') # Creating a FK field with a constraint uses a single statement without # a deferred ALTER TABLE. 
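        # i.e. on backends with can_create_inline_fk, the REFERENCES
        # clause is emitted inline with the ADD COLUMN, so no
        # "ALTER TABLE ... ADD CONSTRAINT" statement should remain in
        # editor.deferred_sql.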
self.assertFalse([ sql for sql in (str(statement) for statement in editor.deferred_sql) if sql.startswith('ALTER TABLE') and 'ADD CONSTRAINT' in sql ]) @skipUnlessDBFeature('supports_foreign_keys') def test_char_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorCharFieldWithIndex) # Change CharField to FK old_field = AuthorCharFieldWithIndex._meta.get_field('char_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('char_field') with connection.schema_editor() as editor: editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') @skipUnlessDBFeature('supports_index_on_text_field') def test_text_field_with_db_index_to_fk(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorTextFieldWithIndex) # Change TextField to FK old_field = AuthorTextFieldWithIndex._meta.get_field('text_field') new_field = ForeignKey(Author, CASCADE, blank=True) new_field.set_attributes_from_name('text_field') with connection.schema_editor() as editor: editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True) self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author') @isolate_apps('schema') def test_char_field_pk_to_auto_field(self): class Foo(Model): id = CharField(max_length=255, primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Foo) self.isolated_local_models = [Foo] old_field = Foo._meta.get_field('id') new_field = AutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Foo with connection.schema_editor() as editor: editor.alter_field(Foo, old_field, new_field, strict=True) @skipUnlessDBFeature('supports_foreign_keys') def test_fk_to_proxy(self): "Creating a FK to a proxy model creates database constraints." 
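        # The FK is declared against the proxy, but the constraint must
        # point at the concrete model's table (schema_author), as asserted
        # at the end of this test.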
class AuthorProxy(Author): class Meta: app_label = 'schema' apps = new_apps proxy = True class AuthorRef(Model): author = ForeignKey(AuthorProxy, on_delete=CASCADE) class Meta: app_label = 'schema' apps = new_apps self.local_models = [AuthorProxy, AuthorRef] # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(AuthorRef) self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_fk_db_constraint(self): "The db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) editor.create_model(BookWeak) # Initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') # Make a db_constraint=False FK new_field = ForeignKey(Tag, CASCADE, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') # Alter to one with a constraint new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag') # Alter to one without a constraint again new_field2 = ForeignKey(Tag, CASCADE) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag') @isolate_apps('schema') def test_no_db_constraint_added_during_primary_key_change(self): """ When a primary key that's pointed to by a ForeignKey with db_constraint=False is altered, a foreign key constraint isn't added. """ class Author(Model): class Meta: app_label = 'schema' class BookWeak(Model): author = ForeignKey(Author, CASCADE, db_constraint=False) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWeak) self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author') old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.model = Author new_field.set_attributes_from_name('id') # @isolate_apps() and inner models are needed to have the model # relations populated, otherwise this doesn't act as a regression test. 
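        # With the relation tree populated, alter_field() has to consider
        # the FK pointing at the PK; since that FK was declared with
        # db_constraint=False, no constraint may be added for it.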
        self.assertEqual(len(new_field.model._meta.related_objects), 1)
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')

    def _test_m2m_db_constraint(self, M2MFieldClass):
        class LocalAuthorWithM2M(Model):
            name = CharField(max_length=255)

            class Meta:
                app_label = 'schema'
                apps = new_apps

        self.local_models = [LocalAuthorWithM2M]
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
            editor.create_model(LocalAuthorWithM2M)
        # Initial tables are there
        list(LocalAuthorWithM2M.objects.all())
        list(Tag.objects.all())
        # Make a db_constraint=False FK
        new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
        new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
        # Add the field
        with connection.schema_editor() as editor:
            editor.add_field(LocalAuthorWithM2M, new_field)
        self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag')

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_m2m_db_constraint(self):
        self._test_m2m_db_constraint(ManyToManyField)

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_m2m_db_constraint_custom(self):
        self._test_m2m_db_constraint(CustomManyToManyField)

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_m2m_db_constraint_inherited(self):
        self._test_m2m_db_constraint(InheritedManyToManyField)

    def test_add_field(self):
        """
        Tests adding fields to models
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add the new field
        new_field = IntegerField(null=True)
        new_field.set_attributes_from_name("age")
        with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        drop_default_sql = editor.sql_alter_column_no_default % {
            'column': editor.quote_name(new_field.name),
        }
        self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries))
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['age'][0], "IntegerField")
        self.assertEqual(columns['age'][1][6], True)
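
    # On most backends add_field() for a nullable column boils down to a
    # single statement along these lines (illustrative only; quoting and
    # type names are backend-specific):
    #
    #   ALTER TABLE "schema_author" ADD COLUMN "age" integer NULL;
    #
    # The CaptureQueriesContext assertion above verifies that no follow-up
    # "ALTER COLUMN ... DROP DEFAULT" is issued when the field has no
    # default.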

    def test_add_field_remove_field(self):
        """
        Adding a field and removing it removes all deferred sql referring to it.
        """
        with connection.schema_editor() as editor:
            # Create a table with a unique constraint on the slug field.
            editor.create_model(Tag)
            # Remove the slug column.
            editor.remove_field(Tag, Tag._meta.get_field('slug'))
        self.assertEqual(editor.deferred_sql, [])

    def test_add_field_temp_default(self):
        """
        Tests adding fields to models with a temporary default
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add a not-null field
        new_field = CharField(max_length=30, default="Godwin")
        new_field.set_attributes_from_name("surname")
        with connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['surname'][0], "CharField")
        self.assertEqual(
            columns['surname'][1][6],
            connection.features.interprets_empty_strings_as_nulls,
        )

    def test_add_field_temp_default_boolean(self):
        """
        Tests adding fields to models with a temporary default where
        the default is False. (#21783)
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure there's no age field
        columns = self.column_classes(Author)
        self.assertNotIn("age", columns)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add a not-null field
        new_field = BooleanField(default=False)
        new_field.set_attributes_from_name("awesome")
        with connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        # BooleanField is stored as TINYINT(1) on MySQL.
        field_type = columns['awesome'][0]
        self.assertEqual(field_type, connection.features.introspected_boolean_field_type)

    def test_add_field_default_transform(self):
        """
        Tests adding fields to models with a default that is not directly
        valid in the database (#22581)
        """
        class TestTransformField(IntegerField):
            # Weird field that saves the count of items in its value
            def get_default(self):
                return self.default

            def get_prep_value(self, value):
                if value is None:
                    return 0
                return len(value)

        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add some rows of data
        Author.objects.create(name="Andrew", height=30)
        Author.objects.create(name="Andrea")
        # Add the field with a default it needs to cast (to an integer in
        # this case)
        new_field = TestTransformField(default={1: 2})
        new_field.set_attributes_from_name("thing")
        with connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        # Ensure the field is there
        columns = self.column_classes(Author)
        field_type, field_info = columns['thing']
        self.assertEqual(field_type, 'IntegerField')
        # Make sure the values were transformed correctly
        self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)

    def test_add_field_binary(self):
        """
        Tests binary fields get a sane default (#22851)
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add the new field
        new_field = BinaryField(blank=True)
        new_field.set_attributes_from_name("bits")
        with connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        # MySQL annoyingly uses the same backend, so it'll come back as one of
        # these two types.
        self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))

    @unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific")
    def test_add_binaryfield_mediumblob(self):
        """
        Test adding a custom-sized binary field on MySQL (#24846).
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add the new field with default
        new_field = MediumBlobField(blank=True, default=b'123')
        new_field.set_attributes_from_name('bits')
        with connection.schema_editor() as editor:
            editor.add_field(Author, new_field)
        columns = self.column_classes(Author)
        # Introspection treats BLOBs as TextFields
        self.assertEqual(columns['bits'][0], "TextField")

    def test_alter(self):
        """
        Tests simple altering of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
        # Alter the name field to a TextField
        old_field = Author._meta.get_field("name")
        new_field = TextField(null=True)
        new_field.set_attributes_from_name("name")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "TextField")
        self.assertEqual(columns['name'][1][6], True)
        # Change nullability again
        new_field2 = TextField(null=False)
        new_field2.set_attributes_from_name("name")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, new_field2, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "TextField")
        self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))

    def test_alter_auto_field_to_integer_field(self):
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Change AutoField to IntegerField
        old_field = Author._meta.get_field('id')
        new_field = IntegerField(primary_key=True)
        new_field.set_attributes_from_name('id')
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)

    def test_alter_auto_field_to_char_field(self):
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Change AutoField to CharField
        old_field = Author._meta.get_field('id')
        new_field = CharField(primary_key=True, max_length=50)
        new_field.set_attributes_from_name('id')
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)

    @isolate_apps('schema')
    def test_alter_auto_field_quoted_db_column(self):
        class Foo(Model):
            id = AutoField(primary_key=True, db_column='"quoted_id"')

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Foo)
        self.isolated_local_models = [Foo]
        old_field = Foo._meta.get_field('id')
        new_field = BigAutoField(primary_key=True)
        new_field.model = Foo
        new_field.db_column = '"quoted_id"'
        new_field.set_attributes_from_name('id')
        with connection.schema_editor() as editor:
            editor.alter_field(Foo, old_field, new_field, strict=True)
        Foo.objects.create()
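
    # Note on strict=True, used throughout these tests: it makes the schema
    # editor raise an error when it finds an unexpected number of matching
    # constraints or indexes to alter, instead of silently skipping them.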

    def test_alter_not_unique_field_to_primary_key(self):
        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Change UUIDField to primary key.
        old_field = Author._meta.get_field('uuid')
        new_field = UUIDField(primary_key=True)
        new_field.set_attributes_from_name('uuid')
        new_field.model = Author
        with connection.schema_editor() as editor:
            editor.remove_field(Author, Author._meta.get_field('id'))
            editor.alter_field(Author, old_field, new_field, strict=True)

    @isolate_apps('schema')
    def test_alter_primary_key_quoted_db_table(self):
        class Foo(Model):
            class Meta:
                app_label = 'schema'
                db_table = '"foo"'

        with connection.schema_editor() as editor:
            editor.create_model(Foo)
        self.isolated_local_models = [Foo]
        old_field = Foo._meta.get_field('id')
        new_field = BigAutoField(primary_key=True)
        new_field.model = Foo
        new_field.set_attributes_from_name('id')
        with connection.schema_editor() as editor:
            editor.alter_field(Foo, old_field, new_field, strict=True)
        Foo.objects.create()

    def test_alter_text_field(self):
        # Regression for "BLOB/TEXT column 'info' can't have a default value"
        # on MySQL.
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        old_field = Note._meta.get_field("info")
        new_field = TextField(blank=True)
        new_field.set_attributes_from_name("info")
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)

    @skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl')
    def test_alter_fk_checks_deferred_constraints(self):
        """
        #25492 - Altering a foreign key's structure and data in the same
        transaction.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Node)
        old_field = Node._meta.get_field('parent')
        new_field = ForeignKey(Node, CASCADE)
        new_field.set_attributes_from_name('parent')
        parent = Node.objects.create()
        with connection.schema_editor() as editor:
            # Update the parent FK to create a deferred constraint check.
            Node.objects.update(parent=parent)
            editor.alter_field(Node, old_field, new_field, strict=True)

    def test_alter_text_field_to_date_field(self):
        """
        #25002 - Test conversion of text field to date field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        Note.objects.create(info='1988-05-05')
        old_field = Note._meta.get_field('info')
        new_field = DateField(blank=True)
        new_field.set_attributes_from_name('info')
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Make sure the field isn't nullable
        columns = self.column_classes(Note)
        self.assertFalse(columns['info'][1][6])

    def test_alter_text_field_to_datetime_field(self):
        """
        #25002 - Test conversion of text field to datetime field.
        """
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        Note.objects.create(info='1988-05-05 3:16:17.4567')
        old_field = Note._meta.get_field('info')
        new_field = DateTimeField(blank=True)
        new_field.set_attributes_from_name('info')
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)
        # Make sure the field isn't nullable
        columns = self.column_classes(Note)
        self.assertFalse(columns['info'][1][6])
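
    # On PostgreSQL the text-to-date/datetime conversions above (and the
    # time conversion that follows) rely on the schema editor's cast form
    # of ALTER COLUMN, roughly (illustrative):
    #
    #   ALTER TABLE "schema_note" ALTER COLUMN "info" TYPE date USING "info"::date;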
""" with connection.schema_editor() as editor: editor.create_model(Note) Note.objects.create(info='3:16:17.4567') old_field = Note._meta.get_field('info') new_field = TimeField(blank=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) # Make sure the field isn't nullable columns = self.column_classes(Note) self.assertFalse(columns['info'][1][6]) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_alter_char_field_decrease_length(self): # Create the table. 

    @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
    def test_alter_char_field_decrease_length(self):
        # Create the table.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        Author.objects.create(name='x' * 255)
        # Change max_length of CharField.
        old_field = Author._meta.get_field('name')
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name('name')
        with connection.schema_editor() as editor:
            msg = 'value too long for type character varying(254)'
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(Author, old_field, new_field, strict=True)

    @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
    def test_alter_field_with_custom_db_type(self):
        from django.contrib.postgres.fields import ArrayField

        class Foo(Model):
            field = ArrayField(CharField(max_length=255))

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Foo)
        self.isolated_local_models = [Foo]
        old_field = Foo._meta.get_field('field')
        new_field = ArrayField(CharField(max_length=16))
        new_field.set_attributes_from_name('field')
        new_field.model = Foo
        with connection.schema_editor() as editor:
            editor.alter_field(Foo, old_field, new_field, strict=True)

    @isolate_apps('schema')
    @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
    def test_alter_array_field_decrease_base_field_length(self):
        from django.contrib.postgres.fields import ArrayField

        class ArrayModel(Model):
            field = ArrayField(CharField(max_length=16))

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(ArrayModel)
        self.isolated_local_models = [ArrayModel]
        ArrayModel.objects.create(field=['x' * 16])
        old_field = ArrayModel._meta.get_field('field')
        new_field = ArrayField(CharField(max_length=15))
        new_field.set_attributes_from_name('field')
        new_field.model = ArrayModel
        with connection.schema_editor() as editor:
            msg = 'value too long for type character varying(15)'
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(ArrayModel, old_field, new_field, strict=True)

    @isolate_apps('schema')
    @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific')
    def test_alter_array_field_decrease_nested_base_field_length(self):
        from django.contrib.postgres.fields import ArrayField

        class ArrayModel(Model):
            field = ArrayField(ArrayField(CharField(max_length=16)))

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(ArrayModel)
        self.isolated_local_models = [ArrayModel]
        ArrayModel.objects.create(field=[['x' * 16]])
        old_field = ArrayModel._meta.get_field('field')
        new_field = ArrayField(ArrayField(CharField(max_length=15)))
        new_field.set_attributes_from_name('field')
        new_field.model = ArrayModel
        with connection.schema_editor() as editor:
            msg = 'value too long for type character varying(15)'
            with self.assertRaisesMessage(DataError, msg):
                editor.alter_field(ArrayModel, old_field, new_field, strict=True)

    def test_alter_textfield_to_null(self):
        """
        #24307 - Should skip an alter statement on databases with
        interprets_empty_strings_as_nulls when changing a TextField to null.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Note)
        # Change the TextField to null
        old_field = Note._meta.get_field('info')
        new_field = copy(old_field)
        new_field.null = True
        with connection.schema_editor() as editor:
            editor.alter_field(Note, old_field, new_field, strict=True)

    @skipUnlessDBFeature('supports_combined_alters')
    def test_alter_null_to_not_null_keeping_default(self):
        """
        #23738 - Can change a nullable field with default to non-nullable
        with the same default.
""" # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") self.assertForeignKeyExists(Book, 'author_id', 'schema_author') # Alter the FK old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, CASCADE, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") self.assertForeignKeyExists(Book, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for details in constraints.values(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author') @skipUnlessDBFeature('supports_foreign_keys') def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], "IntegerField") # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author') # Alter the OneToOneField to ForeignKey old_field = BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author, 

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_alter_o2o_to_fk(self):
        """
        #24163 - Tests altering of OneToOneField to ForeignKey
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        # Ensure the field is right to begin with
        columns = self.column_classes(BookWithO2O)
        self.assertEqual(columns['author_id'][0], "IntegerField")
        # Ensure the field is unique
        author = Author.objects.create(name="Joe")
        BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
        with self.assertRaises(IntegrityError):
            BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
        BookWithO2O.objects.all().delete()
        self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
        # Alter the OneToOneField to ForeignKey
        old_field = BookWithO2O._meta.get_field("author")
        new_field = ForeignKey(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(Book)
        self.assertEqual(columns['author_id'][0], "IntegerField")
        # Ensure the field is not unique anymore
        Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
        Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
        self.assertForeignKeyExists(Book, 'author_id', 'schema_author')

    @skipUnlessDBFeature('supports_foreign_keys')
    def test_alter_fk_to_o2o(self):
        """
        #24163 - Tests altering of ForeignKey to OneToOneField
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Ensure the field is right to begin with
        columns = self.column_classes(Book)
        self.assertEqual(columns['author_id'][0], "IntegerField")
        # Ensure the field is not unique
        author = Author.objects.create(name="Joe")
        Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
        Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
        Book.objects.all().delete()
        self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
        # Alter the ForeignKey to OneToOneField
        old_field = Book._meta.get_field("author")
        new_field = OneToOneField(Author, CASCADE)
        new_field.set_attributes_from_name("author")
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(BookWithO2O)
        self.assertEqual(columns['author_id'][0], "IntegerField")
        # Ensure the field is unique now
        BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
        with self.assertRaises(IntegrityError):
            BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
        self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')

    def test_alter_field_fk_to_o2o(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        expected_fks = 1 if connection.features.supports_foreign_keys else 0
        # Check the index is right to begin with.
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
        old_field = Book._meta.get_field('author')
        new_field = OneToOneField(Author, CASCADE)
        new_field.set_attributes_from_name('author')
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The index on ForeignKey is replaced with a unique constraint for
        # OneToOneField.
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})

    def test_alter_field_fk_keeps_index(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        expected_fks = 1 if connection.features.supports_foreign_keys else 0
        # Check the index is right to begin with.
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
        old_field = Book._meta.get_field('author')
        # on_delete changed from CASCADE.
        new_field = ForeignKey(Author, PROTECT)
        new_field.set_attributes_from_name('author')
        with connection.schema_editor() as editor:
            editor.alter_field(Book, old_field, new_field, strict=True)
        counts = self.get_constraints_count(
            Book._meta.db_table,
            Book._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The index remains.
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})

    def test_alter_field_o2o_to_fk(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        expected_fks = 1 if connection.features.supports_foreign_keys else 0
        # Check the unique constraint is right to begin with.
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
        old_field = BookWithO2O._meta.get_field('author')
        new_field = ForeignKey(Author, CASCADE)
        new_field.set_attributes_from_name('author')
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The unique constraint on OneToOneField is replaced with an index
        # for ForeignKey.
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})

    def test_alter_field_o2o_keeps_unique(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(BookWithO2O)
        expected_fks = 1 if connection.features.supports_foreign_keys else 0
        # Check the unique constraint is right to begin with.
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
        old_field = BookWithO2O._meta.get_field('author')
        # on_delete changed from CASCADE.
        new_field = OneToOneField(Author, PROTECT)
        new_field.set_attributes_from_name('author')
        with connection.schema_editor() as editor:
            editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
        counts = self.get_constraints_count(
            BookWithO2O._meta.db_table,
            BookWithO2O._meta.get_field('author').column,
            (Author._meta.db_table, Author._meta.pk.column),
        )
        # The unique constraint remains.
        self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})

    @skipUnlessDBFeature('ignores_table_name_case')
    def test_alter_db_table_case(self):
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Alter the case of the table
        old_table_name = Author._meta.db_table
        with connection.schema_editor() as editor:
            editor.alter_db_table(Author, old_table_name, old_table_name.upper())

    def test_alter_implicit_id_to_explicit(self):
        """
        Should be able to convert an implicit "id" field to an explicit "id"
        primary key field.
""" with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = AutoField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') Author.objects.create(name='Bar') def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self): """ Converting an implicit PK to BigAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = BigAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self): """ Converting an implicit PK to SmallAutoField(primary_key=True) should keep a sequence owner on PostgreSQL. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field('id') new_field = SmallAutoField(primary_key=True) new_field.set_attributes_from_name('id') new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) Author.objects.create(name='Foo', pk=1) with connection.cursor() as cursor: sequence_reset_sqls = connection.ops.sequence_reset_sql(no_style(), [Author]) if sequence_reset_sqls: cursor.execute(sequence_reset_sqls[0]) # Fail on PostgreSQL if sequence is missing an owner. self.assertIsNotNone(Author.objects.create(name='Bar')) def test_alter_int_pk_to_autofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to AutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = AutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_bigautofield_pk(self): """ Should be able to rename an IntegerField(primary_key=True) to BigAutoField(primary_key=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) old_field = IntegerPK._meta.get_field('i') new_field = BigAutoField(primary_key=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) @isolate_apps('schema') def test_alter_smallint_pk_to_smallautofield_pk(self): """ Should be able to rename an SmallIntegerField(primary_key=True) to SmallAutoField(primary_key=True). 
""" class SmallIntegerPK(Model): i = SmallIntegerField(primary_key=True) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(SmallIntegerPK) self.isolated_local_models = [SmallIntegerPK] old_field = SmallIntegerPK._meta.get_field('i') new_field = SmallAutoField(primary_key=True) new_field.model = SmallIntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True) def test_alter_int_pk_to_int_unique(self): """ Should be able to rename an IntegerField(primary_key=True) to IntegerField(unique=True). """ with connection.schema_editor() as editor: editor.create_model(IntegerPK) # Delete the old PK old_field = IntegerPK._meta.get_field('i') new_field = IntegerField(unique=True) new_field.model = IntegerPK new_field.set_attributes_from_name('i') with connection.schema_editor() as editor: editor.alter_field(IntegerPK, old_field, new_field, strict=True) # The primary key constraint is gone. Result depends on database: # 'id' for SQLite, None for others (must not be 'i'). self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None)) # Set up a model class as it currently stands. The original IntegerPK # class is now out of date and some backends make use of the whole # model class when modifying a field (such as sqlite3 when remaking a # table) so an outdated model class leads to incorrect results. class Transitional(Model): i = IntegerField(unique=True) j = IntegerField(unique=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # model requires a new PK old_field = Transitional._meta.get_field('j') new_field = IntegerField(primary_key=True) new_field.model = Transitional new_field.set_attributes_from_name('j') with connection.schema_editor() as editor: editor.alter_field(Transitional, old_field, new_field, strict=True) # Create a model class representing the updated model. class IntegerUnique(Model): i = IntegerField(unique=True) j = IntegerField(primary_key=True) class Meta: app_label = 'schema' apps = new_apps db_table = 'INTEGERPK' # Ensure unique constraint works. 
        IntegerUnique.objects.create(i=1, j=1)
        with self.assertRaises(IntegrityError):
            IntegerUnique.objects.create(i=1, j=2)

    def test_rename(self):
        """
        Tests simple renaming of fields
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the field is right to begin with
        columns = self.column_classes(Author)
        self.assertEqual(columns['name'][0], "CharField")
        self.assertNotIn("display_name", columns)
        # Alter the name field's name
        old_field = Author._meta.get_field("name")
        new_field = CharField(max_length=254)
        new_field.set_attributes_from_name("display_name")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        # Ensure the field is right afterwards
        columns = self.column_classes(Author)
        self.assertEqual(columns['display_name'][0], "CharField")
        self.assertNotIn("name", columns)

    @isolate_apps('schema')
    def test_rename_referenced_field(self):
        class Author(Model):
            name = CharField(max_length=255, unique=True)

            class Meta:
                app_label = 'schema'

        class Book(Model):
            author = ForeignKey(Author, CASCADE, to_field='name')

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        new_field = CharField(max_length=255, unique=True)
        new_field.set_attributes_from_name('renamed')
        with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
            editor.alter_field(Author, Author._meta.get_field('name'), new_field)
        # Ensure the foreign key reference was updated.
        self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')
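
    # Renaming a column that a ForeignKey targets via to_field also has to
    # update the referencing constraint; on SQLite that requires remaking
    # the referencing table, hence the
    # connection.features.supports_atomic_references_rename guard on the
    # schema_editor() call above.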
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author, CASCADE) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBookWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE) tag = ForeignKey("schema.TagM2MTest", CASCADE) class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], "IntegerField") self.assertEqual(columns['tag_id'][0], "IntegerField") def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Ensure there's no m2m table there with self.assertRaises(DatabaseError): self.column_classes(new_field.remote_field.through) # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = 
        self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")

        # "Alter" the field. This should not rename the DB table to itself.
        with connection.schema_editor() as editor:
            editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)

        # Remove the M2M table again
        with connection.schema_editor() as editor:
            editor.remove_field(LocalAuthorWithM2M, new_field)
        # Ensure there's no m2m table there
        with self.assertRaises(DatabaseError):
            self.column_classes(new_field.remote_field.through)

        # Make sure the model state is coherent with the table one now that
        # we've removed the tags field.
        opts = LocalAuthorWithM2M._meta
        opts.local_many_to_many.remove(new_field)
        del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name]
        opts._expire_cache()

    def test_m2m(self):
        self._test_m2m(ManyToManyField)

    def test_m2m_custom(self):
        self._test_m2m(CustomManyToManyField)

    def test_m2m_inherited(self):
        self._test_m2m(InheritedManyToManyField)

    def _test_m2m_through_alter(self, M2MFieldClass):
        """
        Tests altering M2Ms with explicit through models (should no-op)
        """
        class LocalAuthorTag(Model):
            author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
            tag = ForeignKey("schema.TagM2MTest", CASCADE)

            class Meta:
                app_label = 'schema'
                apps = new_apps

        class LocalAuthorWithM2MThrough(Model):
            name = CharField(max_length=255)
            tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)

            class Meta:
                app_label = 'schema'
                apps = new_apps

        self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(LocalAuthorTag)
            editor.create_model(LocalAuthorWithM2MThrough)
            editor.create_model(TagM2MTest)
        # Ensure the m2m table is there
        self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
        # "Alter" the field's blankness. This should not actually do anything.
        old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
        new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
        new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
        with connection.schema_editor() as editor:
            editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True)
        # Ensure the m2m table is still there
        self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)

    def test_m2m_through_alter(self):
        self._test_m2m_through_alter(ManyToManyField)

    def test_m2m_through_alter_custom(self):
        self._test_m2m_through_alter(CustomManyToManyField)

    def test_m2m_through_alter_inherited(self):
        self._test_m2m_through_alter(InheritedManyToManyField)

    def _test_m2m_repoint(self, M2MFieldClass):
        """
        Tests repointing M2M fields
        """
        class LocalBookWithM2M(Model):
            author = ForeignKey(Author, CASCADE)
            title = CharField(max_length=100, db_index=True)
            pub_date = DateTimeField()
            tags = M2MFieldClass("TagM2MTest", related_name="books")

            class Meta:
                app_label = 'schema'
                apps = new_apps

        self.local_models = [LocalBookWithM2M]
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(LocalBookWithM2M)
            editor.create_model(TagM2MTest)
            editor.create_model(UniqueTest)
        # Ensure the M2M exists and points to TagM2MTest
        if connection.features.supports_foreign_keys:
            self.assertForeignKeyExists(
                LocalBookWithM2M._meta.get_field("tags").remote_field.through,
                'tagm2mtest_id',
                'schema_tagm2mtest',
            )
        # Repoint the M2M
        old_field = LocalBookWithM2M._meta.get_field("tags")
        new_field = M2MFieldClass(UniqueTest)
        new_field.contribute_to_class(LocalBookWithM2M, "uniques")
        with connection.schema_editor() as editor:
            editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
        # Ensure old M2M is gone
        with self.assertRaises(DatabaseError):
            self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)

        # This model looks like the new model and is used for teardown.
        opts = LocalBookWithM2M._meta
        opts.local_many_to_many.remove(old_field)
        # Ensure the new M2M exists and points to UniqueTest
        if connection.features.supports_foreign_keys:
            self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest')

    def test_m2m_repoint(self):
        self._test_m2m_repoint(ManyToManyField)

    def test_m2m_repoint_custom(self):
        self._test_m2m_repoint(CustomManyToManyField)

    def test_m2m_repoint_inherited(self):
        self._test_m2m_repoint(InheritedManyToManyField)
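
    # For auto-created through models, repointing the M2M is implemented by
    # dropping the old through table and creating a new one for the new
    # target, which is why column_classes() on the old through model raises
    # DatabaseError afterwards.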

    @isolate_apps('schema')
    def test_m2m_rename_field_in_target_model(self):
        class LocalTagM2MTest(Model):
            title = CharField(max_length=255)

            class Meta:
                app_label = 'schema'

        class LocalM2M(Model):
            tags = ManyToManyField(LocalTagM2MTest)

            class Meta:
                app_label = 'schema'

        # Create the tables.
        with connection.schema_editor() as editor:
            editor.create_model(LocalM2M)
            editor.create_model(LocalTagM2MTest)
        self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
        # Ensure the m2m table is there.
        self.assertEqual(len(self.column_classes(LocalM2M)), 1)
        # Alter a field in LocalTagM2MTest.
        old_field = LocalTagM2MTest._meta.get_field('title')
        new_field = CharField(max_length=254)
        new_field.contribute_to_class(LocalTagM2MTest, 'title1')
        # @isolate_apps() and inner models are needed to have the model
        # relations populated, otherwise this doesn't act as a regression
        # test.
        self.assertEqual(len(new_field.model._meta.related_objects), 1)
        with connection.schema_editor() as editor:
            editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
        # Ensure the m2m table is still there.
        self.assertEqual(len(self.column_classes(LocalM2M)), 1)

    @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
    def test_check_constraints(self):
        """
        Tests creating/deleting CHECK constraints
        """
        # Create the tables
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Ensure the constraint exists
        constraints = self.get_constraints(Author._meta.db_table)
        if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
            self.fail("No check constraint for height found")
        # Alter the column to remove it
        old_field = Author._meta.get_field("height")
        new_field = IntegerField(null=True, blank=True)
        new_field.set_attributes_from_name("height")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        constraints = self.get_constraints(Author._meta.db_table)
        for details in constraints.values():
            if details['columns'] == ["height"] and details['check']:
                self.fail("Check constraint for height found")
        # Alter the column to re-add it
        new_field2 = Author._meta.get_field("height")
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, new_field2, strict=True)
        constraints = self.get_constraints(Author._meta.db_table)
        if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
            self.fail("No check constraint for height found")

    @skipUnlessDBFeature('supports_column_check_constraints', 'can_introspect_check_constraints')
    def test_remove_field_check_does_not_remove_meta_constraints(self):
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        # Add the custom check constraint
        constraint = CheckConstraint(check=Q(height__gte=0), name='author_height_gte_0_check')
        custom_constraint_name = constraint.name
        Author._meta.constraints = [constraint]
        with connection.schema_editor() as editor:
            editor.add_constraint(Author, constraint)
        # Ensure the constraints exist
        constraints = self.get_constraints(Author._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 1)
        # Alter the column to remove field check
        old_field = Author._meta.get_field('height')
        new_field = IntegerField(null=True, blank=True)
        new_field.set_attributes_from_name('height')
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        constraints = self.get_constraints(Author._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 0)
        # Alter the column to re-add field check
        new_field2 = Author._meta.get_field('height')
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, new_field2, strict=True)
        constraints = self.get_constraints(Author._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['height'] and details['check'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 1)
        # Drop the check constraint
        with connection.schema_editor() as editor:
            Author._meta.constraints = []
            editor.remove_constraint(Author, constraint)

    def test_unique(self):
        """
        Tests removing and adding unique constraints to a single column.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        # Ensure the field is unique to begin with
        Tag.objects.create(title="foo", slug="foo")
        with self.assertRaises(IntegrityError):
            Tag.objects.create(title="bar", slug="foo")
        Tag.objects.all().delete()
        # Alter the slug field to be non-unique
        old_field = Tag._meta.get_field("slug")
        new_field = SlugField(unique=False)
        new_field.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_field(Tag, old_field, new_field, strict=True)
        # Ensure the field is no longer unique
        Tag.objects.create(title="foo", slug="foo")
        Tag.objects.create(title="bar", slug="foo")
        Tag.objects.all().delete()
        # Alter the slug field to be unique
        new_field2 = SlugField(unique=True)
        new_field2.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_field(Tag, new_field, new_field2, strict=True)
        # Ensure the field is unique again
        Tag.objects.create(title="foo", slug="foo")
        with self.assertRaises(IntegrityError):
            Tag.objects.create(title="bar", slug="foo")
        Tag.objects.all().delete()
        # Rename the field
        new_field3 = SlugField(unique=True)
        new_field3.set_attributes_from_name("slug2")
        with connection.schema_editor() as editor:
            editor.alter_field(Tag, new_field2, new_field3, strict=True)
        # Ensure the field is still unique
        TagUniqueRename.objects.create(title="foo", slug2="foo")
        with self.assertRaises(IntegrityError):
            TagUniqueRename.objects.create(title="bar", slug2="foo")
        Tag.objects.all().delete()

    def test_unique_name_quoting(self):
        old_table_name = TagUniqueRename._meta.db_table
        try:
            with connection.schema_editor() as editor:
                editor.create_model(TagUniqueRename)
                editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table')
                TagUniqueRename._meta.db_table = 'unique-table'
                # This fails if the unique index name isn't quoted.
                editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),))
        finally:
            TagUniqueRename._meta.db_table = old_table_name

    @isolate_apps('schema')
    @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.')
    @skipUnlessDBFeature('supports_foreign_keys')
    def test_unique_no_unnecessary_fk_drops(self):
        """
        If AlterField isn't selective about dropping foreign key constraints
        when modifying a field with a unique constraint, the AlterField
        incorrectly drops and recreates the Book.author foreign key even
        though it doesn't restrict the field being changed (#29193).
        """
        class Author(Model):
            name = CharField(max_length=254, unique=True)

            class Meta:
                app_label = 'schema'

        class Book(Model):
            author = ForeignKey(Author, CASCADE)

            class Meta:
                app_label = 'schema'

        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        new_field = CharField(max_length=255, unique=True)
        new_field.model = Author
        new_field.set_attributes_from_name('name')
        with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
            with connection.schema_editor() as editor:
                editor.alter_field(Author, Author._meta.get_field('name'), new_field)
        # One SQL statement is executed to alter the field.
        self.assertEqual(len(cm.records), 1)

    @isolate_apps('schema')
    @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite remakes the table on field alteration.')
    def test_unique_and_reverse_m2m(self):
        """
        AlterField can modify a unique field when there's a reverse M2M
        relation on the model.
        """
        class Tag(Model):
            title = CharField(max_length=255)
            slug = SlugField(unique=True)

            class Meta:
                app_label = 'schema'

        class Book(Model):
            tags = ManyToManyField(Tag, related_name='books')

            class Meta:
                app_label = 'schema'

        self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through]
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
            editor.create_model(Book)
        new_field = SlugField(max_length=75, unique=True)
        new_field.model = Tag
        new_field.set_attributes_from_name('slug')
        with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
            with connection.schema_editor() as editor:
                editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field)
        # One SQL statement is executed to alter the field.
        self.assertEqual(len(cm.records), 1)
        # Ensure that the field is still unique.
        Tag.objects.create(title='foo', slug='foo')
        with self.assertRaises(IntegrityError):
            Tag.objects.create(title='bar', slug='foo')

    @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields')
    def test_remove_field_unique_does_not_remove_meta_constraints(self):
        with connection.schema_editor() as editor:
            editor.create_model(AuthorWithUniqueName)
        # Add the custom unique constraint
        constraint = UniqueConstraint(fields=['name'], name='author_name_uniq')
        custom_constraint_name = constraint.name
        AuthorWithUniqueName._meta.constraints = [constraint]
        with connection.schema_editor() as editor:
            editor.add_constraint(AuthorWithUniqueName, constraint)
        # Ensure the constraints exist
        constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 1)
        # Alter the column to remove field uniqueness
        old_field = AuthorWithUniqueName._meta.get_field('name')
        new_field = CharField(max_length=255)
        new_field.set_attributes_from_name('name')
        with connection.schema_editor() as editor:
            editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
        constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 0)
        # Alter the column to re-add field uniqueness
        new_field2 = AuthorWithUniqueName._meta.get_field('name')
        with connection.schema_editor() as editor:
            editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
        constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
        self.assertIn(custom_constraint_name, constraints)
        other_constraints = [
            name for name, details in constraints.items()
            if details['columns'] == ['name'] and details['unique'] and name != custom_constraint_name
        ]
        self.assertEqual(len(other_constraints), 1)
        # Drop the unique constraint
        with connection.schema_editor() as editor:
            AuthorWithUniqueName._meta.constraints = []
            editor.remove_constraint(AuthorWithUniqueName, constraint)
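
    # Meta.constraints are managed explicitly via add_constraint() /
    # remove_constraint(); the test above checks that an alter_field() that
    # merely drops the implicit unique=True index leaves the identically
    # shaped UniqueConstraint in place.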

    def test_unique_together(self):
        """
        Tests removing and adding unique_together constraints on a model.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(UniqueTest)
        # Ensure the fields are unique to begin with
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.create(year=2011, slug="foo")
        UniqueTest.objects.create(year=2011, slug="bar")
        with self.assertRaises(IntegrityError):
            UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.all().delete()
        # Alter the model to its non-unique-together companion
        with connection.schema_editor() as editor:
            editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
        # Ensure the fields are no longer unique
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.all().delete()
        # Alter it back
        new_field2 = SlugField(unique=True)
        new_field2.set_attributes_from_name("slug")
        with connection.schema_editor() as editor:
            editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
        # Ensure the fields are unique again
        UniqueTest.objects.create(year=2012, slug="foo")
        with self.assertRaises(IntegrityError):
            UniqueTest.objects.create(year=2012, slug="foo")
        UniqueTest.objects.all().delete()

    def test_unique_together_with_fk(self):
        """
        Tests removing and adding unique_together constraints that include
        a foreign key.
        """
        # Create the table
        with connection.schema_editor() as editor:
            editor.create_model(Author)
            editor.create_model(Book)
        # Ensure the fields aren't unique together to begin with
        self.assertEqual(Book._meta.unique_together, ())
        # Add the unique_together constraint
        with connection.schema_editor() as editor:
            editor.alter_unique_together(Book, [], [['author', 'title']])
        # Alter it back
        with connection.schema_editor() as editor:
            editor.alter_unique_together(Book, [['author', 'title']], [])

    def test_unique_together_with_fk_with_existing_index(self):
        """
        Tests removing and adding unique_together constraints that include
        a foreign key, where the foreign key is added after the model is
        created.
""" # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithoutAuthor) new_field = ForeignKey(Author, CASCADE) new_field.set_attributes_from_name('author') editor.add_field(BookWithoutAuthor, new_field) # Ensure the fields aren't unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_unique_together_does_not_remove_meta_constraints(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithUniqueNameAndBirthday) # Add the custom unique constraint constraint = UniqueConstraint(fields=['name', 'birthday'], name='author_name_birthday_uniq') custom_constraint_name = constraint.name AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint] with connection.schema_editor() as editor: editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint) # Ensure the constraints exist constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Remove unique together unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, unique_together, []) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 0) # Re-add unique together with connection.schema_editor() as editor: editor.alter_unique_together(AuthorWithUniqueNameAndBirthday, [], unique_together) constraints = self.get_constraints(AuthorWithUniqueNameAndBirthday._meta.db_table) self.assertIn(custom_constraint_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['unique'] and name != custom_constraint_name ] self.assertEqual(len(other_constraints), 1) # Drop the unique constraint with connection.schema_editor() as editor: AuthorWithUniqueNameAndBirthday._meta.constraints = [] editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint) def test_index_together(self): """ Tests removing and adding index_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure there's no index on the year/slug columns first self.assertEqual( False, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) # Alter the model to add an index with connection.schema_editor() as editor: editor.alter_index_together(Tag, [], [("slug", "title")]) # Ensure there is now an index self.assertEqual( True, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_index_together(Tag, [("slug", "title")], []) # Ensure there's no index self.assertEqual( False, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) def test_index_together_with_fk(self): """ Tests removing and adding index_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertEqual( True, any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), ) @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_remove_index_together_does_not_remove_meta_indexes(self): with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedNameAndBirthday) # Add the custom index index = Index(fields=['name', 'birthday'], name='author_name_birthday_idx') custom_index_name = index.name AuthorWithIndexedNameAndBirthday._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedNameAndBirthday, index) # Ensure the indexes exist constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Remove index together index_together = AuthorWithIndexedNameAndBirthday._meta.index_together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, index_together, []) constraints = self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 0) # Re-add index together with connection.schema_editor() as editor: editor.alter_index_together(AuthorWithIndexedNameAndBirthday, [], index_together) constraints = 
self.get_constraints(AuthorWithIndexedNameAndBirthday._meta.db_table) self.assertIn(custom_index_name, constraints) other_constraints = [ name for name, details in constraints.items() if details['columns'] == ['name', 'birthday'] and details['index'] and name != custom_index_name ] self.assertEqual(len(other_constraints), 1) # Drop the index with connection.schema_editor() as editor: AuthorWithIndexedNameAndBirthday._meta.indexes = [] editor.remove_index(AuthorWithIndexedNameAndBirthday, index) @isolate_apps('schema') def test_db_table(self): """ Tests renaming of the table """ class Author(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' # Create the table and one referring it. with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Ensure the foreign key reference was updated self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor") # Alter the table again with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") def test_add_remove_index(self): """ Tests index addition and removal """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there and has no index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) # Add the index index = Index(fields=['name'], name='author_title_idx') with connection.schema_editor() as editor: editor.add_index(Author, index) self.assertIn('name', self.get_indexes(Author._meta.db_table)) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) self.assertNotIn('name', self.get_indexes(Author._meta.db_table)) def test_remove_db_index_doesnt_remove_custom_indexes(self): """ Changing db_index to False doesn't remove indexes from Meta.indexes. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithIndexedName) # Ensure the table has its index self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table)) # Add the custom index index = Index(fields=['-name'], name='author_name_idx') author_index_name = index.name with connection.schema_editor() as editor: db_index_name = editor._create_index_name( table_name=AuthorWithIndexedName._meta.db_table, column_names=('name',), ) try: AuthorWithIndexedName._meta.indexes = [index] with connection.schema_editor() as editor: editor.add_index(AuthorWithIndexedName, index) old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertIn(author_index_name, old_constraints) self.assertIn(db_index_name, old_constraints) # Change name field to db_index=False old_field = AuthorWithIndexedName._meta.get_field('name') new_field = CharField(max_length=255) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True) new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table) self.assertNotIn(db_index_name, new_constraints) # The index from Meta.indexes is still in the database. self.assertIn(author_index_name, new_constraints) # Drop the index with connection.schema_editor() as editor: editor.remove_index(AuthorWithIndexedName, index) finally: AuthorWithIndexedName._meta.indexes = [] def test_order_index(self): """ Indexes defined with ordering (ASC/DESC) defined on column """ with connection.schema_editor() as editor: editor.create_model(Author) # The table doesn't have an index self.assertNotIn('title', self.get_indexes(Author._meta.db_table)) index_name = 'author_name_idx' # Add the index index = Index(fields=['name', '-weight'], name=index_name) with connection.schema_editor() as editor: editor.add_index(Author, index) if connection.features.supports_index_column_ordering: self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC']) # Drop the index with connection.schema_editor() as editor: editor.remove_index(Author, index) def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_uniques(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() 
as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_uniques(Book._meta.db_table), ) def test_text_field_with_db_index(self): with connection.schema_editor() as editor: editor.create_model(AuthorTextFieldWithIndex) # The text_field index is present if the database supports it. assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id') # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug') def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @skipIfDBFeature('can_rollback_ddl') def test_unsupported_transactional_ddl_disallowed(self): message = ( "Executing DDL statements while in a transaction on databases " "that can't perform a rollback is prohibited." ) with atomic(), connection.schema_editor() as editor: with self.assertRaisesMessage(TransactionManagementError, message): editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''}) @skipUnlessDBFeature('supports_foreign_keys') def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. 
""" # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) @isolate_apps('schema') @skipUnlessDBFeature('supports_foreign_keys') def test_add_foreign_key_quoted_db_table(self): class Author(Model): class Meta: db_table = '"table_author_double_quoted"' app_label = 'schema' class Book(Model): author = ForeignKey(Author, CASCADE) class Meta: app_label = 'schema' with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) if connection.vendor == 'mysql': self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"') else: self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted') def test_add_foreign_object(self): with connection.schema_editor() as editor: editor.create_model(BookForeignObj) new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id']) new_field.set_attributes_from_name('author') with connection.schema_editor() as editor: editor.add_field(BookForeignObj, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. """ # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after an SQL reserved word: %s" % e) # The table is there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # The table is gone with self.assertRaises(DatabaseError): list(Thing.objects.all()) def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. 
""" def get_field(*args, field_class=IntegerField, **kwargs): kwargs['db_column'] = "CamelCase" field = field_class(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column identifier_converter = connection.introspection.identifier_converter with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) constraint_name = 'CamelCaseIndex' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "using": "", "columns": editor.quote_name(column), "extra": "", "condition": "", } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(db_index=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) constraint_name = 'CamelCaseUniqConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute(editor._create_unique_sql(model, [field.column], constraint_name)) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(unique=True), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) if editor.sql_create_fk: constraint_name = 'CamelCaseFKConstraint' expected_constraint_name = identifier_converter(constraint_name) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name(constraint_name), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), "deferrable": connection.ops.deferrable_sql(), } ) self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True) self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table)) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the field default when adding a new field. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertIsNone(Author.objects.get().height) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.') def test_alter_field_default_doesnt_perform_queries(self): """ No queries are performed if a field default changes and the field's not changing from null to non-null. 
""" with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) old_field = AuthorWithDefaultHeight._meta.get_field('height') new_default = old_field.default * 2 new_field = PositiveIntegerField(null=True, blank=True, default=new_default) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor, self.assertNumQueries(0): editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True) def test_add_textfield_unhashable_default(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') # Create a field that has an unhashable default new_field = TextField(default={}) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.add_field(Author, new_field) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_indexed_charfield(self): field = CharField(max_length=255, db_index=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'], ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_add_unique_charfield(self): field = CharField(max_length=255, unique=True) field.set_attributes_from_name('nom_de_plume') with connection.schema_editor() as editor: editor.create_model(Author) editor.add_field(Author, field) # Should create two indexes; one for like operator. self.assertEqual( self.get_constraints_for_column(Author, 'nom_de_plume'), ['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add db_index=True and create 2 indexes. old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, db_index=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Author) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) # Alter to add unique=True and create 2 indexes. 
old_field = Author._meta.get_field('name') new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('name') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Author, 'name'), ['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq'] ) # Remove unique=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Author, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Author, 'name'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_index_to_textfield(self): # Create the table and verify no initial indexes. with connection.schema_editor() as editor: editor.create_model(Note) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) # Alter to add db_index=True and create 2 indexes. old_field = Note._meta.get_field('info') new_field = TextField(db_index=True) new_field.set_attributes_from_name('info') with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(Note, 'info'), ['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like'] ) # Remove db_index=True to drop both indexes. with connection.schema_editor() as editor: editor.alter_field(Note, new_field, old_field, strict=True) self.assertEqual(self.get_constraints_for_column(Note, 'info'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_unique_to_charfield_with_db_index(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove unique=True (should drop unique index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_remove_unique_and_db_index_from_charfield(self): # Create the table and verify initial indexes. 
with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to add unique=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, db_index=True, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to remove both unique=True and db_index=True (should drop all indexes) new_field2 = CharField(max_length=100) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), []) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_swap_unique_and_db_index_with_charfield(self): # Create the table and verify initial indexes. with connection.schema_editor() as editor: editor.create_model(BookWithoutAuthor) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) # Alter to set unique=True and remove db_index=True (should replace the index) old_field = BookWithoutAuthor._meta.get_field('title') new_field = CharField(max_length=100, unique=True) new_field.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq'] ) # Alter to set db_index=True and remove unique=True (should restore index) new_field2 = CharField(max_length=100, db_index=True) new_field2.set_attributes_from_name('title') with connection.schema_editor() as editor: editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True) self.assertEqual( self.get_constraints_for_column(BookWithoutAuthor, 'title'), ['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like'] ) @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific") def test_alter_field_add_db_index_to_charfield_with_unique(self): # Create the table and verify initial indexes. 
        with connection.schema_editor() as editor:
            editor.create_model(Tag)
        self.assertEqual(
            self.get_constraints_for_column(Tag, 'slug'),
            ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
        )
        # Alter to add db_index=True
        old_field = Tag._meta.get_field('slug')
        new_field = SlugField(db_index=True, unique=True)
        new_field.set_attributes_from_name('slug')
        with connection.schema_editor() as editor:
            editor.alter_field(Tag, old_field, new_field, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Tag, 'slug'),
            ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
        )
        # Alter to remove db_index=True
        new_field2 = SlugField(unique=True)
        new_field2.set_attributes_from_name('slug')
        with connection.schema_editor() as editor:
            editor.alter_field(Tag, new_field, new_field2, strict=True)
        self.assertEqual(
            self.get_constraints_for_column(Tag, 'slug'),
            ['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
        )

    def test_alter_field_add_index_to_integerfield(self):
        # Create the table and verify no initial indexes.
        with connection.schema_editor() as editor:
            editor.create_model(Author)
        self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
        # Alter to add db_index=True and create index.
        old_field = Author._meta.get_field('weight')
        new_field = IntegerField(null=True, db_index=True)
        new_field.set_attributes_from_name('weight')
        with connection.schema_editor() as editor:
            editor.alter_field(Author, old_field, new_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
        # Remove db_index=True to drop index.
        with connection.schema_editor() as editor:
            editor.alter_field(Author, new_field, old_field, strict=True)
        self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])

    def test_alter_pk_with_self_referential_field(self):
        """
        Changing the primary key field name of a model with a self-referential
        foreign key (#26384).
        """
        with connection.schema_editor() as editor:
            editor.create_model(Node)
        old_field = Node._meta.get_field('node_id')
        new_field = AutoField(primary_key=True)
        new_field.set_attributes_from_name('id')
        with connection.schema_editor() as editor:
            editor.alter_field(Node, old_field, new_field, strict=True)
        self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table)

    @mock.patch('django.db.backends.base.schema.datetime')
    @mock.patch('django.db.backends.base.schema.timezone')
    def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz):
        """
        effective_default() should be used for DateField, DateTimeField, and
        TimeField if auto_now or auto_now_add is set (#25005).
""" now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1) now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc) mocked_datetime.now = mock.MagicMock(return_value=now) mocked_tz.now = mock.MagicMock(return_value=now_tz) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check auto_now/auto_now_add attributes are not defined columns = self.column_classes(Author) self.assertNotIn("dob_auto_now", columns) self.assertNotIn("dob_auto_now_add", columns) self.assertNotIn("dtob_auto_now", columns) self.assertNotIn("dtob_auto_now_add", columns) self.assertNotIn("tob_auto_now", columns) self.assertNotIn("tob_auto_now_add", columns) # Create a row Author.objects.create(name='Anonymous1') # Ensure fields were added with the correct defaults dob_auto_now = DateField(auto_now=True) dob_auto_now.set_attributes_from_name('dob_auto_now') self.check_added_field_default( editor, Author, dob_auto_now, 'dob_auto_now', now.date(), cast_function=lambda x: x.date(), ) dob_auto_now_add = DateField(auto_now_add=True) dob_auto_now_add.set_attributes_from_name('dob_auto_now_add') self.check_added_field_default( editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(), cast_function=lambda x: x.date(), ) dtob_auto_now = DateTimeField(auto_now=True) dtob_auto_now.set_attributes_from_name('dtob_auto_now') self.check_added_field_default( editor, Author, dtob_auto_now, 'dtob_auto_now', now, ) dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True) dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add') self.check_added_field_default( editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now, ) tob_auto_now = TimeField(auto_now=True) tob_auto_now.set_attributes_from_name('tob_auto_now') self.check_added_field_default( editor, Author, tob_auto_now, 'tob_auto_now', now.time(), cast_function=lambda x: x.time(), ) tob_auto_now_add = TimeField(auto_now_add=True) tob_auto_now_add.set_attributes_from_name('tob_auto_now_add') self.check_added_field_default( editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(), cast_function=lambda x: x.time(), ) def test_namespaced_db_table_create_index_name(self): """ Table names are stripped of their namespace/schema before being used to generate index names. 
""" with connection.schema_editor() as editor: max_name_length = connection.ops.max_name_length() or 200 namespace = 'n' * max_name_length table_name = 't' * max_name_length namespaced_table_name = '"%s"."%s"' % (namespace, table_name) self.assertEqual( editor._create_index_name(table_name, []), editor._create_index_name(namespaced_table_name, []), ) @unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax') def test_creation_with_db_table_double_quotes(self): oracle_user = connection.creation._test_database_user() class Student(Model): name = CharField(max_length=30) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user class Document(Model): name = CharField(max_length=30) students = ManyToManyField(Student) class Meta: app_label = 'schema' apps = new_apps db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user self.local_models = [Student, Document] with connection.schema_editor() as editor: editor.create_model(Student) editor.create_model(Document) doc = Document.objects.create(name='Test Name') student = Student.objects.create(name='Some man') doc.students.add(student) def test_rename_table_renames_deferred_sql_references(self): atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: editor.create_model(Author) editor.create_model(Book) editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author') editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book') self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_table('schema_author'), False) self.assertIs(statement.references_table('schema_book'), False) @unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.') def test_rename_column_renames_deferred_sql_references(self): with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) old_title = Book._meta.get_field('title') new_title = CharField(max_length=100, db_index=True) new_title.set_attributes_from_name('renamed_title') editor.alter_field(Book, old_title, new_title) old_author = Book._meta.get_field('author') new_author = ForeignKey(Author, CASCADE) new_author.set_attributes_from_name('renamed_author') editor.alter_field(Book, old_author, new_author) self.assertGreater(len(editor.deferred_sql), 0) for statement in editor.deferred_sql: self.assertIs(statement.references_column('book', 'title'), False) self.assertIs(statement.references_column('book', 'author_id'), False) @isolate_apps('schema') def test_referenced_field_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the field they reference from being renamed in an atomic block. 
""" class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_field(Foo, Foo._meta.get_field('field'), new_field) @isolate_apps('schema') def test_referenced_table_without_constraint_rename_inside_atomic_block(self): """ Foreign keys without database level constraint don't prevent the table they reference from being renamed in an atomic block. """ class Foo(Model): field = CharField(max_length=255, unique=True) class Meta: app_label = 'schema' class Bar(Model): foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False) class Meta: app_label = 'schema' self.isolated_local_models = [Foo, Bar] with connection.schema_editor() as editor: editor.create_model(Foo) editor.create_model(Bar) new_field = CharField(max_length=255, unique=True) new_field.set_attributes_from_name('renamed') with connection.schema_editor(atomic=True) as editor: editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table') Foo._meta.db_table = 'renamed_table'
459e8a739ac2de525885c2771038134bcbac406285ff7ddf610dcde48332a10c
""" A series of tests to establish that the command-line management tools work as advertised - especially with regards to the handling of the DJANGO_SETTINGS_MODULE and default settings.py files. """ import os import re import shutil import socket import subprocess import sys import tempfile import unittest from io import StringIO from unittest import mock import django from django import conf, get_version from django.conf import settings from django.core.management import ( BaseCommand, CommandError, call_command, color, ) from django.core.management.commands.loaddata import Command as LoaddataCommand from django.core.management.commands.runserver import ( Command as RunserverCommand, ) from django.core.management.commands.testserver import ( Command as TestserverCommand, ) from django.db import ConnectionHandler, connection from django.db.migrations.recorder import MigrationRecorder from django.test import ( LiveServerTestCase, SimpleTestCase, TestCase, override_settings, ) custom_templates_dir = os.path.join(os.path.dirname(__file__), 'custom_templates') SYSTEM_CHECK_MSG = 'System check identified no issues' class AdminScriptTestCase(SimpleTestCase): def setUp(self): tmpdir = tempfile.TemporaryDirectory() self.addCleanup(tmpdir.cleanup) # os.path.realpath() is required for temporary directories on macOS, # where `/var` is a symlink to `/private/var`. self.test_dir = os.path.realpath(os.path.join(tmpdir.name, 'test_project')) os.mkdir(self.test_dir) with open(os.path.join(self.test_dir, '__init__.py'), 'w'): pass def write_settings(self, filename, apps=None, is_dir=False, sdict=None, extra=None): if is_dir: settings_dir = os.path.join(self.test_dir, filename) os.mkdir(settings_dir) settings_file_path = os.path.join(settings_dir, '__init__.py') else: settings_file_path = os.path.join(self.test_dir, filename) with open(settings_file_path, 'w') as settings_file: settings_file.write('# Settings file automatically generated by admin_scripts test case\n') if extra: settings_file.write("%s\n" % extra) exports = [ 'DATABASES', 'ROOT_URLCONF', 'SECRET_KEY', ] for s in exports: if hasattr(settings, s): o = getattr(settings, s) if not isinstance(o, (dict, tuple, list)): o = "'%s'" % o settings_file.write("%s = %s\n" % (s, o)) if apps is None: apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts'] settings_file.write("INSTALLED_APPS = %s\n" % apps) if sdict: for k, v in sdict.items(): settings_file.write("%s = %s\n" % (k, v)) def _ext_backend_paths(self): """ Returns the paths for any external backend packages. """ paths = [] for backend in settings.DATABASES.values(): package = backend['ENGINE'].split('.')[0] if package != 'django': backend_pkg = __import__(package) backend_dir = os.path.dirname(backend_pkg.__file__) paths.append(os.path.dirname(backend_dir)) return paths def run_test(self, script, args, settings_file=None, apps=None): base_dir = os.path.dirname(self.test_dir) # The base dir for Django's tests is one level up. tests_dir = os.path.dirname(os.path.dirname(__file__)) # The base dir for Django is one level above the test dir. We don't use # `import django` to figure that out, so we don't pick up a Django # from site-packages or similar. 
django_dir = os.path.dirname(tests_dir) ext_backend_base_dirs = self._ext_backend_paths() # Define a temporary environment for the subprocess test_environ = os.environ.copy() # Set the test environment if settings_file: test_environ['DJANGO_SETTINGS_MODULE'] = settings_file elif 'DJANGO_SETTINGS_MODULE' in test_environ: del test_environ['DJANGO_SETTINGS_MODULE'] python_path = [base_dir, django_dir, tests_dir] python_path.extend(ext_backend_base_dirs) test_environ['PYTHONPATH'] = os.pathsep.join(python_path) test_environ['PYTHONWARNINGS'] = '' p = subprocess.run( [sys.executable, script] + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.test_dir, env=test_environ, universal_newlines=True, ) return p.stdout, p.stderr def run_django_admin(self, args, settings_file=None): script_dir = os.path.abspath(os.path.join(os.path.dirname(django.__file__), 'bin')) return self.run_test(os.path.join(script_dir, 'django-admin.py'), args, settings_file) def run_manage(self, args, settings_file=None, manage_py=None): template_manage_py = ( os.path.join(os.path.dirname(__file__), manage_py) if manage_py else os.path.join(os.path.dirname(conf.__file__), 'project_template', 'manage.py-tpl') ) test_manage_py = os.path.join(self.test_dir, 'manage.py') shutil.copyfile(template_manage_py, test_manage_py) with open(test_manage_py) as fp: manage_py_contents = fp.read() manage_py_contents = manage_py_contents.replace( "{{ project_name }}", "test_project") with open(test_manage_py, 'w') as fp: fp.write(manage_py_contents) return self.run_test('./manage.py', args, settings_file) def assertNoOutput(self, stream): "Utility assertion: assert that the given stream is empty" self.assertEqual(len(stream), 0, "Stream should be empty: actually contains '%s'" % stream) def assertOutput(self, stream, msg, regex=False): "Utility assertion: assert that the given message exists in the output" if regex: self.assertIsNotNone( re.search(msg, stream), "'%s' does not match actual output text '%s'" % (msg, stream) ) else: self.assertIn(msg, stream, "'%s' does not match actual output text '%s'" % (msg, stream)) def assertNotInOutput(self, stream, msg): "Utility assertion: assert that the given message doesn't exist in the output" self.assertNotIn(msg, stream, "'%s' matches actual output text '%s'" % (msg, stream)) ########################################################################## # DJANGO ADMIN TESTS # This first series of test classes checks the environment processing # of the django-admin.py script ########################################################################## class DjangoAdminNoSettings(AdminScriptTestCase): "A series of tests for django-admin.py when there is no settings.py file." 
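    # Not a test: a condensed sketch, kept for reference, of the environment
    # isolation that run_test() performs above. The values here are
    # illustrative assumptions only; nothing in the suite calls this helper.
    def _environment_sketch(self):
        env = os.environ.copy()
        # The "no settings" scenarios drop DJANGO_SETTINGS_MODULE entirely,
        # so the child process starts with no configured settings module.
        env.pop('DJANGO_SETTINGS_MODULE', None)
        # PYTHONPATH still points the child process at the generated project
        # directory, so 'test_project.settings' stays importable when set.
        env['PYTHONPATH'] = os.pathsep.join([os.path.dirname(self.test_dir)])
        return env
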
    def test_builtin_command(self):
        "no settings: django-admin builtin commands fail with an error when no settings provided"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')

    def test_builtin_with_bad_settings(self):
        "no settings: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        "no settings: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_commands_with_invalid_settings(self):
        """
        Commands that don't require settings succeed if the settings file
        doesn't exist.
        """
        args = ['startproject']
        out, err = self.run_django_admin(args, settings_file='bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "You must provide a project name", regex=True)


class DjangoAdminDefaultSettings(AdminScriptTestCase):
    """A series of tests for django-admin.py when using a settings.py file that
    contains the test application.
    """
    def setUp(self):
        super().setUp()
        self.write_settings('settings.py')

    def test_builtin_command(self):
        "default: django-admin builtin commands fail with an error when no settings provided"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, 'settings are not configured')

    def test_builtin_with_settings(self):
        "default: django-admin builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_environment(self):
        "default: django-admin builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_bad_settings(self):
        "default: django-admin builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        "default: django-admin builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_django_admin(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        "default: django-admin can't execute user commands if it isn't provided settings"
        args = ['noargs_command']
        out, err = self.run_django_admin(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No Django settings specified")
        self.assertOutput(err, "Unknown command: 'noargs_command'")

    def test_custom_command_with_settings(self):
        "default: django-admin can execute user commands if settings are provided as argument"
        args = ['noargs_command', '--settings=test_project.settings']
        out, err =
self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "default: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminFullPathDefaultSettings(AdminScriptTestCase): """A series of tests for django-admin.py when using a settings.py file that contains the test application specified using a full path. """ def setUp(self): super().setUp() self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes', 'admin_scripts', 'admin_scripts.complex_app']) def test_builtin_command(self): "fulldefault: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "fulldefault: django-admin builtin commands succeed if a settings file is provided" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "fulldefault: django-admin builtin commands succeed if the environment contains settings" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "fulldefault: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "fulldefault: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "fulldefault: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "fulldefault: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "fulldefault: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminMinimalSettings(AdminScriptTestCase): """A series of tests for django-admin.py when using a settings.py file that doesn't contain the test application. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) def test_builtin_command(self): "minimal: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "minimal: django-admin builtin commands fail if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_environment(self): "minimal: django-admin builtin commands fail if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_bad_settings(self): "minimal: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "minimal: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "minimal: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "minimal: django-admin can't execute user commands, even if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_environment(self): "minimal: django-admin can't execute user commands, even if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") class DjangoAdminAlternateSettings(AdminScriptTestCase): """A series of tests for django-admin.py when using a settings file with a name other than 'settings.py'. 
""" def setUp(self): super().setUp() self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "alternate: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "alternate: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "alternate: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.alternate_settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "alternate: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminMultipleSettings(AdminScriptTestCase): """A series of tests for django-admin.py when multiple settings files (including the default 'settings.py') are available. The default settings file is insufficient for performing the operations described, so the alternate settings must be used by the running script. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_settings(self): "alternate: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.alternate_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "alternate: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "alternate: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "alternate: django-admin can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=test_project.alternate_settings'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "alternate: django-admin can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_django_admin(args, 'test_project.alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class DjangoAdminSettingsDirectory(AdminScriptTestCase): """ A series of tests for django-admin.py when the settings file is in a directory. (see #9751). 
""" def setUp(self): super().setUp() self.write_settings('settings', is_dir=True) def test_setup_environ(self): "directory: startapp creates the correct directory" args = ['startapp', 'settings_test'] app_path = os.path.join(self.test_dir, 'settings_test') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) with open(os.path.join(app_path, 'apps.py')) as f: content = f.read() self.assertIn("class SettingsTestConfig(AppConfig)", content) self.assertIn("name = 'settings_test'", content) def test_setup_environ_custom_template(self): "directory: startapp creates the correct directory with a custom template" template_path = os.path.join(custom_templates_dir, 'app_template') args = ['startapp', '--template', template_path, 'custom_settings_test'] app_path = os.path.join(self.test_dir, 'custom_settings_test') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) self.assertTrue(os.path.exists(os.path.join(app_path, 'api.py'))) def test_startapp_unicode_name(self): "directory: startapp creates the correct directory with unicode characters" args = ['startapp', 'こんにちは'] app_path = os.path.join(self.test_dir, 'こんにちは') out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertTrue(os.path.exists(app_path)) with open(os.path.join(app_path, 'apps.py'), encoding='utf8') as f: content = f.read() self.assertIn("class こんにちはConfig(AppConfig)", content) self.assertIn("name = 'こんにちは'", content) def test_builtin_command(self): "directory: django-admin builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, 'settings are not configured') def test_builtin_with_bad_settings(self): "directory: django-admin builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "directory: django-admin builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "directory: django-admin can't execute user commands unless settings are provided" args = ['noargs_command'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "No Django settings specified") self.assertOutput(err, "Unknown command: 'noargs_command'") def test_builtin_with_settings(self): "directory: django-admin builtin commands succeed if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "directory: django-admin builtin commands succeed if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_django_admin(args, 'test_project.settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) ########################################################################## # MANAGE.PY TESTS # This next series of test classes 
class ManageManuallyConfiguredSettings(AdminScriptTestCase):
    """Customized manage.py calling settings.configure()."""
    def test_non_existent_command_output(self):
        out, err = self.run_manage(['invalid_command'], manage_py='configured_settings_manage.py')
        self.assertNoOutput(out)
        self.assertOutput(err, "Unknown command: 'invalid_command'")
        self.assertNotInOutput(err, 'No Django settings specified')


class ManageNoSettings(AdminScriptTestCase):
    "A series of tests for manage.py when there is no settings.py file."

    def test_builtin_command(self):
        "no settings: manage.py builtin commands fail with an error when no settings provided"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True)

    def test_builtin_with_bad_settings(self):
        "no settings: manage.py builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        "no settings: manage.py builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)


class ManageDefaultSettings(AdminScriptTestCase):
    """A series of tests for manage.py when using a settings.py file that
    contains the test application.
    """
    def setUp(self):
        super().setUp()
        self.write_settings('settings.py')

    def test_builtin_command(self):
        "default: manage.py builtin commands succeed when default settings are appropriate"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_settings(self):
        "default: manage.py builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_environment(self):
        "default: manage.py builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_bad_settings(self):
        "default: manage.py builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        "default: manage.py builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        "default: manage.py can execute user commands when default settings are appropriate"
        args = ['noargs_command']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")
    def test_custom_command_with_settings(self):
        "default: manage.py can execute user commands when settings are provided as argument"
        args = ['noargs_command', '--settings=test_project.settings']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")

    def test_custom_command_with_environment(self):
        "default: manage.py can execute user commands when settings are provided in environment"
        args = ['noargs_command']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")


class ManageFullPathDefaultSettings(AdminScriptTestCase):
    """A series of tests for manage.py when using a settings.py file that
    contains the test application specified using a full path.
    """
    def setUp(self):
        super().setUp()
        self.write_settings('settings.py', ['django.contrib.auth', 'django.contrib.contenttypes',
                                            'admin_scripts'])

    def test_builtin_command(self):
        "fulldefault: manage.py builtin commands succeed when default settings are appropriate"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_settings(self):
        "fulldefault: manage.py builtin commands succeed if settings are provided as argument"
        args = ['check', '--settings=test_project.settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_environment(self):
        "fulldefault: manage.py builtin commands succeed if settings are provided in the environment"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, SYSTEM_CHECK_MSG)

    def test_builtin_with_bad_settings(self):
        "fulldefault: manage.py builtin commands fail if settings file (from argument) doesn't exist"
        args = ['check', '--settings=bad_settings', 'admin_scripts']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_builtin_with_bad_environment(self):
        "fulldefault: manage.py builtin commands fail if settings file (from environment) doesn't exist"
        args = ['check', 'admin_scripts']
        out, err = self.run_manage(args, 'bad_settings')
        self.assertNoOutput(out)
        self.assertOutput(err, "No module named '?bad_settings'?", regex=True)

    def test_custom_command(self):
        "fulldefault: manage.py can execute user commands when default settings are appropriate"
        args = ['noargs_command']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")

    def test_custom_command_with_settings(self):
        "fulldefault: manage.py can execute user commands when settings are provided as argument"
        args = ['noargs_command', '--settings=test_project.settings']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")

    def test_custom_command_with_environment(self):
        "fulldefault: manage.py can execute user commands when settings are provided in environment"
        args = ['noargs_command']
        out, err = self.run_manage(args, 'test_project.settings')
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE: noargs_command")


class ManageMinimalSettings(AdminScriptTestCase):
    """A series of tests for manage.py when using a settings.py file that
    doesn't contain the test application.
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) def test_builtin_command(self): "minimal: manage.py builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_settings(self): "minimal: manage.py builtin commands fail if settings are provided as argument" args = ['check', '--settings=test_project.settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_environment(self): "minimal: manage.py builtin commands fail if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_bad_settings(self): "minimal: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "minimal: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "minimal: manage.py can't execute user commands without appropriate settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "minimal: manage.py can't execute user commands, even if settings are provided as argument" args = ['noargs_command', '--settings=test_project.settings'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_environment(self): "minimal: manage.py can't execute user commands, even if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'test_project.settings') self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") class ManageAlternateSettings(AdminScriptTestCase): """A series of tests for manage.py when using a settings file with a name other than 'settings.py'. 
""" def setUp(self): super().setUp() self.write_settings('alternate_settings.py') def test_builtin_command(self): "alternate: manage.py builtin commands fail with an error when no default settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True) def test_builtin_with_settings(self): "alternate: manage.py builtin commands work with settings provided as argument" args = ['check', '--settings=alternate_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertOutput(out, SYSTEM_CHECK_MSG) self.assertNoOutput(err) def test_builtin_with_environment(self): "alternate: manage.py builtin commands work if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'alternate_settings') self.assertOutput(out, SYSTEM_CHECK_MSG) self.assertNoOutput(err) def test_builtin_with_bad_settings(self): "alternate: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "alternate: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "alternate: manage.py can't execute user commands without settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, r"No module named '?(test_project\.)?settings'?", regex=True) def test_custom_command_with_settings(self): "alternate: manage.py can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', False), ('pythonpath', None), ('settings', " "'alternate_settings'), ('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) def test_custom_command_with_environment(self): "alternate: manage.py can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'alternate_settings') self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', False), ('pythonpath', None), ('settings', None), " "('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) def test_custom_command_output_color(self): "alternate: manage.py output syntax color can be deactivated with the `--no-color` option" args = ['noargs_command', '--no-color', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertOutput( out, "EXECUTE: noargs_command options=[('force_color', False), " "('no_color', True), ('pythonpath', None), ('settings', " "'alternate_settings'), ('traceback', False), ('verbosity', 1)]" ) self.assertNoOutput(err) class ManageMultipleSettings(AdminScriptTestCase): """A series of tests for manage.py when multiple settings files (including the default 'settings.py') are available. The default settings file is insufficient for performing the operations described, so the alternate settings must be used by the running script. 
""" def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_builtin_command(self): "multiple: manage.py builtin commands fail with an error when no settings provided" args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No installed app with label 'admin_scripts'.") def test_builtin_with_settings(self): "multiple: manage.py builtin commands succeed if settings are provided as argument" args = ['check', '--settings=alternate_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_environment(self): "multiple: manage.py can execute builtin commands if settings are provided in the environment" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'alternate_settings') self.assertNoOutput(err) self.assertOutput(out, SYSTEM_CHECK_MSG) def test_builtin_with_bad_settings(self): "multiple: manage.py builtin commands fail if settings file (from argument) doesn't exist" args = ['check', '--settings=bad_settings', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_builtin_with_bad_environment(self): "multiple: manage.py builtin commands fail if settings file (from environment) doesn't exist" args = ['check', 'admin_scripts'] out, err = self.run_manage(args, 'bad_settings') self.assertNoOutput(out) self.assertOutput(err, "No module named '?bad_settings'?", regex=True) def test_custom_command(self): "multiple: manage.py can't execute user commands using default settings" args = ['noargs_command'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'noargs_command'") def test_custom_command_with_settings(self): "multiple: manage.py can execute user commands if settings are provided as argument" args = ['noargs_command', '--settings=alternate_settings'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") def test_custom_command_with_environment(self): "multiple: manage.py can execute user commands if settings are provided in environment" args = ['noargs_command'] out, err = self.run_manage(args, 'alternate_settings') self.assertNoOutput(err) self.assertOutput(out, "EXECUTE: noargs_command") class ManageSettingsWithSettingsErrors(AdminScriptTestCase): """ Tests for manage.py when using the default settings.py file containing runtime errors. """ def write_settings_with_import_error(self, filename): settings_file_path = os.path.join(self.test_dir, filename) with open(settings_file_path, 'w') as settings_file: settings_file.write('# Settings file automatically generated by admin_scripts test case\n') settings_file.write('# The next line will cause an import error:\nimport foo42bar\n') def test_import_error(self): """ import error: manage.py builtin commands shows useful diagnostic info when settings with import errors is provided (#14130). """ self.write_settings_with_import_error('settings.py') args = ['check', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "No module named") self.assertOutput(err, "foo42bar") def test_attribute_error(self): """ manage.py builtin commands does not swallow attribute error due to bad settings (#18845). 
""" self.write_settings('settings.py', sdict={'BAD_VAR': 'INSTALLED_APPS.crash'}) args = ['collectstatic', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "AttributeError: 'list' object has no attribute 'crash'") def test_key_error(self): self.write_settings('settings.py', sdict={'BAD_VAR': 'DATABASES["blah"]'}) args = ['collectstatic', 'admin_scripts'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, "KeyError: 'blah'") def test_help(self): """ Test listing available commands output note when only core commands are available. """ self.write_settings( 'settings.py', extra='from django.core.exceptions import ImproperlyConfigured\n' 'raise ImproperlyConfigured()', ) args = ['help'] out, err = self.run_manage(args) self.assertOutput(out, 'only Django core commands are listed') self.assertNoOutput(err) class ManageCheck(AdminScriptTestCase): def test_nonexistent_app(self): """check reports an error on a nonexistent app in INSTALLED_APPS.""" self.write_settings( 'settings.py', apps=['admin_scriptz.broken_app'], sdict={'USE_I18N': False}, ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, 'ModuleNotFoundError') self.assertOutput(err, 'No module named') self.assertOutput(err, 'admin_scriptz') def test_broken_app(self): """ manage.py check reports an ImportError if an app's models.py raises one on import """ self.write_settings('settings.py', apps=['admin_scripts.broken_app']) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(out) self.assertOutput(err, 'ImportError') def test_complex_app(self): """ manage.py check does not raise an ImportError validating a complex app with nested calls to load_app """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.complex_app', 'admin_scripts.simple_app', 'django.contrib.admin.apps.SimpleAdminConfig', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.messages', ], sdict={ 'DEBUG': True, 'MIDDLEWARE': [ 'django.contrib.messages.middleware.MessageMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', ], 'TEMPLATES': [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ], } ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertEqual(out, 'System check identified no issues (0 silenced).\n') def test_app_with_import(self): """ manage.py check does not raise errors when an app imports a base class that itself has an abstract base. """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_with_import', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertEqual(out, 'System check identified no issues (0 silenced).\n') def test_output_format(self): """ All errors/warnings should be sorted by level and by message. 
""" self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_raising_messages', 'django.contrib.auth', 'django.contrib.contenttypes', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) expected_err = ( "SystemCheckError: System check identified some issues:\n" "\n" "ERRORS:\n" "?: An error\n" "\tHINT: Error hint\n" "\n" "WARNINGS:\n" "a: Second warning\n" "obj: First warning\n" "\tHINT: Hint\n" "\n" "System check identified 3 issues (0 silenced).\n" ) self.assertEqual(err, expected_err) self.assertNoOutput(out) def test_warning_does_not_halt(self): """ When there are only warnings or less serious messages, then Django should not prevent user from launching their project, so `check` command should not raise `CommandError` exception. In this test we also test output format. """ self.write_settings( 'settings.py', apps=[ 'admin_scripts.app_raising_warning', 'django.contrib.auth', 'django.contrib.contenttypes', ], sdict={'DEBUG': True}, ) args = ['check'] out, err = self.run_manage(args) expected_err = ( "System check identified some issues:\n" # No "CommandError: " part "\n" "WARNINGS:\n" "?: A warning\n" "\n" "System check identified 1 issue (0 silenced).\n" ) self.assertEqual(err, expected_err) self.assertNoOutput(out) class ManageRunserver(SimpleTestCase): def setUp(self): def monkey_run(*args, **options): return self.output = StringIO() self.cmd = RunserverCommand(stdout=self.output) self.cmd.run = monkey_run def assertServerSettings(self, addr, port, ipv6=False, raw_ipv6=False): self.assertEqual(self.cmd.addr, addr) self.assertEqual(self.cmd.port, port) self.assertEqual(self.cmd.use_ipv6, ipv6) self.assertEqual(self.cmd._raw_ipv6, raw_ipv6) def test_runserver_addrport(self): call_command(self.cmd) self.assertServerSettings('127.0.0.1', '8000') call_command(self.cmd, addrport="1.2.3.4:8000") self.assertServerSettings('1.2.3.4', '8000') call_command(self.cmd, addrport="7000") self.assertServerSettings('127.0.0.1', '7000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_addrport_ipv6(self): call_command(self.cmd, addrport="", use_ipv6=True) self.assertServerSettings('::1', '8000', ipv6=True, raw_ipv6=True) call_command(self.cmd, addrport="7000", use_ipv6=True) self.assertServerSettings('::1', '7000', ipv6=True, raw_ipv6=True) call_command(self.cmd, addrport="[2001:0db8:1234:5678::9]:7000") self.assertServerSettings('2001:0db8:1234:5678::9', '7000', ipv6=True, raw_ipv6=True) def test_runner_hostname(self): call_command(self.cmd, addrport="localhost:8000") self.assertServerSettings('localhost', '8000') call_command(self.cmd, addrport="test.domain.local:7000") self.assertServerSettings('test.domain.local', '7000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_hostname_ipv6(self): call_command(self.cmd, addrport="test.domain.local:7000", use_ipv6=True) self.assertServerSettings('test.domain.local', '7000', ipv6=True) def test_runner_custom_defaults(self): self.cmd.default_addr = '0.0.0.0' self.cmd.default_port = '5000' call_command(self.cmd) self.assertServerSettings('0.0.0.0', '5000') @unittest.skipUnless(socket.has_ipv6, "platform doesn't support IPv6") def test_runner_custom_defaults_ipv6(self): self.cmd.default_addr_ipv6 = '::' call_command(self.cmd, use_ipv6=True) self.assertServerSettings('::', '8000', ipv6=True, raw_ipv6=True) def test_runner_ambiguous(self): # Only 4 characters, all of which could be in an ipv6 address call_command(self.cmd, addrport="beef:7654") 
        self.assertServerSettings('beef', '7654')

        # Uses only characters that could be in an ipv6 address
        call_command(self.cmd, addrport="deadbeef:7654")
        self.assertServerSettings('deadbeef', '7654')

    def test_no_database(self):
        """
        Ensure runserver.check_migrations doesn't choke on empty DATABASES.
        """
        tested_connections = ConnectionHandler({})
        with mock.patch('django.core.management.base.connections', new=tested_connections):
            self.cmd.check_migrations()

    def test_readonly_database(self):
        """
        runserver.check_migrations() doesn't choke when a database is
        read-only.
        """
        with mock.patch.object(MigrationRecorder, 'has_table', return_value=False):
            self.cmd.check_migrations()
        # The output should mention unapplied migrations ("You have ...").
        self.assertIn('unapplied migration(s)', self.output.getvalue())


class ManageRunserverMigrationWarning(TestCase):

    def setUp(self):
        self.stdout = StringIO()
        self.runserver_command = RunserverCommand(stdout=self.stdout)

    @override_settings(INSTALLED_APPS=["admin_scripts.app_waiting_migration"])
    def test_migration_warning_one_app(self):
        self.runserver_command.check_migrations()
        output = self.stdout.getvalue()
        self.assertIn('You have 1 unapplied migration(s)', output)
        self.assertIn('apply the migrations for app(s): app_waiting_migration.', output)

    @override_settings(
        INSTALLED_APPS=[
            "admin_scripts.app_waiting_migration",
            "admin_scripts.another_app_waiting_migration",
        ],
    )
    def test_migration_warning_multiple_apps(self):
        self.runserver_command.check_migrations()
        output = self.stdout.getvalue()
        self.assertIn('You have 2 unapplied migration(s)', output)
        self.assertIn(
            'apply the migrations for app(s): another_app_waiting_migration, '
            'app_waiting_migration.', output
        )


class ManageRunserverEmptyAllowedHosts(AdminScriptTestCase):
    def setUp(self):
        super().setUp()
        self.write_settings('settings.py', sdict={
            'ALLOWED_HOSTS': [],
            'DEBUG': False,
        })

    def test_empty_allowed_hosts_error(self):
        out, err = self.run_manage(['runserver'])
        self.assertNoOutput(out)
        self.assertOutput(err, 'CommandError: You must set settings.ALLOWED_HOSTS if DEBUG is False.')


class ManageTestserver(SimpleTestCase):

    @mock.patch.object(TestserverCommand, 'handle', return_value='')
    def test_testserver_handle_params(self, mock_handle):
        out = StringIO()
        call_command('testserver', 'blah.json', stdout=out)
        mock_handle.assert_called_with(
            'blah.json',
            stdout=out, settings=None, pythonpath=None, verbosity=1,
            traceback=False, addrport='', no_color=False, use_ipv6=False,
            skip_checks=True, interactive=True, force_color=False,
        )
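    # The next test stubs out create_test_db() and the loaddata/runserver
    # handle() methods, so call_command() can run end-to-end without creating
    # a real test database, while assert_called_with() captures exactly which
    # options testserver forwards to runserver.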
    @mock.patch('django.db.connection.creation.create_test_db', return_value='test_db')
    @mock.patch.object(LoaddataCommand, 'handle', return_value='')
    @mock.patch.object(RunserverCommand, 'handle', return_value='')
    def test_params_to_runserver(self, mock_runserver_handle, mock_loaddata_handle, mock_create_test_db):
        out = StringIO()
        call_command('testserver', 'blah.json', stdout=out)
        mock_runserver_handle.assert_called_with(
            addrport='',
            force_color=False,
            insecure_serving=False,
            no_color=False,
            pythonpath=None,
            settings=None,
            shutdown_message=(
                "\nServer stopped.\nNote that the test database, 'test_db', "
                "has not been deleted. You can explore it on your own."
            ),
            skip_checks=True,
            traceback=False,
            use_ipv6=False,
            use_reloader=False,
            use_static_handler=True,
            use_threading=connection.features.test_db_allows_multiple_connections,
            verbosity=1,
        )


##########################################################################
# COMMAND PROCESSING TESTS
# user-space commands are correctly handled - in particular, arguments to
# the commands are correctly parsed and processed.
##########################################################################
class ColorCommand(BaseCommand):
    requires_system_checks = False

    def handle(self, *args, **options):
        self.stdout.write('Hello, world!', self.style.ERROR)
        self.stderr.write('Hello, world!', self.style.ERROR)


class CommandTypes(AdminScriptTestCase):
    "Tests for the various types of base command that can be defined."
    def setUp(self):
        super().setUp()
        self.write_settings('settings.py')

    def test_version(self):
        "version is handled as a special case"
        args = ['version']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, get_version())

    def test_version_alternative(self):
        "--version is equivalent to version"
        args1, args2 = ['version'], ['--version']
        # It's possible one outputs on stderr and the other on stdout, hence the set
        self.assertEqual(set(self.run_manage(args1)), set(self.run_manage(args2)))

    def test_help(self):
        "help is handled as a special case"
        args = ['help']
        out, err = self.run_manage(args)
        self.assertOutput(out, "Type 'manage.py help <subcommand>' for help on a specific subcommand.")
        self.assertOutput(out, '[django]')
        self.assertOutput(out, 'startapp')
        self.assertOutput(out, 'startproject')

    def test_help_commands(self):
        "help --commands shows the list of all available commands"
        args = ['help', '--commands']
        out, err = self.run_manage(args)
        self.assertNotInOutput(out, 'usage:')
        self.assertNotInOutput(out, 'Options:')
        self.assertNotInOutput(out, '[django]')
        self.assertOutput(out, 'startapp')
        self.assertOutput(out, 'startproject')
        self.assertNotInOutput(out, '\n\n')

    def test_help_alternative(self):
        "--help is equivalent to help"
        args1, args2 = ['help'], ['--help']
        self.assertEqual(self.run_manage(args1), self.run_manage(args2))

    def test_help_short_alternative(self):
        "-h is handled as a short form of --help"
        args1, args2 = ['--help'], ['-h']
        self.assertEqual(self.run_manage(args1), self.run_manage(args2))

    def test_specific_help(self):
        "--help can be used on a specific command"
        args = ['check', '--help']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        # Command-specific options like --tag appear before options common to
        # all commands like --version.
        tag_location = out.find('--tag')
        version_location = out.find('--version')
        self.assertNotEqual(tag_location, -1)
        self.assertNotEqual(version_location, -1)
        self.assertLess(tag_location, version_location)
        self.assertOutput(out, "Checks the entire Django project for potential problems.")

    def test_color_style(self):
        style = color.no_style()
        self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')

        style = color.make_style('nocolor')
        self.assertEqual(style.ERROR('Hello, world!'), 'Hello, world!')

        style = color.make_style('dark')
        self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
        self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')

        # Default palette has color.
        style = color.make_style('')
        self.assertIn('Hello, world!', style.ERROR('Hello, world!'))
        self.assertNotEqual(style.ERROR('Hello, world!'), 'Hello, world!')

    def test_command_color(self):
        out = StringIO()
        err = StringIO()
        command = ColorCommand(stdout=out, stderr=err)
        call_command(command)
        if color.supports_color():
            self.assertIn('Hello, world!\n', out.getvalue())
            self.assertIn('Hello, world!\n', err.getvalue())
            self.assertNotEqual(out.getvalue(), 'Hello, world!\n')
            self.assertNotEqual(err.getvalue(), 'Hello, world!\n')
        else:
            self.assertEqual(out.getvalue(), 'Hello, world!\n')
            self.assertEqual(err.getvalue(), 'Hello, world!\n')

    def test_command_no_color(self):
        "--no-color prevents colorization of the output"
        out = StringIO()
        err = StringIO()
        command = ColorCommand(stdout=out, stderr=err, no_color=True)
        call_command(command)
        self.assertEqual(out.getvalue(), 'Hello, world!\n')
        self.assertEqual(err.getvalue(), 'Hello, world!\n')

        out = StringIO()
        err = StringIO()
        command = ColorCommand(stdout=out, stderr=err)
        call_command(command, no_color=True)
        self.assertEqual(out.getvalue(), 'Hello, world!\n')
        self.assertEqual(err.getvalue(), 'Hello, world!\n')

    def test_force_color_execute(self):
        out = StringIO()
        err = StringIO()
        with mock.patch.object(sys.stdout, 'isatty', lambda: False):
            command = ColorCommand(stdout=out, stderr=err)
            call_command(command, force_color=True)
        # '\x1b[31;1m' and '\x1b[0m' are the ANSI escapes for red/bold text
        # and for resetting the style.
        self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
        self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')

    def test_force_color_command_init(self):
        out = StringIO()
        err = StringIO()
        with mock.patch.object(sys.stdout, 'isatty', lambda: False):
            command = ColorCommand(stdout=out, stderr=err, force_color=True)
            call_command(command)
        self.assertEqual(out.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')
        self.assertEqual(err.getvalue(), '\x1b[31;1mHello, world!\n\x1b[0m')

    def test_no_color_force_color_mutually_exclusive_execute(self):
        msg = "The --no-color and --force-color options can't be used together."
        with self.assertRaisesMessage(CommandError, msg):
            call_command(BaseCommand(), no_color=True, force_color=True)

    def test_no_color_force_color_mutually_exclusive_command_init(self):
        msg = "'no_color' and 'force_color' can't be used together."
        with self.assertRaisesMessage(CommandError, msg):
            call_command(BaseCommand(no_color=True, force_color=True))

    def test_custom_stdout(self):
        class Command(BaseCommand):
            requires_system_checks = False

            def handle(self, *args, **options):
                self.stdout.write("Hello, World!")

        out = StringIO()
        command = Command(stdout=out)
        call_command(command)
        self.assertEqual(out.getvalue(), "Hello, World!\n")
        out.truncate(0)
        new_out = StringIO()
        call_command(command, stdout=new_out)
        self.assertEqual(out.getvalue(), "")
        self.assertEqual(new_out.getvalue(), "Hello, World!\n")

    def test_custom_stderr(self):
        class Command(BaseCommand):
            requires_system_checks = False

            def handle(self, *args, **options):
                self.stderr.write("Hello, World!")

        err = StringIO()
        command = Command(stderr=err)
        call_command(command)
        self.assertEqual(err.getvalue(), "Hello, World!\n")
        err.truncate(0)
        new_err = StringIO()
        call_command(command, stderr=new_err)
        self.assertEqual(err.getvalue(), "")
        self.assertEqual(new_err.getvalue(), "Hello, World!\n")

    def test_base_command(self):
        "User BaseCommands can execute when a label is provided"
        args = ['base_command', 'testlabel']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels)

    def test_base_command_no_label(self):
        "User BaseCommands can execute when no labels are provided"
        args = ['base_command']
        expected_labels = "()"
        self._test_base_command(args, expected_labels)

    def test_base_command_multiple_label(self):
        "User BaseCommands can execute when multiple labels are provided"
        args = ['base_command', 'testlabel', 'anotherlabel']
        expected_labels = "('testlabel', 'anotherlabel')"
        self._test_base_command(args, expected_labels)

    def test_base_command_with_option(self):
        "User BaseCommands can execute with options when a label is provided"
        args = ['base_command', 'testlabel', '--option_a=x']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels, option_a="'x'")

    def test_base_command_with_options(self):
        "User BaseCommands can execute with multiple options when a label is provided"
        args = ['base_command', 'testlabel', '-a', 'x', '--option_b=y']
        expected_labels = "('testlabel',)"
        self._test_base_command(args, expected_labels, option_a="'x'", option_b="'y'")

    def test_base_command_with_wrong_option(self):
        "User BaseCommands outputs command usage when wrong option is specified"
        args = ['base_command', '--invalid']
        out, err = self.run_manage(args)
        self.assertNoOutput(out)
        self.assertOutput(err, "usage: manage.py base_command")
        self.assertOutput(err, "error: unrecognized arguments: --invalid")

    def _test_base_command(self, args, labels, option_a="'1'", option_b="'2'"):
        out, err = self.run_manage(args)

        expected_out = (
            "EXECUTE:BaseCommand labels=%s, "
            "options=[('force_color', False), ('no_color', False), "
            "('option_a', %s), ('option_b', %s), ('option_c', '3'), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]") % (labels, option_a, option_b)
        self.assertNoOutput(err)
        self.assertOutput(out, expected_out)

    def test_base_run_from_argv(self):
        """
        Test run_from_argv properly terminates even with custom execute()
        (#19665). Also test proper traceback display.
        """
        err = StringIO()
        command = BaseCommand(stderr=err)

        def raise_command_error(*args, **kwargs):
            raise CommandError("Custom error")

        command.execute = lambda args: args  # This will trigger TypeError

        # If the Exception is not CommandError it should always
        # raise the original exception.
        with self.assertRaises(TypeError):
            command.run_from_argv(['', ''])

        # If the Exception is CommandError and --traceback is not present,
        # this command should raise a SystemExit and not print any traceback
        # to stderr.
        command.execute = raise_command_error
        err.truncate(0)
        with self.assertRaises(SystemExit):
            command.run_from_argv(['', ''])
        err_message = err.getvalue()
        self.assertNotIn("Traceback", err_message)
        self.assertIn("CommandError", err_message)

        # If the Exception is CommandError and --traceback is present, this
        # command should raise the original CommandError as if it were not a
        # CommandError.
        err.truncate(0)
        with self.assertRaises(CommandError):
            command.run_from_argv(['', '', '--traceback'])

    def test_run_from_argv_non_ascii_error(self):
        """
        Non-ASCII message of CommandError does not raise any
        UnicodeDecodeError in run_from_argv.
        """
        def raise_command_error(*args, **kwargs):
            raise CommandError("Erreur personnalisée")

        command = BaseCommand(stderr=StringIO())
        command.execute = raise_command_error
        with self.assertRaises(SystemExit):
            command.run_from_argv(['', ''])

    def test_run_from_argv_closes_connections(self):
        """
        A command called from the command line should close connections after
        being executed (#21255).
        """
        command = BaseCommand(stderr=StringIO())
        command.check = lambda: []
        command.handle = lambda *args, **kwargs: args
        with mock.patch('django.core.management.base.connections') as mock_connections:
            command.run_from_argv(['', ''])
        # Test connections have been closed
        self.assertTrue(mock_connections.close_all.called)

    def test_noargs(self):
        "NoArg Commands can be executed"
        args = ['noargs_command']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(
            out,
            "EXECUTE: noargs_command options=[('force_color', False), "
            "('no_color', False), ('pythonpath', None), ('settings', None), "
            "('traceback', False), ('verbosity', 1)]"
        )

    def test_noargs_with_args(self):
        "NoArg Commands raise an error if an argument is provided"
        args = ['noargs_command', 'argument']
        out, err = self.run_manage(args)
        self.assertOutput(err, "error: unrecognized arguments: argument")

    def test_app_command(self):
        "User AppCommands can execute when a single app name is provided"
        args = ['app_command', 'auth']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
        self.assertOutput(
            out,
            ", options=[('force_color', False), ('no_color', False), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]"
        )

    def test_app_command_no_apps(self):
        "User AppCommands raise an error when no app name is provided"
        args = ['app_command']
        out, err = self.run_manage(args)
        self.assertOutput(err, 'error: Enter at least one application label.')

    def test_app_command_multiple_apps(self):
        "User AppCommands can execute when multiple app names are provided"
        args = ['app_command', 'auth', 'contenttypes']
        out, err = self.run_manage(args)
        self.assertNoOutput(err)
        self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.auth, options=")
        self.assertOutput(
            out,
            ", options=[('force_color', False), ('no_color', False), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]"
        )
        self.assertOutput(out, "EXECUTE:AppCommand name=django.contrib.contenttypes, options=")
        self.assertOutput(
            out,
            ", options=[('force_color', False), ('no_color', False), "
            "('pythonpath', None), ('settings', None), ('traceback', False), "
            "('verbosity', 1)]"
        )

    def test_app_command_invalid_app_label(self):
"User AppCommands can execute when a single app name is provided" args = ['app_command', 'NOT_AN_APP'] out, err = self.run_manage(args) self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.") def test_app_command_some_invalid_app_labels(self): "User AppCommands can execute when some of the provided app names are invalid" args = ['app_command', 'auth', 'NOT_AN_APP'] out, err = self.run_manage(args) self.assertOutput(err, "No installed app with label 'NOT_AN_APP'.") def test_label_command(self): "User LabelCommands can execute when a label is provided" args = ['label_command', 'testlabel'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:LabelCommand label=testlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), ('settings', " "None), ('traceback', False), ('verbosity', 1)]" ) def test_label_command_no_label(self): "User LabelCommands raise an error if no label is provided" args = ['label_command'] out, err = self.run_manage(args) self.assertOutput(err, 'Enter at least one label') def test_label_command_multiple_label(self): "User LabelCommands are executed multiple times if multiple labels are provided" args = ['label_command', 'testlabel', 'anotherlabel'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:LabelCommand label=testlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), " "('settings', None), ('traceback', False), ('verbosity', 1)]" ) self.assertOutput( out, "EXECUTE:LabelCommand label=anotherlabel, options=[('force_color', " "False), ('no_color', False), ('pythonpath', None), " "('settings', None), ('traceback', False), ('verbosity', 1)]" ) class Discovery(SimpleTestCase): def test_precedence(self): """ Apps listed first in INSTALLED_APPS have precedence. """ with self.settings(INSTALLED_APPS=['admin_scripts.complex_app', 'admin_scripts.simple_app', 'django.contrib.auth', 'django.contrib.contenttypes']): out = StringIO() call_command('duplicate', stdout=out) self.assertEqual(out.getvalue().strip(), 'complex_app') with self.settings(INSTALLED_APPS=['admin_scripts.simple_app', 'admin_scripts.complex_app', 'django.contrib.auth', 'django.contrib.contenttypes']): out = StringIO() call_command('duplicate', stdout=out) self.assertEqual(out.getvalue().strip(), 'simple_app') class ArgumentOrder(AdminScriptTestCase): """Tests for 2-stage argument parsing scheme. django-admin command arguments are parsed in 2 parts; the core arguments (--settings, --traceback and --pythonpath) are parsed using a basic parser, ignoring any unknown options. Then the full settings are passed to the command parser, which extracts commands of interest to the individual command. """ def setUp(self): super().setUp() self.write_settings('settings.py', apps=['django.contrib.auth', 'django.contrib.contenttypes']) self.write_settings('alternate_settings.py') def test_setting_then_option(self): """ Options passed after settings are correctly handled. """ args = ['base_command', 'testlabel', '--settings=alternate_settings', '--option_a=x'] self._test(args) def test_setting_then_short_option(self): """ Short options passed after settings are correctly handled. """ args = ['base_command', 'testlabel', '--settings=alternate_settings', '-a', 'x'] self._test(args) def test_option_then_setting(self): """ Options passed before settings are correctly handled. 
""" args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings'] self._test(args) def test_short_option_then_setting(self): """ Short options passed before settings are correctly handled. """ args = ['base_command', 'testlabel', '-a', 'x', '--settings=alternate_settings'] self._test(args) def test_option_then_setting_then_option(self): """ Options are correctly handled when they are passed before and after a setting. """ args = ['base_command', 'testlabel', '--option_a=x', '--settings=alternate_settings', '--option_b=y'] self._test(args, option_b="'y'") def _test(self, args, option_b="'2'"): out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput( out, "EXECUTE:BaseCommand labels=('testlabel',), options=[" "('force_color', False), ('no_color', False), ('option_a', 'x'), " "('option_b', %s), ('option_c', '3'), ('pythonpath', None), " "('settings', 'alternate_settings'), ('traceback', False), " "('verbosity', 1)]" % option_b ) @override_settings(ROOT_URLCONF='admin_scripts.urls') class StartProject(LiveServerTestCase, AdminScriptTestCase): available_apps = [ 'admin_scripts', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] def test_wrong_args(self): "Make sure passing the wrong kinds of arguments outputs an error and prints usage" out, err = self.run_django_admin(['startproject']) self.assertNoOutput(out) self.assertOutput(err, "usage:") self.assertOutput(err, "You must provide a project name.") def test_simple_project(self): "Make sure the startproject management command creates a project" args = ['startproject', 'testproject'] testproject_dir = os.path.join(self.test_dir, 'testproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) # running again.. out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "already exists") def test_invalid_project_name(self): "Make sure the startproject management command validates a project name" for bad_name in ('7testproject', '../testproject'): with self.subTest(project_name=bad_name): args = ['startproject', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "Error: '%s' is not a valid project name. Please make " "sure the name is a valid identifier." % bad_name ) self.assertFalse(os.path.exists(testproject_dir)) def test_importable_project_name(self): """ startproject validates that project name doesn't clash with existing Python modules. """ bad_name = 'os' args = ['startproject', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing " "Python module and cannot be used as a project name. Please try " "another name." ) self.assertFalse(os.path.exists(testproject_dir)) def test_simple_project_different_directory(self): "Make sure the startproject management command creates a project in a specific directory" args = ['startproject', 'testproject', 'othertestproject'] testproject_dir = os.path.join(self.test_dir, 'othertestproject') os.mkdir(testproject_dir) out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'manage.py'))) # running again.. out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput( err, "already exists. 
            "already exists. Overlaying a project into an existing directory "
            "won't replace conflicting files."
        )

    def test_custom_project_template(self):
        "Make sure the startproject management command is able to use a different project template"
        template_path = os.path.join(custom_templates_dir, 'project_template')
        args = ['startproject', '--template', template_path, 'customtestproject']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')

        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))

    def test_template_dir_with_trailing_slash(self):
        "Ticket 17475: Template dir passed has a trailing path separator"
        template_path = os.path.join(custom_templates_dir, 'project_template' + os.sep)
        args = ['startproject', '--template', template_path, 'customtestproject']
        testproject_dir = os.path.join(self.test_dir, 'customtestproject')

        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir')))

    def test_custom_project_template_from_tarball_by_path(self):
        "Make sure the startproject management command is able to use a different project template from a tarball"
        template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
        args = ['startproject', '--template', template_path, 'tarballtestproject']
        testproject_dir = os.path.join(self.test_dir, 'tarballtestproject')

        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_custom_project_template_from_tarball_to_alternative_location(self):
        "Startproject can use a project template from a tarball and create it in a specified location"
        template_path = os.path.join(custom_templates_dir, 'project_template.tgz')
        args = ['startproject', '--template', template_path, 'tarballtestproject', 'altlocation']
        testproject_dir = os.path.join(self.test_dir, 'altlocation')
        os.mkdir(testproject_dir)

        out, err = self.run_django_admin(args)
        self.assertNoOutput(err)
        self.assertTrue(os.path.isdir(testproject_dir))
        self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py')))

    def test_custom_project_template_from_tarball_by_url(self):
        """
        The startproject management command is able to use a different project
        template from a tarball via a URL.
""" template_url = '%s/custom_templates/project_template.tgz' % self.live_server_url args = ['startproject', '--template', template_url, 'urltestproject'] testproject_dir = os.path.join(self.test_dir, 'urltestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_project_template_tarball_url(self): "Startproject management command handles project template tar/zip balls from non-canonical urls" template_url = '%s/custom_templates/project_template.tgz/' % self.live_server_url args = ['startproject', '--template', template_url, 'urltestproject'] testproject_dir = os.path.join(self.test_dir, 'urltestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'run.py'))) def test_file_without_extension(self): "Make sure the startproject management command is able to render custom files" template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'customtestproject', '-e', 'txt', '-n', 'Procfile'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) self.assertTrue(os.path.exists(os.path.join(testproject_dir, 'additional_dir'))) base_path = os.path.join(testproject_dir, 'additional_dir') for f in ('Procfile', 'additional_file.py', 'requirements.txt'): self.assertTrue(os.path.exists(os.path.join(base_path, f))) with open(os.path.join(base_path, f)) as fh: self.assertEqual(fh.read().strip(), '# some file for customtestproject test project') def test_custom_project_template_context_variables(self): "Make sure template context variables are rendered with proper values" template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'another_project', 'project_dir'] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) out, err = self.run_django_admin(args) self.assertNoOutput(err) test_manage_py = os.path.join(testproject_dir, 'manage.py') with open(test_manage_py) as fp: content = fp.read() self.assertIn("project_name = 'another_project'", content) self.assertIn("project_directory = '%s'" % testproject_dir, content) def test_no_escaping_of_project_variables(self): "Make sure template context variables are not html escaped" # We're using a custom command so we need the alternate settings self.write_settings('alternate_settings.py') template_path = os.path.join(custom_templates_dir, 'project_template') args = [ 'custom_startproject', '--template', template_path, 'another_project', 'project_dir', '--extra', '<&>', '--settings=alternate_settings', ] testproject_dir = os.path.join(self.test_dir, 'project_dir') os.mkdir(testproject_dir) out, err = self.run_manage(args) self.assertNoOutput(err) test_manage_py = os.path.join(testproject_dir, 'additional_dir', 'extra.py') with open(test_manage_py) as fp: content = fp.read() self.assertIn("<&>", content) def test_custom_project_destination_missing(self): """ Make sure an exception is raised when the provided destination directory doesn't exist """ template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, 'yet_another_project', 'project_dir2'] 
testproject_dir = os.path.join(self.test_dir, 'project_dir2') out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Destination directory '%s' does not exist, please create it first." % testproject_dir) self.assertFalse(os.path.exists(testproject_dir)) def test_custom_project_template_with_non_ascii_templates(self): """ The startproject management command is able to render templates with non-ASCII content. """ template_path = os.path.join(custom_templates_dir, 'project_template') args = ['startproject', '--template', template_path, '--extension=txt', 'customtestproject'] testproject_dir = os.path.join(self.test_dir, 'customtestproject') out, err = self.run_django_admin(args) self.assertNoOutput(err) self.assertTrue(os.path.isdir(testproject_dir)) path = os.path.join(testproject_dir, 'ticket-18091-non-ascii-template.txt') with open(path, encoding='utf-8') as f: self.assertEqual(f.read().splitlines(False), [ 'Some non-ASCII text for testing ticket #18091:', 'üäö €']) class StartApp(AdminScriptTestCase): def test_invalid_name(self): """startapp validates that app name is a valid Python identifier.""" for bad_name in ('7testproject', '../testproject'): with self.subTest(app_name=bad_name): args = ['startapp', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: '{}' is not a valid app name. Please make " "sure the name is a valid identifier.".format(bad_name) ) self.assertFalse(os.path.exists(testproject_dir)) def test_importable_name(self): """ startapp validates that app name doesn't clash with existing Python modules. """ bad_name = 'os' args = ['startapp', bad_name] testproject_dir = os.path.join(self.test_dir, bad_name) out, err = self.run_django_admin(args) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing " "Python module and cannot be used as an app name. Please try " "another name." ) self.assertFalse(os.path.exists(testproject_dir)) def test_invalid_target_name(self): for bad_target in ('invalid.dir_name', '7invalid_dir_name', '.invalid_dir_name'): with self.subTest(bad_target): _, err = self.run_django_admin(['startapp', 'app', bad_target]) self.assertOutput( err, "CommandError: '%s' is not a valid app directory. Please " "make sure the directory is a valid identifier." % bad_target ) def test_importable_target_name(self): _, err = self.run_django_admin(['startapp', 'app', 'os']) self.assertOutput( err, "CommandError: 'os' conflicts with the name of an existing Python " "module and cannot be used as an app directory. Please try " "another directory." ) def test_overlaying_app(self): self.run_django_admin(['startapp', 'app1']) out, err = self.run_django_admin(['startapp', 'app2', 'app1']) self.assertOutput( err, "already exists. Overlaying an app into an existing directory " "won't replace conflicting files." ) class DiffSettings(AdminScriptTestCase): """Tests for diffsettings management command.""" def test_basic(self): """Runs without error and emits settings diff.""" self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "FOO = 'bar' ###") # Attributes from django.conf.Settings don't appear. 
self.assertNotInOutput(out, 'is_overridden = ') def test_settings_configured(self): out, err = self.run_manage(['diffsettings'], manage_py='configured_settings_manage.py') self.assertNoOutput(err) self.assertOutput(out, 'CUSTOM = 1 ###\nDEBUG = True') # Attributes from django.conf.UserSettingsHolder don't appear. self.assertNotInOutput(out, 'default_settings = ') def test_dynamic_settings_configured(self): # Custom default settings appear. out, err = self.run_manage(['diffsettings'], manage_py='configured_dynamic_settings_manage.py') self.assertNoOutput(err) self.assertOutput(out, "FOO = 'bar' ###") def test_all(self): """The all option also shows settings with the default value.""" self.write_settings('settings_to_diff.py', sdict={'STATIC_URL': 'None'}) args = ['diffsettings', '--settings=settings_to_diff', '--all'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "### STATIC_URL = None") def test_custom_default(self): """ The --default option specifies an alternate settings module for comparison. """ self.write_settings('settings_default.py', sdict={'FOO': '"foo"', 'BAR': '"bar1"'}) self.write_settings('settings_to_diff.py', sdict={'FOO': '"foo"', 'BAR': '"bar2"'}) out, err = self.run_manage(['diffsettings', '--settings=settings_to_diff', '--default=settings_default']) self.assertNoOutput(err) self.assertNotInOutput(out, "FOO") self.assertOutput(out, "BAR = 'bar2'") def test_unified(self): """--output=unified emits settings diff in unified mode.""" self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff', '--output=unified'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, "+ FOO = 'bar'") self.assertOutput(out, "- SECRET_KEY = ''") self.assertOutput(out, "+ SECRET_KEY = 'django_tests_secret_key'") self.assertNotInOutput(out, " APPEND_SLASH = True") def test_unified_all(self): """ --output=unified --all emits settings diff in unified mode and includes settings with the default value. """ self.write_settings('settings_to_diff.py', sdict={'FOO': '"bar"'}) args = ['diffsettings', '--settings=settings_to_diff', '--output=unified', '--all'] out, err = self.run_manage(args) self.assertNoOutput(err) self.assertOutput(out, " APPEND_SLASH = True") self.assertOutput(out, "+ FOO = 'bar'") self.assertOutput(out, "- SECRET_KEY = ''") class Dumpdata(AdminScriptTestCase): """Tests for dumpdata management command.""" def setUp(self): super().setUp() self.write_settings('settings.py') def test_pks_parsing(self): """Regression for #20509 Test would raise an exception rather than printing an error message. 
""" args = ['dumpdata', '--pks=1'] out, err = self.run_manage(args) self.assertOutput(err, "You can only use --pks option with one model") self.assertNoOutput(out) class MainModule(AdminScriptTestCase): """python -m django works like django-admin.""" def test_runs_django_admin(self): cmd_out, _ = self.run_django_admin(['--version']) mod_out, _ = self.run_test('-m', ['django', '--version']) self.assertEqual(mod_out, cmd_out) def test_program_name_in_help(self): out, err = self.run_test('-m', ['django', 'help']) self.assertOutput(out, "Type 'python -m django help <subcommand>' for help on a specific subcommand.") class DjangoAdminSuggestions(AdminScriptTestCase): def setUp(self): super().setUp() self.write_settings('settings.py') def test_suggestions(self): args = ['rnserver', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertOutput(err, "Unknown command: 'rnserver'. Did you mean runserver?") def test_no_suggestions(self): args = ['abcdef', '--settings=test_project.settings'] out, err = self.run_django_admin(args) self.assertNoOutput(out) self.assertNotInOutput(err, 'Did you mean')
import datetime from django import forms from django.core.validators import ValidationError from django.forms.models import ModelChoiceIterator from django.forms.widgets import CheckboxSelectMultiple from django.template import Context, Template from django.test import TestCase from .models import Article, Author, Book, Category, Writer class ModelChoiceFieldTests(TestCase): @classmethod def setUpTestData(cls): cls.c1 = Category.objects.create(name='Entertainment', slug='entertainment', url='entertainment') cls.c2 = Category.objects.create(name='A test', slug='test', url='test') cls.c3 = Category.objects.create(name='Third', slug='third-test', url='third') def test_basics(self): f = forms.ModelChoiceField(Category.objects.all()) self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), (self.c3.pk, 'Third'), ]) with self.assertRaises(ValidationError): f.clean('') with self.assertRaises(ValidationError): f.clean(None) with self.assertRaises(ValidationError): f.clean(0) # Invalid types that require TypeError to be caught. with self.assertRaises(ValidationError): f.clean([['fail']]) with self.assertRaises(ValidationError): f.clean([{'foo': 'bar'}]) self.assertEqual(f.clean(self.c2.id).name, 'A test') self.assertEqual(f.clean(self.c3.id).name, 'Third') # Add a Category object *after* the ModelChoiceField has already been # instantiated. This proves clean() checks the database during clean() # rather than caching it at instantiation time. c4 = Category.objects.create(name='Fourth', url='4th') self.assertEqual(f.clean(c4.id).name, 'Fourth') # Delete a Category object *after* the ModelChoiceField has already been # instantiated. This proves clean() checks the database during clean() # rather than caching it at instantiation time. Category.objects.get(url='4th').delete() msg = "['Select a valid choice. That choice is not one of the available choices.']" with self.assertRaisesMessage(ValidationError, msg): f.clean(c4.id) def test_clean_model_instance(self): f = forms.ModelChoiceField(Category.objects.all()) self.assertEqual(f.clean(self.c1), self.c1) # An instance of incorrect model. msg = "['Select a valid choice. That choice is not one of the available choices.']" with self.assertRaisesMessage(ValidationError, msg): f.clean(Book.objects.create()) def test_clean_to_field_name(self): f = forms.ModelChoiceField(Category.objects.all(), to_field_name='slug') self.assertEqual(f.clean(self.c1.slug), self.c1) self.assertEqual(f.clean(self.c1), self.c1) def test_choices(self): f = forms.ModelChoiceField(Category.objects.filter(pk=self.c1.id), required=False) self.assertIsNone(f.clean('')) self.assertEqual(f.clean(str(self.c1.id)).name, 'Entertainment') with self.assertRaises(ValidationError): f.clean('100') # len() can be called on choices. self.assertEqual(len(f.choices), 2) # queryset can be changed after the field is created. f.queryset = Category.objects.exclude(name='Third') self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), ]) self.assertEqual(f.clean(self.c2.id).name, 'A test') with self.assertRaises(ValidationError): f.clean(self.c3.id) # Choices can be iterated repeatedly. gen_one = list(f.choices) gen_two = f.choices self.assertEqual(gen_one[2], (self.c2.pk, 'A test')) self.assertEqual(list(gen_two), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), ]) # Overriding label_from_instance() to print custom labels. 
f.queryset = Category.objects.all() f.label_from_instance = lambda obj: 'category ' + str(obj) self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'category Entertainment'), (self.c2.pk, 'category A test'), (self.c3.pk, 'category Third'), ]) def test_choices_freshness(self): f = forms.ModelChoiceField(Category.objects.all()) self.assertEqual(len(f.choices), 4) self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), (self.c3.pk, 'Third'), ]) c4 = Category.objects.create(name='Fourth', slug='4th', url='4th') self.assertEqual(len(f.choices), 5) self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), (self.c3.pk, 'Third'), (c4.pk, 'Fourth'), ]) def test_choices_bool(self): f = forms.ModelChoiceField(Category.objects.all(), empty_label=None) self.assertIs(bool(f.choices), True) Category.objects.all().delete() self.assertIs(bool(f.choices), False) def test_choices_bool_empty_label(self): f = forms.ModelChoiceField(Category.objects.all(), empty_label='--------') Category.objects.all().delete() self.assertIs(bool(f.choices), True) def test_deepcopies_widget(self): class ModelChoiceForm(forms.Form): category = forms.ModelChoiceField(Category.objects.all()) form1 = ModelChoiceForm() field1 = form1.fields['category'] # To allow the widget to change the queryset of field1.widget.choices # without affecting other forms, the following must hold (#11183): self.assertIsNot(field1, ModelChoiceForm.base_fields['category']) self.assertIs(field1.widget.choices.field, field1) def test_result_cache_not_shared(self): class ModelChoiceForm(forms.Form): category = forms.ModelChoiceField(Category.objects.all()) form1 = ModelChoiceForm() self.assertCountEqual(form1.fields['category'].queryset, [self.c1, self.c2, self.c3]) form2 = ModelChoiceForm() self.assertIsNone(form2.fields['category'].queryset._result_cache) def test_queryset_none(self): class ModelChoiceForm(forms.Form): category = forms.ModelChoiceField(queryset=None) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['category'].queryset = Category.objects.filter(slug__contains='test') form = ModelChoiceForm() self.assertCountEqual(form.fields['category'].queryset, [self.c2, self.c3]) def test_no_extra_query_when_accessing_attrs(self): """ ModelChoiceField with RadioSelect widget doesn't produce unnecessary db queries when accessing its BoundField's attrs. """ class ModelChoiceForm(forms.Form): category = forms.ModelChoiceField(Category.objects.all(), widget=forms.RadioSelect) form = ModelChoiceForm() field = form['category'] # BoundField template = Template('{{ field.name }}{{ field }}{{ field.help_text }}') with self.assertNumQueries(1): template.render(Context({'field': field})) def test_disabled_modelchoicefield(self): class ModelChoiceForm(forms.ModelForm): author = forms.ModelChoiceField(Author.objects.all(), disabled=True) class Meta: model = Book fields = ['author'] book = Book.objects.create(author=Writer.objects.create(name='Test writer')) form = ModelChoiceForm({}, instance=book) self.assertEqual( form.errors['author'], ['Select a valid choice. 
That choice is not one of the available choices.']
        )

    def test_disabled_modelchoicefield_has_changed(self):
        field = forms.ModelChoiceField(Author.objects.all(), disabled=True)
        self.assertIs(field.has_changed('x', 'y'), False)

    def test_disabled_modelchoicefield_initial_model_instance(self):
        class ModelChoiceForm(forms.Form):
            categories = forms.ModelChoiceField(
                Category.objects.all(),
                disabled=True,
                initial=self.c1,
            )

        self.assertTrue(ModelChoiceForm(data={'categories': self.c1.pk}).is_valid())

    def test_disabled_multiplemodelchoicefield(self):
        class ArticleForm(forms.ModelForm):
            categories = forms.ModelMultipleChoiceField(Category.objects.all(), required=False)

            class Meta:
                model = Article
                fields = ['categories']

        category1 = Category.objects.create(name='cat1')
        category2 = Category.objects.create(name='cat2')
        article = Article.objects.create(
            pub_date=datetime.date(1988, 1, 4),
            writer=Writer.objects.create(name='Test writer'),
        )
        article.categories.set([category1.pk])

        form = ArticleForm(data={'categories': [category2.pk]}, instance=article)
        self.assertEqual(form.errors, {})
        self.assertEqual([x.pk for x in form.cleaned_data['categories']], [category2.pk])
        # Disabled fields use the value from `instance` rather than `data`.
        form = ArticleForm(data={'categories': [category2.pk]}, instance=article)
        form.fields['categories'].disabled = True
        self.assertEqual(form.errors, {})
        self.assertEqual([x.pk for x in form.cleaned_data['categories']], [category1.pk])

    def test_disabled_modelmultiplechoicefield_has_changed(self):
        field = forms.ModelMultipleChoiceField(Author.objects.all(), disabled=True)
        self.assertIs(field.has_changed('x', 'y'), False)

    def test_overridable_choice_iterator(self):
        """
        Iterator defaults to ModelChoiceIterator and can be overridden with
        the iterator attribute on a ModelChoiceField subclass.
        """
        field = forms.ModelChoiceField(Category.objects.all())
        self.assertIsInstance(field.choices, ModelChoiceIterator)

        class CustomModelChoiceIterator(ModelChoiceIterator):
            pass

        class CustomModelChoiceField(forms.ModelChoiceField):
            iterator = CustomModelChoiceIterator

        field = CustomModelChoiceField(Category.objects.all())
        self.assertIsInstance(field.choices, CustomModelChoiceIterator)

    def test_choice_iterator_passes_model_to_widget(self):
        class CustomModelChoiceValue:
            def __init__(self, value, obj):
                self.value = value
                self.obj = obj

            def __str__(self):
                return str(self.value)

        class CustomModelChoiceIterator(ModelChoiceIterator):
            def choice(self, obj):
                value, label = super().choice(obj)
                return CustomModelChoiceValue(value, obj), label

        class CustomCheckboxSelectMultiple(CheckboxSelectMultiple):
            def create_option(self, name, value, label, selected, index, subindex=None, attrs=None):
                # Forward subindex and attrs to the parent implementation
                # instead of overwriting them with None.
                option = super().create_option(name, value, label, selected, index, subindex=subindex, attrs=attrs)
                # Modify the HTML based on the object being rendered.
c = value.obj option['attrs']['data-slug'] = c.slug return option class CustomModelMultipleChoiceField(forms.ModelMultipleChoiceField): iterator = CustomModelChoiceIterator widget = CustomCheckboxSelectMultiple field = CustomModelMultipleChoiceField(Category.objects.all()) self.assertHTMLEqual( field.widget.render('name', []), '''<ul> <li><label><input type="checkbox" name="name" value="%d" data-slug="entertainment">Entertainment</label></li> <li><label><input type="checkbox" name="name" value="%d" data-slug="test">A test</label></li> <li><label><input type="checkbox" name="name" value="%d" data-slug="third-test">Third</label></li> </ul>''' % (self.c1.pk, self.c2.pk, self.c3.pk), ) def test_choices_not_fetched_when_not_rendering(self): with self.assertNumQueries(1): field = forms.ModelChoiceField(Category.objects.order_by('-name')) self.assertEqual('Entertainment', field.clean(self.c1.pk).name) def test_queryset_manager(self): f = forms.ModelChoiceField(Category.objects) self.assertEqual(len(f.choices), 4) self.assertEqual(list(f.choices), [ ('', '---------'), (self.c1.pk, 'Entertainment'), (self.c2.pk, 'A test'), (self.c3.pk, 'Third'), ]) def test_num_queries(self): """ Widgets that render multiple subwidgets shouldn't make more than one database query. """ categories = Category.objects.all() class CategoriesForm(forms.Form): radio = forms.ModelChoiceField(queryset=categories, widget=forms.RadioSelect) checkbox = forms.ModelMultipleChoiceField(queryset=categories, widget=forms.CheckboxSelectMultiple) template = Template( '{% for widget in form.checkbox %}{{ widget }}{% endfor %}' '{% for widget in form.radio %}{{ widget }}{% endfor %}' ) with self.assertNumQueries(2): template.render(Context({'form': CategoriesForm()}))
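

# An illustrative sketch, not used by the tests above: test_choices() patches
# label_from_instance on a field instance, but the hook is normally customized
# by subclassing ModelChoiceField. The class name here is hypothetical.
class ExampleLabelChoiceField(forms.ModelChoiceField):
    def label_from_instance(self, obj):
        # Prefix every choice label with the literal string 'category '.
        return 'category %s' % obj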
import copy import unittest from functools import wraps from unittest import mock from django.conf import settings from django.db import DEFAULT_DB_ALIAS, connection from django.db.models.expressions import Func def skipUnlessGISLookup(*gis_lookups): """ Skip a test unless a database supports all of gis_lookups. """ def decorator(test_func): @wraps(test_func) def skip_wrapper(*args, **kwargs): if any(key not in connection.ops.gis_operators for key in gis_lookups): raise unittest.SkipTest( "Database doesn't support all the lookups: %s" % ", ".join(gis_lookups) ) return test_func(*args, **kwargs) return skip_wrapper return decorator def no_backend(test_func, backend): "Use this decorator to disable test on specified backend." if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1] == backend: @unittest.skip("This test is skipped on '%s' backend" % backend) def inner(): pass return inner else: return test_func # Decorators to disable entire test functions for specific # spatial backends. def no_oracle(func): return no_backend(func, 'oracle') # Shortcut booleans to omit only portions of tests. _default_db = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1] oracle = _default_db == 'oracle' postgis = _default_db == 'postgis' mysql = _default_db == 'mysql' mariadb = mysql and connection.mysql_is_mariadb spatialite = _default_db == 'spatialite' # MySQL spatial indices can't handle NULL geometries. gisfield_may_be_null = not mysql if oracle and 'gis' in settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE']: from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys as SpatialRefSys elif postgis: from django.contrib.gis.db.backends.postgis.models import PostGISSpatialRefSys as SpatialRefSys elif spatialite: from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys as SpatialRefSys else: SpatialRefSys = None class FuncTestMixin: """Assert that Func expressions aren't mutated during their as_sql().""" def setUp(self): def as_sql_wrapper(original_as_sql): def inner(*args, **kwargs): func = original_as_sql.__self__ # Resolve output_field before as_sql() so touching it in # as_sql() won't change __dict__. func.output_field __dict__original = copy.deepcopy(func.__dict__) result = original_as_sql(*args, **kwargs) msg = '%s Func was mutated during compilation.' % func.__class__.__name__ self.assertEqual(func.__dict__, __dict__original, msg) return result return inner def __getattribute__(self, name): if name != vendor_impl: return __getattribute__original(self, name) try: as_sql = __getattribute__original(self, vendor_impl) except AttributeError: as_sql = __getattribute__original(self, 'as_sql') return as_sql_wrapper(as_sql) vendor_impl = 'as_' + connection.vendor __getattribute__original = Func.__getattribute__ self.func_patcher = mock.patch.object(Func, '__getattribute__', __getattribute__) self.func_patcher.start() super().setUp() def tearDown(self): super().tearDown() self.func_patcher.stop()
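

# Illustrative usage sketch (hypothetical test case, shown for reference):
# the helpers above are applied as decorators and mixins, e.g.
#
#   class GISLookupTests(FuncTestMixin, TestCase):
#       @skipUnlessGISLookup('bbcontains')
#       @no_oracle
#       def test_bbcontains(self):
#           ...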
from unittest import skipIf from django.core.exceptions import ValidationError from django.db import connection, models from django.test import SimpleTestCase, TestCase from .models import Post class TestCharField(TestCase): def test_max_length_passed_to_formfield(self): """ CharField passes its max_length attribute to form fields created using the formfield() method. """ cf1 = models.CharField() cf2 = models.CharField(max_length=1234) self.assertIsNone(cf1.formfield().max_length) self.assertEqual(1234, cf2.formfield().max_length) def test_lookup_integer_in_charfield(self): self.assertEqual(Post.objects.filter(title=9).count(), 0) @skipIf(connection.vendor == 'mysql', 'Running on MySQL requires utf8mb4 encoding (#18392)') def test_emoji(self): p = Post.objects.create(title='Smile 😀', body='Whatever.') p.refresh_from_db() self.assertEqual(p.title, 'Smile 😀') def test_assignment_from_choice_enum(self): class Event(models.TextChoices): C = 'Carnival!' F = 'Festival!' p1 = Post.objects.create(title=Event.C, body=Event.F) p1.refresh_from_db() self.assertEqual(p1.title, 'Carnival!') self.assertEqual(p1.body, 'Festival!') self.assertEqual(p1.title, Event.C) self.assertEqual(p1.body, Event.F) p2 = Post.objects.get(title='Carnival!') self.assertEqual(p1, p2) self.assertEqual(p2.title, Event.C) class ValidationTests(SimpleTestCase): class Choices(models.TextChoices): C = 'c', 'C' def test_charfield_raises_error_on_empty_string(self): f = models.CharField() with self.assertRaises(ValidationError): f.clean('', None) def test_charfield_cleans_empty_string_when_blank_true(self): f = models.CharField(blank=True) self.assertEqual('', f.clean('', None)) def test_charfield_with_choices_cleans_valid_choice(self): f = models.CharField(max_length=1, choices=[('a', 'A'), ('b', 'B')]) self.assertEqual('a', f.clean('a', None)) def test_charfield_with_choices_raises_error_on_invalid_choice(self): f = models.CharField(choices=[('a', 'A'), ('b', 'B')]) with self.assertRaises(ValidationError): f.clean('not a', None) def test_enum_choices_cleans_valid_string(self): f = models.CharField(choices=self.Choices.choices, max_length=1) self.assertEqual(f.clean('c', None), 'c') def test_enum_choices_invalid_input(self): f = models.CharField(choices=self.Choices.choices, max_length=1) with self.assertRaises(ValidationError): f.clean('a', None) def test_charfield_raises_error_on_empty_input(self): f = models.CharField(null=False) with self.assertRaises(ValidationError): f.clean(None, None)
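

# Illustrative sketch (hypothetical model, kept as a comment because test
# modules shouldn't define extra concrete models): the TextChoices enum
# exercised in test_assignment_from_choice_enum() is normally bound to a
# CharField through its choices argument.
#
#   class Event(models.TextChoices):
#       C = 'Carnival!'
#       F = 'Festival!'
#
#   class Happening(models.Model):
#       title = models.CharField(max_length=50, choices=Event.choices)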
import collections.abc from datetime import datetime from math import ceil from operator import attrgetter from django.core.exceptions import FieldError from django.db import connection from django.db.models import Max from django.db.models.expressions import Exists, OuterRef from django.db.models.functions import Substr from django.test import TestCase, skipUnlessDBFeature from django.utils.deprecation import RemovedInDjango40Warning from .models import ( Article, Author, Freebie, Game, IsNullWithNoneAsRHS, Player, Season, Tag, ) class LookupTests(TestCase): @classmethod def setUpTestData(cls): # Create a few Authors. cls.au1 = Author.objects.create(name='Author 1', alias='a1') cls.au2 = Author.objects.create(name='Author 2', alias='a2') # Create a few Articles. cls.a1 = Article.objects.create( headline='Article 1', pub_date=datetime(2005, 7, 26), author=cls.au1, slug='a1', ) cls.a2 = Article.objects.create( headline='Article 2', pub_date=datetime(2005, 7, 27), author=cls.au1, slug='a2', ) cls.a3 = Article.objects.create( headline='Article 3', pub_date=datetime(2005, 7, 27), author=cls.au1, slug='a3', ) cls.a4 = Article.objects.create( headline='Article 4', pub_date=datetime(2005, 7, 28), author=cls.au1, slug='a4', ) cls.a5 = Article.objects.create( headline='Article 5', pub_date=datetime(2005, 8, 1, 9, 0), author=cls.au2, slug='a5', ) cls.a6 = Article.objects.create( headline='Article 6', pub_date=datetime(2005, 8, 1, 8, 0), author=cls.au2, slug='a6', ) cls.a7 = Article.objects.create( headline='Article 7', pub_date=datetime(2005, 7, 27), author=cls.au2, slug='a7', ) # Create a few Tags. cls.t1 = Tag.objects.create(name='Tag 1') cls.t1.articles.add(cls.a1, cls.a2, cls.a3) cls.t2 = Tag.objects.create(name='Tag 2') cls.t2.articles.add(cls.a3, cls.a4, cls.a5) cls.t3 = Tag.objects.create(name='Tag 3') cls.t3.articles.add(cls.a5, cls.a6, cls.a7) def test_exists(self): # We can use .exists() to check that there are some self.assertTrue(Article.objects.exists()) for a in Article.objects.all(): a.delete() # There should be none now! self.assertFalse(Article.objects.exists()) def test_lookup_int_as_str(self): # Integer value can be queried using string self.assertQuerysetEqual(Article.objects.filter(id__iexact=str(self.a1.id)), ['<Article: Article 1>']) @skipUnlessDBFeature('supports_date_lookup_using_string') def test_lookup_date_as_str(self): # A date lookup can be performed using a string search self.assertQuerysetEqual( Article.objects.filter(pub_date__startswith='2005'), [ '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 7>', '<Article: Article 1>', ] ) def test_iterator(self): # Each QuerySet gets iterator(), which is a generator that "lazily" # returns results using database-level iteration. self.assertIsInstance(Article.objects.iterator(), collections.abc.Iterator) self.assertQuerysetEqual( Article.objects.iterator(), [ 'Article 5', 'Article 6', 'Article 4', 'Article 2', 'Article 3', 'Article 7', 'Article 1', ], transform=attrgetter('headline') ) # iterator() can be used on any QuerySet. self.assertQuerysetEqual( Article.objects.filter(headline__endswith='4').iterator(), ['Article 4'], transform=attrgetter('headline')) def test_count(self): # count() returns the number of objects matching search criteria. 
self.assertEqual(Article.objects.count(), 7) self.assertEqual(Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3) self.assertEqual(Article.objects.filter(headline__startswith='Blah blah').count(), 0) # count() should respect sliced query sets. articles = Article.objects.all() self.assertEqual(articles.count(), 7) self.assertEqual(articles[:4].count(), 4) self.assertEqual(articles[1:100].count(), 6) self.assertEqual(articles[10:100].count(), 0) # Date and date/time lookups can also be done with strings. self.assertEqual(Article.objects.filter(pub_date__exact='2005-07-27 00:00:00').count(), 3) def test_in_bulk(self): # in_bulk() takes a list of IDs and returns a dictionary mapping IDs to objects. arts = Article.objects.in_bulk([self.a1.id, self.a2.id]) self.assertEqual(arts[self.a1.id], self.a1) self.assertEqual(arts[self.a2.id], self.a2) self.assertEqual( Article.objects.in_bulk(), { self.a1.id: self.a1, self.a2.id: self.a2, self.a3.id: self.a3, self.a4.id: self.a4, self.a5.id: self.a5, self.a6.id: self.a6, self.a7.id: self.a7, } ) self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3}) self.assertEqual(Article.objects.in_bulk([1000]), {}) self.assertEqual(Article.objects.in_bulk([]), {}) self.assertEqual(Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1}) self.assertEqual(Article.objects.in_bulk(iter([])), {}) with self.assertRaises(TypeError): Article.objects.in_bulk(headline__startswith='Blah') def test_in_bulk_lots_of_ids(self): test_range = 2000 max_query_params = connection.features.max_query_params expected_num_queries = ceil(test_range / max_query_params) if max_query_params else 1 Author.objects.bulk_create([Author() for i in range(test_range - Author.objects.count())]) authors = {author.pk: author for author in Author.objects.all()} with self.assertNumQueries(expected_num_queries): self.assertEqual(Author.objects.in_bulk(authors), authors) def test_in_bulk_with_field(self): self.assertEqual( Article.objects.in_bulk([self.a1.slug, self.a2.slug, self.a3.slug], field_name='slug'), { self.a1.slug: self.a1, self.a2.slug: self.a2, self.a3.slug: self.a3, } ) def test_in_bulk_non_unique_field(self): msg = "in_bulk()'s field_name must be a unique field but 'author' isn't." with self.assertRaisesMessage(ValueError, msg): Article.objects.in_bulk([self.au1], field_name='author') def test_values(self): # values() returns a list of dictionaries instead of object instances -- # and you can specify which fields you want to retrieve. 
self.assertSequenceEqual( Article.objects.values('headline'), [ {'headline': 'Article 5'}, {'headline': 'Article 6'}, {'headline': 'Article 4'}, {'headline': 'Article 2'}, {'headline': 'Article 3'}, {'headline': 'Article 7'}, {'headline': 'Article 1'}, ], ) self.assertSequenceEqual( Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values('id'), [{'id': self.a2.id}, {'id': self.a3.id}, {'id': self.a7.id}], ) self.assertSequenceEqual( Article.objects.values('id', 'headline'), [ {'id': self.a5.id, 'headline': 'Article 5'}, {'id': self.a6.id, 'headline': 'Article 6'}, {'id': self.a4.id, 'headline': 'Article 4'}, {'id': self.a2.id, 'headline': 'Article 2'}, {'id': self.a3.id, 'headline': 'Article 3'}, {'id': self.a7.id, 'headline': 'Article 7'}, {'id': self.a1.id, 'headline': 'Article 1'}, ], ) # You can use values() with iterator() for memory savings, # because iterator() uses database-level iteration. self.assertSequenceEqual( list(Article.objects.values('id', 'headline').iterator()), [ {'headline': 'Article 5', 'id': self.a5.id}, {'headline': 'Article 6', 'id': self.a6.id}, {'headline': 'Article 4', 'id': self.a4.id}, {'headline': 'Article 2', 'id': self.a2.id}, {'headline': 'Article 3', 'id': self.a3.id}, {'headline': 'Article 7', 'id': self.a7.id}, {'headline': 'Article 1', 'id': self.a1.id}, ], ) # The values() method works with "extra" fields specified in extra(select). self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id + 1'}).values('id', 'id_plus_one'), [ {'id': self.a5.id, 'id_plus_one': self.a5.id + 1}, {'id': self.a6.id, 'id_plus_one': self.a6.id + 1}, {'id': self.a4.id, 'id_plus_one': self.a4.id + 1}, {'id': self.a2.id, 'id_plus_one': self.a2.id + 1}, {'id': self.a3.id, 'id_plus_one': self.a3.id + 1}, {'id': self.a7.id, 'id_plus_one': self.a7.id + 1}, {'id': self.a1.id, 'id_plus_one': self.a1.id + 1}, ], ) data = { 'id_plus_one': 'id+1', 'id_plus_two': 'id+2', 'id_plus_three': 'id+3', 'id_plus_four': 'id+4', 'id_plus_five': 'id+5', 'id_plus_six': 'id+6', 'id_plus_seven': 'id+7', 'id_plus_eight': 'id+8', } self.assertSequenceEqual( Article.objects.filter(id=self.a1.id).extra(select=data).values(*data), [{ 'id_plus_one': self.a1.id + 1, 'id_plus_two': self.a1.id + 2, 'id_plus_three': self.a1.id + 3, 'id_plus_four': self.a1.id + 4, 'id_plus_five': self.a1.id + 5, 'id_plus_six': self.a1.id + 6, 'id_plus_seven': self.a1.id + 7, 'id_plus_eight': self.a1.id + 8, }], ) # You can specify fields from forward and reverse relations, just like filter(). 
self.assertSequenceEqual( Article.objects.values('headline', 'author__name'), [ {'headline': self.a5.headline, 'author__name': self.au2.name}, {'headline': self.a6.headline, 'author__name': self.au2.name}, {'headline': self.a4.headline, 'author__name': self.au1.name}, {'headline': self.a2.headline, 'author__name': self.au1.name}, {'headline': self.a3.headline, 'author__name': self.au1.name}, {'headline': self.a7.headline, 'author__name': self.au2.name}, {'headline': self.a1.headline, 'author__name': self.au1.name}, ], ) self.assertSequenceEqual( Author.objects.values('name', 'article__headline').order_by('name', 'article__headline'), [ {'name': self.au1.name, 'article__headline': self.a1.headline}, {'name': self.au1.name, 'article__headline': self.a2.headline}, {'name': self.au1.name, 'article__headline': self.a3.headline}, {'name': self.au1.name, 'article__headline': self.a4.headline}, {'name': self.au2.name, 'article__headline': self.a5.headline}, {'name': self.au2.name, 'article__headline': self.a6.headline}, {'name': self.au2.name, 'article__headline': self.a7.headline}, ], ) self.assertSequenceEqual( ( Author.objects .values('name', 'article__headline', 'article__tag__name') .order_by('name', 'article__headline', 'article__tag__name') ), [ {'name': self.au1.name, 'article__headline': self.a1.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a2.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t1.name}, {'name': self.au1.name, 'article__headline': self.a3.headline, 'article__tag__name': self.t2.name}, {'name': self.au1.name, 'article__headline': self.a4.headline, 'article__tag__name': self.t2.name}, {'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t2.name}, {'name': self.au2.name, 'article__headline': self.a5.headline, 'article__tag__name': self.t3.name}, {'name': self.au2.name, 'article__headline': self.a6.headline, 'article__tag__name': self.t3.name}, {'name': self.au2.name, 'article__headline': self.a7.headline, 'article__tag__name': self.t3.name}, ], ) # However, an exception FieldDoesNotExist will be thrown if you specify # a nonexistent field name in values() (a field that is neither in the # model nor in extra(select)). msg = ( "Cannot resolve keyword 'id_plus_two' into field. Choices are: " "author, author_id, headline, id, id_plus_one, pub_date, slug, tag" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.extra(select={'id_plus_one': 'id + 1'}).values('id', 'id_plus_two') # If you don't specify field names to values(), all are returned. self.assertSequenceEqual( Article.objects.filter(id=self.a5.id).values(), [{ 'id': self.a5.id, 'author_id': self.au2.id, 'headline': 'Article 5', 'pub_date': datetime(2005, 8, 1, 9, 0), 'slug': 'a5', }], ) def test_values_list(self): # values_list() is similar to values(), except that the results are # returned as a list of tuples, rather than a list of dictionaries. # Within each tuple, the order of the elements is the same as the order # of fields in the values_list() call. 
self.assertSequenceEqual( Article.objects.values_list('headline'), [ ('Article 5',), ('Article 6',), ('Article 4',), ('Article 2',), ('Article 3',), ('Article 7',), ('Article 1',), ], ) self.assertSequenceEqual( Article.objects.values_list('id').order_by('id'), [(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)], ) self.assertSequenceEqual( Article.objects.values_list('id', flat=True).order_by('id'), [self.a1.id, self.a2.id, self.a3.id, self.a4.id, self.a5.id, self.a6.id, self.a7.id], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id'), [(self.a1.id,), (self.a2.id,), (self.a3.id,), (self.a4.id,), (self.a5.id,), (self.a6.id,), (self.a7.id,)], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id_plus_one', 'id'), [ (self.a1.id + 1, self.a1.id), (self.a2.id + 1, self.a2.id), (self.a3.id + 1, self.a3.id), (self.a4.id + 1, self.a4.id), (self.a5.id + 1, self.a5.id), (self.a6.id + 1, self.a6.id), (self.a7.id + 1, self.a7.id) ], ) self.assertSequenceEqual( Article.objects.extra(select={'id_plus_one': 'id+1'}).order_by('id').values_list('id', 'id_plus_one'), [ (self.a1.id, self.a1.id + 1), (self.a2.id, self.a2.id + 1), (self.a3.id, self.a3.id + 1), (self.a4.id, self.a4.id + 1), (self.a5.id, self.a5.id + 1), (self.a6.id, self.a6.id + 1), (self.a7.id, self.a7.id + 1) ], ) args = ('name', 'article__headline', 'article__tag__name') self.assertSequenceEqual( Author.objects.values_list(*args).order_by(*args), [ (self.au1.name, self.a1.headline, self.t1.name), (self.au1.name, self.a2.headline, self.t1.name), (self.au1.name, self.a3.headline, self.t1.name), (self.au1.name, self.a3.headline, self.t2.name), (self.au1.name, self.a4.headline, self.t2.name), (self.au2.name, self.a5.headline, self.t2.name), (self.au2.name, self.a5.headline, self.t3.name), (self.au2.name, self.a6.headline, self.t3.name), (self.au2.name, self.a7.headline, self.t3.name), ], ) with self.assertRaises(TypeError): Article.objects.values_list('id', 'headline', flat=True) def test_get_next_previous_by(self): # Every DateField and DateTimeField creates get_next_by_FOO() and # get_previous_by_FOO() methods. In the case of identical date values, # these methods will use the ID as a fallback check. This guarantees # that no records are skipped or duplicated. 
self.assertEqual(repr(self.a1.get_next_by_pub_date()), '<Article: Article 2>') self.assertEqual(repr(self.a2.get_next_by_pub_date()), '<Article: Article 3>') self.assertEqual(repr(self.a2.get_next_by_pub_date(headline__endswith='6')), '<Article: Article 6>') self.assertEqual(repr(self.a3.get_next_by_pub_date()), '<Article: Article 7>') self.assertEqual(repr(self.a4.get_next_by_pub_date()), '<Article: Article 6>') with self.assertRaises(Article.DoesNotExist): self.a5.get_next_by_pub_date() self.assertEqual(repr(self.a6.get_next_by_pub_date()), '<Article: Article 5>') self.assertEqual(repr(self.a7.get_next_by_pub_date()), '<Article: Article 4>') self.assertEqual(repr(self.a7.get_previous_by_pub_date()), '<Article: Article 3>') self.assertEqual(repr(self.a6.get_previous_by_pub_date()), '<Article: Article 4>') self.assertEqual(repr(self.a5.get_previous_by_pub_date()), '<Article: Article 6>') self.assertEqual(repr(self.a4.get_previous_by_pub_date()), '<Article: Article 7>') self.assertEqual(repr(self.a3.get_previous_by_pub_date()), '<Article: Article 2>') self.assertEqual(repr(self.a2.get_previous_by_pub_date()), '<Article: Article 1>') def test_escaping(self): # Underscores, percent signs and backslashes have special meaning in the # underlying SQL code, but Django handles the quoting of them automatically. Article.objects.create(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20)) self.assertQuerysetEqual( Article.objects.filter(headline__startswith='Article'), [ '<Article: Article_ with underscore>', '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 7>', '<Article: Article 1>', ] ) self.assertQuerysetEqual( Article.objects.filter(headline__startswith='Article_'), ['<Article: Article_ with underscore>'] ) Article.objects.create(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21)) self.assertQuerysetEqual( Article.objects.filter(headline__startswith='Article'), [ '<Article: Article% with percent sign>', '<Article: Article_ with underscore>', '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 7>', '<Article: Article 1>', ] ) self.assertQuerysetEqual( Article.objects.filter(headline__startswith='Article%'), ['<Article: Article% with percent sign>'] ) Article.objects.create(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22)) self.assertQuerysetEqual( Article.objects.filter(headline__contains='\\'), [r'<Article: Article with \ backslash>'] ) def test_exclude(self): Article.objects.bulk_create([ Article(headline='Article_ with underscore', pub_date=datetime(2005, 11, 20)), Article(headline='Article% with percent sign', pub_date=datetime(2005, 11, 21)), Article(headline='Article with \\ backslash', pub_date=datetime(2005, 11, 22)), ]) # exclude() is the opposite of filter() when doing lookups: self.assertQuerysetEqual( Article.objects.filter(headline__contains='Article').exclude(headline__contains='with'), [ '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 7>', '<Article: Article 1>', ] ) self.assertQuerysetEqual( Article.objects.exclude(headline__startswith="Article_"), [ '<Article: Article with \\ backslash>', '<Article: Article% with percent sign>', '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: 
Article 7>', '<Article: Article 1>', ] ) self.assertQuerysetEqual( Article.objects.exclude(headline="Article 7"), [ '<Article: Article with \\ backslash>', '<Article: Article% with percent sign>', '<Article: Article_ with underscore>', '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 1>', ] ) def test_none(self): # none() returns a QuerySet that behaves like any other QuerySet object self.assertQuerysetEqual(Article.objects.none(), []) self.assertQuerysetEqual(Article.objects.none().filter(headline__startswith='Article'), []) self.assertQuerysetEqual(Article.objects.filter(headline__startswith='Article').none(), []) self.assertEqual(Article.objects.none().count(), 0) self.assertEqual(Article.objects.none().update(headline="This should not take effect"), 0) self.assertQuerysetEqual(Article.objects.none().iterator(), []) def test_in(self): # using __in with an empty list should return an empty query set self.assertQuerysetEqual(Article.objects.filter(id__in=[]), []) self.assertQuerysetEqual( Article.objects.exclude(id__in=[]), [ '<Article: Article 5>', '<Article: Article 6>', '<Article: Article 4>', '<Article: Article 2>', '<Article: Article 3>', '<Article: Article 7>', '<Article: Article 1>', ] ) def test_in_different_database(self): with self.assertRaisesMessage( ValueError, "Subqueries aren't allowed across different databases. Force the " "inner query to be evaluated using `list(inner_query)`." ): list(Article.objects.filter(id__in=Article.objects.using('other').all())) def test_in_keeps_value_ordering(self): query = Article.objects.filter(slug__in=['a%d' % i for i in range(1, 8)]).values('pk').query self.assertIn(' IN (a1, a2, a3, a4, a5, a6, a7) ', str(query)) def test_error_messages(self): # Programming errors are pointed out with nice error messages with self.assertRaisesMessage( FieldError, "Cannot resolve keyword 'pub_date_year' into field. Choices are: " "author, author_id, headline, id, pub_date, slug, tag" ): Article.objects.filter(pub_date_year='2005').count() def test_unsupported_lookups(self): with self.assertRaisesMessage( FieldError, "Unsupported lookup 'starts' for CharField or join on the field " "not permitted, perhaps you meant startswith or istartswith?" ): Article.objects.filter(headline__starts='Article') with self.assertRaisesMessage( FieldError, "Unsupported lookup 'is_null' for DateTimeField or join on the field " "not permitted, perhaps you meant isnull?" ): Article.objects.filter(pub_date__is_null=True) with self.assertRaisesMessage( FieldError, "Unsupported lookup 'gobbledygook' for DateTimeField or join on the field " "not permitted." ): Article.objects.filter(pub_date__gobbledygook='blahblah') def test_relation_nested_lookup_error(self): # An invalid nested lookup on a related field raises a useful error. 
msg = 'Related Field got invalid lookup: editor' with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(author__editor__name='James') msg = 'Related Field got invalid lookup: foo' with self.assertRaisesMessage(FieldError, msg): Tag.objects.filter(articles__foo='bar') def test_regex(self): # Create some articles with a bit more interesting headlines for testing field lookups: for a in Article.objects.all(): a.delete() now = datetime.now() Article.objects.bulk_create([ Article(pub_date=now, headline='f'), Article(pub_date=now, headline='fo'), Article(pub_date=now, headline='foo'), Article(pub_date=now, headline='fooo'), Article(pub_date=now, headline='hey-Foo'), Article(pub_date=now, headline='bar'), Article(pub_date=now, headline='AbBa'), Article(pub_date=now, headline='baz'), Article(pub_date=now, headline='baxZ'), ]) # zero-or-more self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fo*'), ['<Article: f>', '<Article: fo>', '<Article: foo>', '<Article: fooo>'] ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'fo*'), [ '<Article: f>', '<Article: fo>', '<Article: foo>', '<Article: fooo>', '<Article: hey-Foo>', ] ) # one-or-more self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fo+'), ['<Article: fo>', '<Article: foo>', '<Article: fooo>'] ) # wildcard self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'fooo?'), ['<Article: foo>', '<Article: fooo>'] ) # leading anchor self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'^b'), ['<Article: bar>', '<Article: baxZ>', '<Article: baz>'] ) self.assertQuerysetEqual(Article.objects.filter(headline__iregex=r'^a'), ['<Article: AbBa>']) # trailing anchor self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'z$'), ['<Article: baz>']) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'z$'), ['<Article: baxZ>', '<Article: baz>'] ) # character sets self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'ba[rz]'), ['<Article: bar>', '<Article: baz>'] ) self.assertQuerysetEqual(Article.objects.filter(headline__regex=r'ba.[RxZ]'), ['<Article: baxZ>']) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'ba[RxZ]'), ['<Article: bar>', '<Article: baxZ>', '<Article: baz>'] ) # and more articles: Article.objects.bulk_create([ Article(pub_date=now, headline='foobar'), Article(pub_date=now, headline='foobaz'), Article(pub_date=now, headline='ooF'), Article(pub_date=now, headline='foobarbaz'), Article(pub_date=now, headline='zoocarfaz'), Article(pub_date=now, headline='barfoobaz'), Article(pub_date=now, headline='bazbaRFOO'), ]) # alternation self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'oo(f|b)'), [ '<Article: barfoobaz>', '<Article: foobar>', '<Article: foobarbaz>', '<Article: foobaz>', ] ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'oo(f|b)'), [ '<Article: barfoobaz>', '<Article: foobar>', '<Article: foobarbaz>', '<Article: foobaz>', '<Article: ooF>', ] ) self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'^foo(f|b)'), ['<Article: foobar>', '<Article: foobarbaz>', '<Article: foobaz>'] ) # greedy matching self.assertQuerysetEqual( Article.objects.filter(headline__regex=r'b.*az'), [ '<Article: barfoobaz>', '<Article: baz>', '<Article: bazbaRFOO>', '<Article: foobarbaz>', '<Article: foobaz>', ] ) self.assertQuerysetEqual( Article.objects.filter(headline__iregex=r'b.*ar'), [ '<Article: bar>', '<Article: barfoobaz>', '<Article: bazbaRFOO>', '<Article: foobar>', 
                '<Article: foobarbaz>',
            ]
        )

    @skipUnlessDBFeature('supports_regex_backreferencing')
    def test_regex_backreferencing(self):
        # grouping and backreferences
        now = datetime.now()
        Article.objects.bulk_create([
            Article(pub_date=now, headline='foobar'),
            Article(pub_date=now, headline='foobaz'),
            Article(pub_date=now, headline='ooF'),
            Article(pub_date=now, headline='foobarbaz'),
            Article(pub_date=now, headline='zoocarfaz'),
            Article(pub_date=now, headline='barfoobaz'),
            Article(pub_date=now, headline='bazbaRFOO'),
        ])
        self.assertQuerysetEqual(
            Article.objects.filter(headline__regex=r'b(.).*b\1'),
            ['<Article: barfoobaz>', '<Article: bazbaRFOO>', '<Article: foobarbaz>']
        )

    def test_regex_null(self):
        """
        A regex lookup does not fail on null/None values
        """
        Season.objects.create(year=2012, gt=None)
        self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^$'), [])

    def test_regex_non_string(self):
        """
        A regex lookup does not fail on non-string fields
        """
        Season.objects.create(year=2013, gt=444)
        self.assertQuerysetEqual(Season.objects.filter(gt__regex=r'^444$'), ['<Season: 2013>'])

    def test_regex_non_ascii(self):
        """
        A regex lookup does not trip on non-ASCII characters.
        """
        Player.objects.create(name='\u2660')
        Player.objects.get(name__regex='\u2660')

    def test_nonfield_lookups(self):
        """
        A lookup query containing non-fields raises the proper exception.
        """
        msg = "Unsupported lookup 'blahblah' for CharField or join on the field not permitted."
        with self.assertRaisesMessage(FieldError, msg):
            Article.objects.filter(headline__blahblah=99)
        with self.assertRaisesMessage(FieldError, msg):
            Article.objects.filter(headline__blahblah__exact=99)
        msg = (
            "Cannot resolve keyword 'blahblah' into field. Choices are: "
            "author, author_id, headline, id, pub_date, slug, tag"
        )
        with self.assertRaisesMessage(FieldError, msg):
            Article.objects.filter(blahblah=99)

    def test_lookup_collision(self):
        """
        Genuine field names don't collide with built-in lookup types
        ('year', 'gt', 'range', 'in' etc.) (#11670).
        """
        # 'gt' is used as a code number for the year, e.g. 111=>2009.
        season_2009 = Season.objects.create(year=2009, gt=111)
        season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals")
        season_2010 = Season.objects.create(year=2010, gt=222)
        season_2010.games.create(home="Houston Astros", away="Chicago Cubs")
        season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers")
        season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals")
        season_2011 = Season.objects.create(year=2011, gt=333)
        season_2011.games.create(home="Houston Astros", away="St. Louis Cardinals")
        season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers")

        hunter_pence = Player.objects.create(name="Hunter Pence")
        hunter_pence.games.set(Game.objects.filter(season__year__in=[2009, 2010]))
        pudge = Player.objects.create(name="Ivan Rodriquez")
        pudge.games.set(Game.objects.filter(season__year=2009))
        pedro_feliz = Player.objects.create(name="Pedro Feliz")
        pedro_feliz.games.set(Game.objects.filter(season__year__in=[2011]))
        johnson = Player.objects.create(name="Johnson")
        johnson.games.set(Game.objects.filter(season__year__in=[2011]))

        # Games in 2010
        self.assertEqual(Game.objects.filter(season__year=2010).count(), 3)
        self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3)
        self.assertEqual(Game.objects.filter(season__gt=222).count(), 3)
        self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3)

        # Games in 2011
        self.assertEqual(Game.objects.filter(season__year=2011).count(), 2)
        self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2)
        self.assertEqual(Game.objects.filter(season__gt=333).count(), 2)
        self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2)
        self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2)
        self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2)

        # Games played in 2010 and 2011
        self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5)
        self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5)
        self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5)
        self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5)

        # Players who played in 2009
        self.assertEqual(Player.objects.filter(games__season__year=2009).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__gt=111).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2)

        # Players who played in 2010
        self.assertEqual(Player.objects.filter(games__season__year=2010).distinct().count(), 1)
        self.assertEqual(Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1)
        self.assertEqual(Player.objects.filter(games__season__gt=222).distinct().count(), 1)
        self.assertEqual(Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1)

        # Players who played in 2011
        self.assertEqual(Player.objects.filter(games__season__year=2011).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__gt=333).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2)
        self.assertEqual(Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2)

    def test_chain_date_time_lookups(self):
        self.assertQuerysetEqual(
            Article.objects.filter(pub_date__month__gt=7),
            ['<Article: Article 5>', '<Article: Article 6>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Article.objects.filter(pub_date__day__gte=27),
            ['<Article: Article 2>', '<Article: Article 3>',
             '<Article: Article 4>', '<Article: Article 7>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Article.objects.filter(pub_date__hour__lt=8),
            ['<Article: Article 1>', '<Article: Article 2>',
             '<Article: Article 3>', '<Article: Article 4>',
             '<Article: Article 7>'],
            ordered=False
        )
        self.assertQuerysetEqual(
            Article.objects.filter(pub_date__minute__lte=0),
            ['<Article: Article 1>', '<Article: Article 2>',
             '<Article: Article 3>', '<Article: Article 4>',
             '<Article: Article 5>', '<Article: Article 6>',
             '<Article: Article 7>'],
            ordered=False
        )

    def test_exact_none_transform(self):
        """Transforms are used for __exact=None."""
        Season.objects.create(year=1, nulled_text_field='not null')
        self.assertFalse(Season.objects.filter(nulled_text_field__isnull=True))
        self.assertTrue(Season.objects.filter(nulled_text_field__nulled__isnull=True))
        self.assertTrue(Season.objects.filter(nulled_text_field__nulled__exact=None))
        self.assertTrue(Season.objects.filter(nulled_text_field__nulled=None))

    def test_exact_sliced_queryset_limit_one(self):
        self.assertCountEqual(
            Article.objects.filter(author=Author.objects.all()[:1]),
            [self.a1, self.a2, self.a3, self.a4]
        )

    def test_exact_sliced_queryset_limit_one_offset(self):
        self.assertCountEqual(
            Article.objects.filter(author=Author.objects.all()[1:2]),
            [self.a5, self.a6, self.a7]
        )

    def test_exact_sliced_queryset_not_limited_to_one(self):
        msg = (
            'The QuerySet value for an exact lookup must be limited to one '
            'result using slicing.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            list(Article.objects.filter(author=Author.objects.all()[:2]))
        with self.assertRaisesMessage(ValueError, msg):
            list(Article.objects.filter(author=Author.objects.all()[1:]))

    def test_custom_field_none_rhs(self):
        """
        __exact=value is transformed to __isnull=True if Field.get_prep_value()
        converts value to None.
        """
        season = Season.objects.create(year=2012, nulled_text_field=None)
        self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field__isnull=True))
        self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field=''))

    def test_pattern_lookups_with_substr(self):
        a = Author.objects.create(name='John Smith', alias='Johx')
        b = Author.objects.create(name='Rhonda Simpson', alias='sonx')
        tests = (
            ('startswith', [a]),
            ('istartswith', [a]),
            ('contains', [a, b]),
            ('icontains', [a, b]),
            ('endswith', [b]),
            ('iendswith', [b]),
        )
        for lookup, result in tests:
            with self.subTest(lookup=lookup):
                authors = Author.objects.filter(**{'name__%s' % lookup: Substr('alias', 1, 3)})
                self.assertCountEqual(authors, result)

    def test_custom_lookup_none_rhs(self):
        """Lookup.can_use_none_as_rhs=True allows None as a lookup value."""
        season = Season.objects.create(year=2012, nulled_text_field=None)
        query = Season.objects.get_queryset().query
        field = query.model._meta.get_field('nulled_text_field')
        self.assertIsInstance(query.build_lookup(['isnull_none_rhs'], field, None), IsNullWithNoneAsRHS)
        self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field__isnull_none_rhs=True))

    def test_exact_exists(self):
        qs = Article.objects.filter(pk=OuterRef('pk'))
        seasons = Season.objects.annotate(
            pk_exists=Exists(qs),
        ).filter(
            pk_exists=Exists(qs),
        )
        self.assertCountEqual(seasons, Season.objects.all())

    def test_nested_outerref_lhs(self):
        tag = Tag.objects.create(name=self.au1.alias)
        tag.articles.add(self.a1)
        qs = Tag.objects.annotate(
            has_author_alias_match=Exists(
                Article.objects.annotate(
                    author_exists=Exists(
                        Author.objects.filter(alias=OuterRef(OuterRef('name')))
                    ),
                ).filter(author_exists=True)
            ),
        )
        self.assertEqual(qs.get(has_author_alias_match=True), tag)

    def test_exact_query_rhs_with_selected_columns(self):
        newest_author = Author.objects.create(name='Author 2')
        authors_max_ids = Author.objects.filter(
            name='Author 2',
        ).values(
            'name',
        ).annotate(
            max_id=Max('id'),
        ).values('max_id')
        authors = Author.objects.filter(id=authors_max_ids[:1])
        self.assertEqual(authors.get(), newest_author)

    def test_isnull_non_boolean_value(self):
        # These tests will catch ValueError in Django 4.0 when using
        # non-boolean values for an isnull lookup becomes forbidden.
        # msg = (
        #     'The QuerySet value for an isnull lookup must be True or False.'
        # )
        msg = (
            'Using a non-boolean value for an isnull lookup is deprecated, '
            'use True or False instead.'
        )
        tests = [
            Author.objects.filter(alias__isnull=1),
            Article.objects.filter(author__isnull=1),
            Season.objects.filter(games__isnull=1),
            Freebie.objects.filter(stock__isnull=1),
        ]
        for qs in tests:
            with self.subTest(qs=qs):
                with self.assertWarnsMessage(RemovedInDjango40Warning, msg):
                    qs.exists()
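
# A hedged illustration (not part of the upstream test suite): chained
# transform + lookup pairs such as pub_date__month__gt=7 in
# test_chain_date_time_lookups() first apply the transform, then the lookup,
# yielding SQL roughly of the shape (exact extract syntax varies by backend):
#
#   SELECT ... FROM lookup_article
#   WHERE EXTRACT('month' FROM "pub_date") > 7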
64d2ddd9ca4585b3933da82298c552cfe01e98eeff698721871c3aa0f70e550b
""" The lookup API This demonstrates features of the database API. """ from django.db import models from django.db.models.lookups import IsNull class Alarm(models.Model): desc = models.CharField(max_length=100) time = models.TimeField() def __str__(self): return '%s (%s)' % (self.time, self.desc) class Author(models.Model): name = models.CharField(max_length=100) alias = models.CharField(max_length=50, null=True, blank=True) class Meta: ordering = ('name',) class Article(models.Model): headline = models.CharField(max_length=100) pub_date = models.DateTimeField() author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True) slug = models.SlugField(unique=True, blank=True, null=True) class Meta: ordering = ('-pub_date', 'headline') def __str__(self): return self.headline class Tag(models.Model): articles = models.ManyToManyField(Article) name = models.CharField(max_length=100) class Meta: ordering = ('name',) class NulledTextField(models.TextField): def get_prep_value(self, value): return None if value == '' else value @NulledTextField.register_lookup class NulledTransform(models.Transform): lookup_name = 'nulled' template = 'NULL' @NulledTextField.register_lookup class IsNullWithNoneAsRHS(IsNull): lookup_name = 'isnull_none_rhs' can_use_none_as_rhs = True class Season(models.Model): year = models.PositiveSmallIntegerField() gt = models.IntegerField(null=True, blank=True) nulled_text_field = NulledTextField(null=True) def __str__(self): return str(self.year) class Game(models.Model): season = models.ForeignKey(Season, models.CASCADE, related_name='games') home = models.CharField(max_length=100) away = models.CharField(max_length=100) def __str__(self): return "%s at %s" % (self.away, self.home) class Player(models.Model): name = models.CharField(max_length=100) games = models.ManyToManyField(Game, related_name='players') def __str__(self): return self.name class Product(models.Model): name = models.CharField(max_length=80) qty_target = models.DecimalField(max_digits=6, decimal_places=2) class Stock(models.Model): product = models.ForeignKey(Product, models.CASCADE) qty_available = models.DecimalField(max_digits=6, decimal_places=2) class Freebie(models.Model): gift_product = models.ForeignKey(Product, models.CASCADE) stock_id = models.IntegerField(blank=True, null=True) stock = models.ForeignObject( Stock, from_fields=['stock_id', 'gift_product'], to_fields=['id', 'product'], on_delete=models.CASCADE, )
b832b3c82c28a7c054336c4945f087f1c4500a7fbcbade9b366c661731d9b476
from unittest import mock

from django.db import connection, transaction
from django.db.models import Case, Count, F, FilteredRelation, Q, When
from django.test import TestCase
from django.test.testcases import skipUnlessDBFeature

from .models import Author, Book, Borrower, Editor, RentalSession, Reservation


class FilteredRelationTests(TestCase):

    @classmethod
    def setUpTestData(cls):
        cls.author1 = Author.objects.create(name='Alice')
        cls.author2 = Author.objects.create(name='Jane')
        cls.editor_a = Editor.objects.create(name='a')
        cls.editor_b = Editor.objects.create(name='b')
        cls.book1 = Book.objects.create(
            title='Poem by Alice',
            editor=cls.editor_a,
            author=cls.author1,
        )
        cls.book1.generic_author.set([cls.author2])
        cls.book2 = Book.objects.create(
            title='The book by Jane A',
            editor=cls.editor_b,
            author=cls.author2,
        )
        cls.book3 = Book.objects.create(
            title='The book by Jane B',
            editor=cls.editor_b,
            author=cls.author2,
        )
        cls.book4 = Book.objects.create(
            title='The book by Alice',
            editor=cls.editor_a,
            author=cls.author1,
        )
        cls.author1.favorite_books.add(cls.book2)
        cls.author1.favorite_books.add(cls.book3)

    def test_select_related(self):
        qs = Author.objects.annotate(
            book_join=FilteredRelation('book'),
        ).select_related('book_join__editor').order_by('pk', 'book_join__pk')
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(qs, [
                (self.author1, self.book1, self.editor_a, self.author1),
                (self.author1, self.book4, self.editor_a, self.author1),
                (self.author2, self.book2, self.editor_b, self.author2),
                (self.author2, self.book3, self.editor_b, self.author2),
            ], lambda x: (x, x.book_join, x.book_join.editor, x.book_join.author))

    def test_select_related_multiple(self):
        qs = Book.objects.annotate(
            author_join=FilteredRelation('author'),
            editor_join=FilteredRelation('editor'),
        ).select_related('author_join', 'editor_join').order_by('pk')
        self.assertQuerysetEqual(qs, [
            (self.book1, self.author1, self.editor_a),
            (self.book2, self.author2, self.editor_b),
            (self.book3, self.author2, self.editor_b),
            (self.book4, self.author1, self.editor_a),
        ], lambda x: (x, x.author_join, x.editor_join))

    def test_select_related_with_empty_relation(self):
        qs = Author.objects.annotate(
            book_join=FilteredRelation('book', condition=Q(pk=-1)),
        ).select_related('book_join').order_by('pk')
        self.assertSequenceEqual(qs, [self.author1, self.author2])

    def test_select_related_foreign_key(self):
        qs = Book.objects.annotate(
            author_join=FilteredRelation('author'),
        ).select_related('author_join').order_by('pk')
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(qs, [
                (self.book1, self.author1),
                (self.book2, self.author2),
                (self.book3, self.author2),
                (self.book4, self.author1),
            ], lambda x: (x, x.author_join))

    @skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of')
    def test_select_related_foreign_key_for_update_of(self):
        with transaction.atomic():
            qs = Book.objects.annotate(
                author_join=FilteredRelation('author'),
            ).select_related('author_join').select_for_update(of=('self',)).order_by('pk')
            with self.assertNumQueries(1):
                self.assertQuerysetEqual(qs, [
                    (self.book1, self.author1),
                    (self.book2, self.author2),
                    (self.book3, self.author2),
                    (self.book4, self.author1),
                ], lambda x: (x, x.author_join))

    def test_without_join(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ),
            [self.author1, self.author2]
        )

    def test_with_join(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).filter(book_alice__isnull=False),
            [self.author1]
        )

    def test_with_exclude(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).exclude(book_alice__isnull=False),
            [self.author2],
        )

    def test_with_join_and_complex_condition(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation(
                    'book', condition=Q(
                        Q(book__title__iexact='poem by alice') |
                        Q(book__state=Book.RENTED)
                    ),
                ),
            ).filter(book_alice__isnull=False),
            [self.author1]
        )

    def test_internal_queryset_alias_mapping(self):
        queryset = Author.objects.annotate(
            book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
        ).filter(book_alice__isnull=False)
        self.assertIn(
            'INNER JOIN {} book_alice ON'.format(connection.ops.quote_name('filtered_relation_book')),
            str(queryset.query)
        )

    def test_with_multiple_filter(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_editor_a=FilteredRelation(
                    'book',
                    condition=Q(book__title__icontains='book', book__editor_id=self.editor_a.pk),
                ),
            ).filter(book_editor_a__isnull=False),
            [self.author1]
        )

    def test_multiple_times(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_title_alice=FilteredRelation('book', condition=Q(book__title__icontains='alice')),
            ).filter(book_title_alice__isnull=False).filter(book_title_alice__isnull=False).distinct(),
            [self.author1]
        )

    def test_exclude_relation_with_join(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=~Q(book__title__icontains='alice')),
            ).filter(book_alice__isnull=False).distinct(),
            [self.author2]
        )

    def test_with_m2m(self):
        qs = Author.objects.annotate(
            favorite_books_written_by_jane=FilteredRelation(
                'favorite_books', condition=Q(favorite_books__in=[self.book2]),
            ),
        ).filter(favorite_books_written_by_jane__isnull=False)
        self.assertSequenceEqual(qs, [self.author1])

    def test_with_m2m_deep(self):
        qs = Author.objects.annotate(
            favorite_books_written_by_jane=FilteredRelation(
                'favorite_books', condition=Q(favorite_books__author=self.author2),
            ),
        ).filter(favorite_books_written_by_jane__title='The book by Jane B')
        self.assertSequenceEqual(qs, [self.author1])

    def test_with_m2m_multijoin(self):
        qs = Author.objects.annotate(
            favorite_books_written_by_jane=FilteredRelation(
                'favorite_books', condition=Q(favorite_books__author=self.author2),
            )
        ).filter(favorite_books_written_by_jane__editor__name='b').distinct()
        self.assertSequenceEqual(qs, [self.author1])

    def test_values_list(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).filter(book_alice__isnull=False).values_list('book_alice__title', flat=True),
            ['Poem by Alice']
        )

    def test_values(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).filter(book_alice__isnull=False).values(),
            [{'id': self.author1.pk, 'name': 'Alice', 'content_type_id': None, 'object_id': None}]
        )

    def test_extra(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).filter(book_alice__isnull=False).extra(where=['1 = 1']),
            [self.author1]
        )

    @skipUnlessDBFeature('supports_select_union')
    def test_union(self):
        qs1 = Author.objects.annotate(
            book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
        ).filter(book_alice__isnull=False)
        qs2 = Author.objects.annotate(
            book_jane=FilteredRelation('book', condition=Q(book__title__iexact='the book by jane a')),
        ).filter(book_jane__isnull=False)
        self.assertSequenceEqual(qs1.union(qs2), [self.author1, self.author2])

    @skipUnlessDBFeature('supports_select_intersection')
    def test_intersection(self):
        qs1 = Author.objects.annotate(
            book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
        ).filter(book_alice__isnull=False)
        qs2 = Author.objects.annotate(
            book_jane=FilteredRelation('book', condition=Q(book__title__iexact='the book by jane a')),
        ).filter(book_jane__isnull=False)
        self.assertSequenceEqual(qs1.intersection(qs2), [])

    @skipUnlessDBFeature('supports_select_difference')
    def test_difference(self):
        qs1 = Author.objects.annotate(
            book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
        ).filter(book_alice__isnull=False)
        qs2 = Author.objects.annotate(
            book_jane=FilteredRelation('book', condition=Q(book__title__iexact='the book by jane a')),
        ).filter(book_jane__isnull=False)
        self.assertSequenceEqual(qs1.difference(qs2), [self.author1])

    def test_select_for_update(self):
        self.assertSequenceEqual(
            Author.objects.annotate(
                book_jane=FilteredRelation('book', condition=Q(book__title__iexact='the book by jane a')),
            ).filter(book_jane__isnull=False).select_for_update(),
            [self.author2]
        )

    def test_defer(self):
        # One query for the list and one query for the deferred title.
        with self.assertNumQueries(2):
            self.assertQuerysetEqual(
                Author.objects.annotate(
                    book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
                ).filter(book_alice__isnull=False).select_related('book_alice').defer('book_alice__title'),
                ['Poem by Alice'], lambda author: author.book_alice.title
            )

    def test_only_not_supported(self):
        msg = 'only() is not supported with FilteredRelation.'
        with self.assertRaisesMessage(ValueError, msg):
            Author.objects.annotate(
                book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
            ).filter(book_alice__isnull=False).select_related('book_alice').only('book_alice__state')

    def test_as_subquery(self):
        inner_qs = Author.objects.annotate(
            book_alice=FilteredRelation('book', condition=Q(book__title__iexact='poem by alice')),
        ).filter(book_alice__isnull=False)
        qs = Author.objects.filter(id__in=inner_qs)
        self.assertSequenceEqual(qs, [self.author1])

    def test_with_foreign_key_error(self):
        msg = (
            "FilteredRelation's condition doesn't support nested relations "
            "(got 'author__favorite_books__author')."
        )
        with self.assertRaisesMessage(ValueError, msg):
            list(Book.objects.annotate(
                alice_favorite_books=FilteredRelation(
                    'author__favorite_books',
                    condition=Q(author__favorite_books__author=self.author1),
                )
            ))

    def test_with_foreign_key_on_condition_error(self):
        msg = (
            "FilteredRelation's condition doesn't support nested relations "
            "(got 'book__editor__name__icontains')."
        )
        with self.assertRaisesMessage(ValueError, msg):
            list(Author.objects.annotate(
                book_edited_by_b=FilteredRelation('book', condition=Q(book__editor__name__icontains='b')),
            ))

    def test_with_empty_relation_name_error(self):
        with self.assertRaisesMessage(ValueError, 'relation_name cannot be empty.'):
            FilteredRelation('', condition=Q(blank=''))

    def test_with_condition_as_expression_error(self):
        msg = 'condition argument must be a Q() instance.'
        expression = Case(
            When(book__title__iexact='poem by alice', then=True),
            default=False,
        )
        with self.assertRaisesMessage(ValueError, msg):
            FilteredRelation('book', condition=expression)

    def test_with_prefetch_related(self):
        msg = 'prefetch_related() is not supported with FilteredRelation.'
        qs = Author.objects.annotate(
            book_title_contains_b=FilteredRelation('book', condition=Q(book__title__icontains='b')),
        ).filter(
            book_title_contains_b__isnull=False,
        )
        with self.assertRaisesMessage(ValueError, msg):
            qs.prefetch_related('book_title_contains_b')
        with self.assertRaisesMessage(ValueError, msg):
            qs.prefetch_related('book_title_contains_b__editor')

    def test_with_generic_foreign_key(self):
        self.assertSequenceEqual(
            Book.objects.annotate(
                generic_authored_book=FilteredRelation(
                    'generic_author',
                    condition=Q(generic_author__isnull=False)
                ),
            ).filter(generic_authored_book__isnull=False),
            [self.book1]
        )

    def test_eq(self):
        self.assertEqual(FilteredRelation('book', condition=Q(book__title='b')), mock.ANY)


class FilteredRelationAggregationTests(TestCase):

    @classmethod
    def setUpTestData(cls):
        cls.author1 = Author.objects.create(name='Alice')
        cls.editor_a = Editor.objects.create(name='a')
        cls.book1 = Book.objects.create(
            title='Poem by Alice',
            editor=cls.editor_a,
            author=cls.author1,
        )
        cls.borrower1 = Borrower.objects.create(name='Jenny')
        cls.borrower2 = Borrower.objects.create(name='Kevin')
        # borrower1 reserves, rents, and returns book1.
        Reservation.objects.create(
            borrower=cls.borrower1,
            book=cls.book1,
            state=Reservation.STOPPED,
        )
        RentalSession.objects.create(
            borrower=cls.borrower1,
            book=cls.book1,
            state=RentalSession.STOPPED,
        )
        # borrower2 reserves, rents, and returns book1.
        Reservation.objects.create(
            borrower=cls.borrower2,
            book=cls.book1,
            state=Reservation.STOPPED,
        )
        RentalSession.objects.create(
            borrower=cls.borrower2,
            book=cls.book1,
            state=RentalSession.STOPPED,
        )

    def test_aggregate(self):
        """
        filtered_relation() not only improves performance but also creates
        correct results when aggregating with multiple LEFT JOINs.

        Books can be reserved and then rented by a borrower. Each reservation
        and rental session is recorded with the Reservation and RentalSession
        models. Every time a reservation or a rental session is over, its
        state is changed to 'stopped'.

        Goal: count the number of books that are either currently reserved or
        rented by borrower1, or available.
        """
        qs = Book.objects.annotate(
            is_reserved_or_rented_by=Case(
                When(reservation__state=Reservation.NEW, then=F('reservation__borrower__pk')),
                When(rental_session__state=RentalSession.NEW, then=F('rental_session__borrower__pk')),
                default=None,
            )
        ).filter(
            Q(is_reserved_or_rented_by=self.borrower1.pk) | Q(state=Book.AVAILABLE)
        ).distinct()
        self.assertEqual(qs.count(), 1)
        # Although count() is 1, annotating the same queryset with Count('pk')
        # returns 4 because the two LEFT JOINs multiply the matched rows.
        self.assertSequenceEqual(qs.annotate(total=Count('pk')).values('total'), [{'total': 4}])
        # With FilteredRelation, the result is as expected (1).
        qs = Book.objects.annotate(
            active_reservations=FilteredRelation(
                'reservation', condition=Q(
                    reservation__state=Reservation.NEW,
                    reservation__borrower=self.borrower1,
                )
            ),
        ).annotate(
            active_rental_sessions=FilteredRelation(
                'rental_session', condition=Q(
                    rental_session__state=RentalSession.NEW,
                    rental_session__borrower=self.borrower1,
                )
            ),
        ).filter(
            (Q(active_reservations__isnull=False) | Q(active_rental_sessions__isnull=False)) |
            Q(state=Book.AVAILABLE)
        ).distinct()
        self.assertEqual(qs.count(), 1)
        self.assertSequenceEqual(qs.annotate(total=Count('pk')).values('total'), [{'total': 1}])
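
# A hedged illustration of the SQL shape (not emitted verbatim by any
# backend): FilteredRelation moves the condition out of WHERE and into the
# JOIN itself, roughly
#
#   LEFT OUTER JOIN filtered_relation_reservation active_reservations
#       ON active_reservations.book_id = filtered_relation_book.id
#       AND active_reservations.state = 'new'
#       AND active_reservations.borrower_id = %s
#
# so non-matching rows join as NULL instead of being multiplied across the
# two LEFT JOINs, which is why Count('pk') stays at 1 in test_aggregate().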
90284659051961a5e2e539a9adeb69962a4d958fdf757cd4dec74deba7665c3f
from django.db import migrations


def grow_tail(x, y):
    pass


def feed(x, y):
    """Feed salamander."""
    pass


class Migration(migrations.Migration):

    dependencies = [
        ('migrations', '0004_fourth'),
    ]

    operations = [
        migrations.RunPython(migrations.RunPython.noop),
        migrations.RunPython(grow_tail),
        migrations.RunPython(feed, migrations.RunPython.noop),
    ]
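
# Note (illustrative, not part of the migration itself): RunPython's second
# argument is the reverse callable. RunPython(grow_tail) above is therefore
# irreversible, while RunPython(feed, migrations.RunPython.noop) can be
# unapplied because its reverse is an explicit no-op.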
a2e4d044ee2145198980a4f5fc7c3ac93522d3c71a6dbfd506656736705b49f9
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    operations = [
        migrations.CreateModel(
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=255)),
            ],
        ),
    ]
da48fd731329473c8782f7ff17ba5c9c1e3f26634a04fbe22291915deafdf220
import os
import re
from io import StringIO

from django.contrib.gis.gdal import GDAL_VERSION, Driver, GDALException
from django.contrib.gis.utils.ogrinspect import ogrinspect
from django.core.management import call_command
from django.db import connection, connections
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import modify_settings

from ..test_data import TEST_DATA
from ..utils import mariadb, postgis
from .models import AllOGRFields


class InspectDbTests(TestCase):
    def test_geom_columns(self):
        """
        Test the geo-enabled inspectdb command.
        """
        out = StringIO()
        call_command(
            'inspectdb',
            table_name_filter=lambda tn: tn == 'inspectapp_allogrfields',
            stdout=out
        )
        output = out.getvalue()
        if connection.features.supports_geometry_field_introspection:
            self.assertIn('geom = models.PolygonField()', output)
            self.assertIn('point = models.PointField()', output)
        else:
            self.assertIn('geom = models.GeometryField(', output)
            self.assertIn('point = models.GeometryField(', output)

    @skipUnlessDBFeature("supports_3d_storage")
    def test_3d_columns(self):
        out = StringIO()
        call_command(
            'inspectdb',
            table_name_filter=lambda tn: tn == 'inspectapp_fields3d',
            stdout=out
        )
        output = out.getvalue()
        if connection.features.supports_geometry_field_introspection:
            self.assertIn('point = models.PointField(dim=3)', output)
            if postgis:
                # Geography type is specific to PostGIS
                self.assertIn('pointg = models.PointField(geography=True, dim=3)', output)
            self.assertIn('line = models.LineStringField(dim=3)', output)
            self.assertIn('poly = models.PolygonField(dim=3)', output)
        else:
            self.assertIn('point = models.GeometryField(', output)
            self.assertIn('pointg = models.GeometryField(', output)
            self.assertIn('line = models.GeometryField(', output)
            self.assertIn('poly = models.GeometryField(', output)


@modify_settings(
    INSTALLED_APPS={'append': 'django.contrib.gis'},
)
class OGRInspectTest(SimpleTestCase):
    expected_srid = 'srid=-1' if GDAL_VERSION < (2, 2) else ''
    maxDiff = 1024

    def test_poly(self):
        shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp')
        model_def = ogrinspect(shp_file, 'MyModel')

        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            '',
            'class MyModel(models.Model):',
            '    float = models.FloatField()',
            '    int = models.BigIntegerField()',
            '    str = models.CharField(max_length=80)',
            '    geom = models.PolygonField(%s)' % self.expected_srid,
        ]
        self.assertEqual(model_def, '\n'.join(expected))

    def test_poly_multi(self):
        shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp')
        model_def = ogrinspect(shp_file, 'MyModel', multi_geom=True)
        self.assertIn('geom = models.MultiPolygonField(%s)' % self.expected_srid, model_def)
        # Same test with a 25D-type geometry field
        shp_file = os.path.join(TEST_DATA, 'gas_lines', 'gas_leitung.shp')
        model_def = ogrinspect(shp_file, 'MyModel', multi_geom=True)
        srid = '-1' if GDAL_VERSION < (2, 3) else '31253'
        self.assertIn('geom = models.MultiLineStringField(srid=%s)' % srid, model_def)

    def test_date_field(self):
        shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
        model_def = ogrinspect(shp_file, 'City')

        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            '',
            'class City(models.Model):',
            '    name = models.CharField(max_length=80)',
            '    population = models.BigIntegerField()',
            '    density = models.FloatField()',
            '    created = models.DateField()',
            '    geom = models.PointField(%s)' % self.expected_srid,
        ]
        self.assertEqual(model_def, '\n'.join(expected))

    def test_time_field(self):
        # Get the database identifier used by OGR; if None is returned, GDAL
        # doesn't have the support compiled in.
        ogr_db = get_ogr_db_string()
        if not ogr_db:
            self.skipTest("Unable to setup an OGR connection to your database")

        try:
            # Writing shapefiles via GDAL currently does not support writing
            # OGRTime fields, so we need to actually use a database.
            model_def = ogrinspect(ogr_db, 'Measurement',
                                   layer_key=AllOGRFields._meta.db_table,
                                   decimal=['f_decimal'])
        except GDALException:
            self.skipTest("Unable to setup an OGR connection to your database")

        self.assertTrue(model_def.startswith(
            '# This is an auto-generated Django model module created by ogrinspect.\n'
            'from django.contrib.gis.db import models\n'
            '\n'
            '\n'
            'class Measurement(models.Model):\n'
        ))

        # The ordering of model fields might vary depending on several factors
        # (version of GDAL, etc.).
        if connection.vendor == 'sqlite':
            # SpatiaLite introspection is somewhat lacking (#29461).
            self.assertIn('    f_decimal = models.CharField(max_length=0)', model_def)
        else:
            self.assertIn('    f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', model_def)
        self.assertIn('    f_int = models.IntegerField()', model_def)
        if not mariadb:
            # Probably a bug between GDAL and MariaDB on time fields.
            self.assertIn('    f_datetime = models.DateTimeField()', model_def)
            self.assertIn('    f_time = models.TimeField()', model_def)
        if connection.vendor == 'sqlite':
            self.assertIn('    f_float = models.CharField(max_length=0)', model_def)
        else:
            self.assertIn('    f_float = models.FloatField()', model_def)
        max_length = 0 if connection.vendor == 'sqlite' else 10
        self.assertIn('    f_char = models.CharField(max_length=%s)' % max_length, model_def)
        self.assertIn('    f_date = models.DateField()', model_def)

        # Some backends may have srid=-1
        self.assertIsNotNone(re.search(r'    geom = models.PolygonField\(([^\)])*\)', model_def))

    def test_management_command(self):
        shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
        out = StringIO()
        call_command('ogrinspect', shp_file, 'City', stdout=out)
        output = out.getvalue()
        self.assertIn('class City(models.Model):', output)

    def test_mapping_option(self):
        expected = (
            "    geom = models.PointField(%s)\n"
            "\n"
            "\n"
            "# Auto-generated `LayerMapping` dictionary for City model\n"
            "city_mapping = {\n"
            "    'name': 'Name',\n"
            "    'population': 'Population',\n"
            "    'density': 'Density',\n"
            "    'created': 'Created',\n"
            "    'geom': 'POINT',\n"
            "}\n" % self.expected_srid)
        shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
        out = StringIO()
        call_command('ogrinspect', shp_file, '--mapping', 'City', stdout=out)
        self.assertIn(expected, out.getvalue())


def get_ogr_db_string():
    """
    Construct the DB string that GDAL will use to inspect the database.
    GDAL will create its own connection to the database, so we re-use the
    connection settings from the Django test.
    """
    db = connections.databases['default']

    # Map from the django backend into the OGR driver name and database identifier
    # https://www.gdal.org/ogr/ogr_formats.html
    #
    # TODO: Support Oracle (OCI).
    drivers = {
        'django.contrib.gis.db.backends.postgis': ('PostgreSQL', "PG:dbname='%(db_name)s'", ' '),
        'django.contrib.gis.db.backends.mysql': ('MySQL', 'MYSQL:"%(db_name)s"', ','),
        'django.contrib.gis.db.backends.spatialite': ('SQLite', '%(db_name)s', '')
    }

    db_engine = db['ENGINE']
    if db_engine not in drivers:
        return None

    drv_name, db_str, param_sep = drivers[db_engine]

    # Ensure that GDAL library has driver support for the database.
    try:
        Driver(drv_name)
    except GDALException:
        return None

    # SQLite/SpatiaLite in-memory databases
    if db['NAME'] == ":memory:":
        return None

    # Build the params of the OGR database connection string
    params = [db_str % {'db_name': db['NAME']}]

    def add(key, template):
        value = db.get(key, None)
        # Don't add the parameter if it is not in django's settings
        if value:
            params.append(template % value)
    add('HOST', "host='%s'")
    add('PORT', "port='%s'")
    add('USER', "user='%s'")
    add('PASSWORD', "password='%s'")

    return param_sep.join(params)
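
# A hedged worked example (assumed settings, for illustration only): with
# ENGINE='django.contrib.gis.db.backends.postgis', NAME='geodjango',
# HOST='localhost', and USER='geo', get_ogr_db_string() returns roughly
#
#   "PG:dbname='geodjango' host='localhost' user='geo'"
#
# because the PostGIS driver uses the "PG:..." template with a space
# separator between parameters.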
dbd1c3f428456d4d318411cf5e65c83377546aa99bd0d4ca11933776e644550c
import json
import math
import re
from decimal import Decimal

from django.contrib.gis.db.models import functions
from django.contrib.gis.geos import (
    GEOSGeometry, LineString, Point, Polygon, fromstr,
)
from django.contrib.gis.measure import Area
from django.db import NotSupportedError, connection
from django.db.models import Sum
from django.test import TestCase, skipUnlessDBFeature

from ..utils import FuncTestMixin, mariadb, mysql, oracle, postgis, spatialite
from .models import City, Country, CountryWebMercator, State, Track


class GISFunctionsTests(FuncTestMixin, TestCase):
    """
    Testing functions from django/contrib/gis/db/models/functions.py.
    Area/Distance/Length/Perimeter are tested in distapp/tests.

    Please keep the tests in function's alphabetic order.
    """
    fixtures = ['initial']

    def test_asgeojson(self):
        if not connection.features.has_AsGeoJSON_function:
            with self.assertRaises(NotSupportedError):
                list(Country.objects.annotate(json=functions.AsGeoJSON('mpoly')))
            return

        pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
        houston_json = (
            '{"type":"Point","crs":{"type":"name","properties":'
            '{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
        )
        victoria_json = (
            '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],'
            '"coordinates":[-123.305196,48.462611]}'
        )
        chicago_json = (
            '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},'
            '"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
        )
        # MySQL ignores the crs option.
        if mysql:
            houston_json = json.loads(houston_json)
            del houston_json['crs']
            chicago_json = json.loads(chicago_json)
            del chicago_json['crs']

        # Precision argument should only be an integer
        with self.assertRaises(TypeError):
            City.objects.annotate(geojson=functions.AsGeoJSON('point', precision='foo'))

        # Reference queries and values.
        # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0)
        # FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
        self.assertJSONEqual(
            pueblo_json,
            City.objects.annotate(geojson=functions.AsGeoJSON('point')).get(name='Pueblo').geojson
        )

        # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city"
        # WHERE "geoapp_city"."name" = 'Houston';
        # This time we want to include the CRS by using the `crs` keyword.
        self.assertJSONEqual(
            City.objects.annotate(json=functions.AsGeoJSON('point', crs=True)).get(name='Houston').json,
            houston_json,
        )

        # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city"
        # WHERE "geoapp_city"."name" = 'Houston';
        # This time we include the bounding box by using the `bbox` keyword.
        self.assertJSONEqual(
            victoria_json,
            City.objects.annotate(
                geojson=functions.AsGeoJSON('point', bbox=True)
            ).get(name='Victoria').geojson
        )

        # SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city"
        # WHERE "geoapp_city"."name" = 'Chicago';
        # Finally, we set every available keyword.
        # MariaDB doesn't limit the number of decimals in bbox.
        if mariadb:
            chicago_json['bbox'] = [-87.650175, 41.850385, -87.650175, 41.850385]
        self.assertJSONEqual(
            City.objects.annotate(
                geojson=functions.AsGeoJSON('point', bbox=True, crs=True, precision=5)
            ).get(name='Chicago').geojson,
            chicago_json,
        )

    @skipUnlessDBFeature("has_AsGML_function")
    def test_asgml(self):
        # Should throw a TypeError when trying to obtain GML from a
        # non-geometry field.
        qs = City.objects.all()
        with self.assertRaises(TypeError):
            qs.annotate(gml=functions.AsGML('name'))
        ptown = City.objects.annotate(gml=functions.AsGML('point', precision=9)).get(name='Pueblo')

        if oracle:
            # No precision parameter for Oracle :-/
            gml_regex = re.compile(
                r'^<gml:Point srsName="EPSG:4326" xmlns:gml="http://www.opengis.net/gml">'
                r'<gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ '
                r'</gml:coordinates></gml:Point>'
            )
        else:
            gml_regex = re.compile(
                r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>'
                r'-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>'
            )
        self.assertTrue(gml_regex.match(ptown.gml))
        self.assertIn(
            '<gml:pos srsDimension="2">',
            City.objects.annotate(gml=functions.AsGML('point', version=3)).get(name='Pueblo').gml
        )

    @skipUnlessDBFeature("has_AsKML_function")
    def test_askml(self):
        # Should throw a TypeError when trying to obtain KML from a
        # non-geometry field.
        with self.assertRaises(TypeError):
            City.objects.annotate(kml=functions.AsKML('name'))

        # Ensuring the KML is as expected.
        ptown = City.objects.annotate(kml=functions.AsKML('point', precision=9)).get(name='Pueblo')
        self.assertEqual('<Point><coordinates>-104.609252,38.255001</coordinates></Point>', ptown.kml)

    @skipUnlessDBFeature("has_AsSVG_function")
    def test_assvg(self):
        with self.assertRaises(TypeError):
            City.objects.annotate(svg=functions.AsSVG('point', precision='foo'))
        # SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
        svg1 = 'cx="-104.609252" cy="-38.255001"'
        # Even though relative, only one point so it's practically the same except for
        # the 'c' letter prefix on the x,y values.
        svg2 = svg1.replace('c', '')
        self.assertEqual(svg1, City.objects.annotate(svg=functions.AsSVG('point')).get(name='Pueblo').svg)
        self.assertEqual(svg2, City.objects.annotate(svg=functions.AsSVG('point', relative=5)).get(name='Pueblo').svg)

    @skipUnlessDBFeature("has_Azimuth_function")
    def test_azimuth(self):
        # Returns the azimuth in radians.
        azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(1, 1, srid=4326))
        self.assertAlmostEqual(City.objects.annotate(azimuth=azimuth_expr).first().azimuth, math.pi / 4)
        # Returns None if the two points are coincident.
        azimuth_expr = functions.Azimuth(Point(0, 0, srid=4326), Point(0, 0, srid=4326))
        self.assertIsNone(City.objects.annotate(azimuth=azimuth_expr).first().azimuth)

    @skipUnlessDBFeature("has_BoundingCircle_function")
    def test_bounding_circle(self):
        def circle_num_points(num_seg):
            # num_seg is the number of segments per quarter circle.
            return (4 * num_seg) + 1

        expected_areas = (169, 136) if postgis else (171, 126)
        qs = Country.objects.annotate(circle=functions.BoundingCircle('mpoly')).order_by('name')
        self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
        self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)
        if postgis:
            # By default num_seg=48.
            self.assertEqual(qs[0].circle.num_points, circle_num_points(48))
            self.assertEqual(qs[1].circle.num_points, circle_num_points(48))

        qs = Country.objects.annotate(circle=functions.BoundingCircle('mpoly', num_seg=12)).order_by('name')
        if postgis:
            self.assertGreater(qs[0].circle.area, 168.4, 0)
            self.assertLess(qs[0].circle.area, 169.5, 0)
            self.assertAlmostEqual(qs[1].circle.area, 136, 0)
            self.assertEqual(qs[0].circle.num_points, circle_num_points(12))
            self.assertEqual(qs[1].circle.num_points, circle_num_points(12))
        else:
            self.assertAlmostEqual(qs[0].circle.area, expected_areas[0], 0)
            self.assertAlmostEqual(qs[1].circle.area, expected_areas[1], 0)

    @skipUnlessDBFeature("has_Centroid_function")
    def test_centroid(self):
        qs = State.objects.exclude(poly__isnull=True).annotate(centroid=functions.Centroid('poly'))
        tol = 1.8 if mysql else (0.1 if oracle else 0.00001)
        for state in qs:
            self.assertTrue(state.poly.centroid.equals_exact(state.centroid, tol))

        with self.assertRaisesMessage(TypeError, "'Centroid' takes exactly 1 argument (2 given)"):
            State.objects.annotate(centroid=functions.Centroid('poly', 'poly'))

    @skipUnlessDBFeature("has_Difference_function")
    def test_difference(self):
        geom = Point(5, 23, srid=4326)
        qs = Country.objects.annotate(diff=functions.Difference('mpoly', geom))
        # Oracle does something screwy with the Texas geometry.
        if oracle:
            qs = qs.exclude(name='Texas')

        for c in qs:
            self.assertTrue(c.mpoly.difference(geom).equals(c.diff))

    @skipUnlessDBFeature("has_Difference_function", "has_Transform_function")
    def test_difference_mixed_srid(self):
        """Testing with mixed SRID (Country has default 4326)."""
        geom = Point(556597.4, 2632018.6, srid=3857)  # Spherical Mercator
        qs = Country.objects.annotate(difference=functions.Difference('mpoly', geom))
        # Oracle does something screwy with the Texas geometry.
        if oracle:
            qs = qs.exclude(name='Texas')
        for c in qs:
            self.assertTrue(c.mpoly.difference(geom).equals(c.difference))

    @skipUnlessDBFeature("has_Envelope_function")
    def test_envelope(self):
        countries = Country.objects.annotate(envelope=functions.Envelope('mpoly'))
        for country in countries:
            self.assertTrue(country.envelope.equals(country.mpoly.envelope))

    @skipUnlessDBFeature("has_ForcePolygonCW_function")
    def test_force_polygon_cw(self):
        rings = (
            ((0, 0), (5, 0), (0, 5), (0, 0)),
            ((1, 1), (1, 3), (3, 1), (1, 1)),
        )
        rhr_rings = (
            ((0, 0), (0, 5), (5, 0), (0, 0)),
            ((1, 1), (3, 1), (1, 3), (1, 1)),
        )
        State.objects.create(name='Foo', poly=Polygon(*rings))
        st = State.objects.annotate(force_polygon_cw=functions.ForcePolygonCW('poly')).get(name='Foo')
        self.assertEqual(rhr_rings, st.force_polygon_cw.coords)

    @skipUnlessDBFeature("has_GeoHash_function")
    def test_geohash(self):
        # Reference query:
        # SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
        # SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
        ref_hash = '9vk1mfq8jx0c8e0386z6'
        h1 = City.objects.annotate(geohash=functions.GeoHash('point')).get(name='Houston')
        h2 = City.objects.annotate(geohash=functions.GeoHash('point', precision=5)).get(name='Houston')
        self.assertEqual(ref_hash, h1.geohash[:len(ref_hash)])
        self.assertEqual(ref_hash[:5], h2.geohash)

    @skipUnlessDBFeature('has_GeometryDistance_function')
    def test_geometry_distance(self):
        point = Point(-90, 40, srid=4326)
        qs = City.objects.annotate(distance=functions.GeometryDistance('point', point)).order_by('distance')
        self.assertEqual([city.distance for city in qs], [
            2.99091995527296,
            5.33507274054713,
            9.33852187483721,
            9.91769193646233,
            11.556465744884,
            14.713098433352,
            34.3635252198568,
            276.987855073372,
        ])

    @skipUnlessDBFeature("has_Intersection_function")
    def test_intersection(self):
        geom = Point(5, 23, srid=4326)
        qs = Country.objects.annotate(inter=functions.Intersection('mpoly', geom))
        for c in qs:
            if spatialite or (mysql and not connection.features.supports_empty_geometry_collection) or oracle:
                # When the intersection is empty, some databases return None.
                expected = None
            else:
                expected = c.mpoly.intersection(geom)
            self.assertEqual(c.inter, expected)

    @skipUnlessDBFeature("has_IsValid_function")
    def test_isvalid(self):
        valid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
        invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
        State.objects.create(name='valid', poly=valid_geom)
        State.objects.create(name='invalid', poly=invalid_geom)
        valid = State.objects.filter(name='valid').annotate(isvalid=functions.IsValid('poly')).first()
        invalid = State.objects.filter(name='invalid').annotate(isvalid=functions.IsValid('poly')).first()
        self.assertIs(valid.isvalid, True)
        self.assertIs(invalid.isvalid, False)

    @skipUnlessDBFeature("has_Area_function")
    def test_area_with_regular_aggregate(self):
        # Create projected country objects, for this test to work on all backends.
        for c in Country.objects.all():
            CountryWebMercator.objects.create(name=c.name, mpoly=c.mpoly.transform(3857, clone=True))
        # Test in projected coordinate system
        qs = CountryWebMercator.objects.annotate(area_sum=Sum(functions.Area('mpoly')))
        # Some backends (e.g. Oracle) cannot group by multipolygon values, so
        # defer such fields in the aggregation query.
        for c in qs.defer('mpoly'):
            result = c.area_sum
            # If the result is a measure object, get value.
            if isinstance(result, Area):
                result = result.sq_m
            self.assertAlmostEqual((result - c.mpoly.area) / c.mpoly.area, 0)

    @skipUnlessDBFeature("has_Area_function")
    def test_area_lookups(self):
        # Create projected countries so the test works on all backends.
        CountryWebMercator.objects.bulk_create(
            CountryWebMercator(name=c.name, mpoly=c.mpoly.transform(3857, clone=True))
            for c in Country.objects.all()
        )
        qs = CountryWebMercator.objects.annotate(area=functions.Area('mpoly'))
        self.assertEqual(qs.get(area__lt=Area(sq_km=500000)), CountryWebMercator.objects.get(name='New Zealand'))

        with self.assertRaisesMessage(ValueError, 'AreaField only accepts Area measurement objects.'):
            qs.get(area__lt=500000)

    @skipUnlessDBFeature("has_LineLocatePoint_function")
    def test_line_locate_point(self):
        pos_expr = functions.LineLocatePoint(LineString((0, 0), (0, 3), srid=4326), Point(0, 1, srid=4326))
        self.assertAlmostEqual(State.objects.annotate(pos=pos_expr).first().pos, 0.3333333)

    @skipUnlessDBFeature("has_MakeValid_function")
    def test_make_valid(self):
        invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
        State.objects.create(name='invalid', poly=invalid_geom)
        invalid = State.objects.filter(name='invalid').annotate(repaired=functions.MakeValid('poly')).first()
        self.assertIs(invalid.repaired.valid, True)
        self.assertEqual(invalid.repaired, fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))', srid=invalid.poly.srid))

    @skipUnlessDBFeature("has_MemSize_function")
    def test_memsize(self):
        ptown = City.objects.annotate(size=functions.MemSize('point')).get(name='Pueblo')
        # Exact value may depend on the PostGIS version.
        self.assertTrue(20 <= ptown.size <= 40)

    @skipUnlessDBFeature("has_NumGeom_function")
    def test_num_geom(self):
        # Both 'countries' only have two geometries.
        for c in Country.objects.annotate(num_geom=functions.NumGeometries('mpoly')):
            self.assertEqual(2, c.num_geom)

        qs = City.objects.filter(point__isnull=False).annotate(num_geom=functions.NumGeometries('point'))
        for city in qs:
            # Oracle and PostGIS return 1 for the number of geometries on
            # non-collections, whereas MySQL returns None.
            if mysql:
                self.assertIsNone(city.num_geom)
            else:
                self.assertEqual(1, city.num_geom)

    @skipUnlessDBFeature("has_NumPoint_function")
    def test_num_points(self):
        coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
        Track.objects.create(name='Foo', line=LineString(coords))
        qs = Track.objects.annotate(num_points=functions.NumPoints('line'))
        self.assertEqual(qs.first().num_points, 2)
        mpoly_qs = Country.objects.annotate(num_points=functions.NumPoints('mpoly'))
        if not connection.features.supports_num_points_poly:
            for c in mpoly_qs:
                self.assertIsNone(c.num_points)
            return
        for c in mpoly_qs:
            self.assertEqual(c.mpoly.num_points, c.num_points)
        for c in City.objects.annotate(num_points=functions.NumPoints('point')):
            self.assertEqual(c.num_points, 1)

    @skipUnlessDBFeature("has_PointOnSurface_function")
    def test_point_on_surface(self):
        qs = Country.objects.annotate(point_on_surface=functions.PointOnSurface('mpoly'))
        for country in qs:
            self.assertTrue(country.mpoly.intersection(country.point_on_surface))

    @skipUnlessDBFeature("has_Reverse_function")
    def test_reverse_geom(self):
        coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
        Track.objects.create(name='Foo', line=LineString(coords))
        track = Track.objects.annotate(reverse_geom=functions.Reverse('line')).get(name='Foo')
        coords.reverse()
        self.assertEqual(tuple(coords), track.reverse_geom.coords)

    @skipUnlessDBFeature("has_Scale_function")
    def test_scale(self):
        xfac, yfac = 2, 3
        tol = 5  # The low precision tolerance is for SpatiaLite
        qs = Country.objects.annotate(scaled=functions.Scale('mpoly', xfac, yfac))
        for country in qs:
            for p1, p2 in zip(country.mpoly, country.scaled):
                for r1, r2 in zip(p1, p2):
                    for c1, c2 in zip(r1.coords, r2.coords):
                        self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
                        self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
        # Test float/Decimal values
        qs = Country.objects.annotate(scaled=functions.Scale('mpoly', 1.5, Decimal('2.5')))
        self.assertGreater(qs[0].scaled.area, qs[0].mpoly.area)

    @skipUnlessDBFeature("has_SnapToGrid_function")
    def test_snap_to_grid(self):
        # Let's try and break snap_to_grid() with bad combinations of arguments.
        for bad_args in ((), range(3), range(5)):
            with self.assertRaises(ValueError):
                Country.objects.annotate(snap=functions.SnapToGrid('mpoly', *bad_args))
        for bad_args in (('1.0',), (1.0, None), tuple(map(str, range(4)))):
            with self.assertRaises(TypeError):
                Country.objects.annotate(snap=functions.SnapToGrid('mpoly', *bad_args))

        # Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
        # from the world borders dataset he provides.
        wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
               '12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
               '12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
               '12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
               '12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
               '12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
               '12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
               '12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
        Country.objects.create(name='San Marino', mpoly=fromstr(wkt))

        # Because floating-point arithmetic isn't exact, we set a tolerance
        # to pass into GEOS `equals_exact`.
        tol = 0.000000001

        # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country"
        # WHERE "geoapp_country"."name" = 'San Marino';
        ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
        self.assertTrue(
            ref.equals_exact(
                Country.objects.annotate(
                    snap=functions.SnapToGrid('mpoly', 0.1)
                ).get(name='San Marino').snap,
                tol
            )
        )

        # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country"
        # WHERE "geoapp_country"."name" = 'San Marino';
        ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
        self.assertTrue(
            ref.equals_exact(
                Country.objects.annotate(
                    snap=functions.SnapToGrid('mpoly', 0.05, 0.23)
                ).get(name='San Marino').snap,
                tol
            )
        )

        # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country"
        # WHERE "geoapp_country"."name" = 'San Marino';
        ref = fromstr(
            'MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))'
        )
        self.assertTrue(
            ref.equals_exact(
                Country.objects.annotate(
                    snap=functions.SnapToGrid('mpoly', 0.05, 0.23, 0.5, 0.17)
                ).get(name='San Marino').snap,
                tol
            )
        )

    @skipUnlessDBFeature("has_SymDifference_function")
    def test_sym_difference(self):
        geom = Point(5, 23, srid=4326)
        qs = Country.objects.annotate(sym_difference=functions.SymDifference('mpoly', geom))
        # Oracle does something screwy with the Texas geometry.
        if oracle:
            qs = qs.exclude(name='Texas')
        for country in qs:
            self.assertTrue(country.mpoly.sym_difference(geom).equals(country.sym_difference))

    @skipUnlessDBFeature("has_Transform_function")
    def test_transform(self):
        # Pre-transformed points for Houston and Pueblo.
        ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
        prec = 3  # Precision is low due to version variations in PROJ and GDAL.

        # Asserting the result of the transform operation with the values in
        # the pre-transformed points.
        h = City.objects.annotate(pt=functions.Transform('point', ptown.srid)).get(name='Pueblo')
        self.assertEqual(2774, h.pt.srid)
        self.assertAlmostEqual(ptown.x, h.pt.x, prec)
        self.assertAlmostEqual(ptown.y, h.pt.y, prec)

    @skipUnlessDBFeature("has_Translate_function")
    def test_translate(self):
        xfac, yfac = 5, -23
        qs = Country.objects.annotate(translated=functions.Translate('mpoly', xfac, yfac))
        for c in qs:
            for p1, p2 in zip(c.mpoly, c.translated):
                for r1, r2 in zip(p1, p2):
                    for c1, c2 in zip(r1.coords, r2.coords):
                        # The low precision is for SpatiaLite
                        self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
                        self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)

    # Some combined function tests
    @skipUnlessDBFeature(
        "has_Difference_function", "has_Intersection_function",
        "has_SymDifference_function", "has_Union_function")
    def test_diff_intersection_union(self):
        geom = Point(5, 23, srid=4326)
        qs = Country.objects.all().annotate(
            difference=functions.Difference('mpoly', geom),
            sym_difference=functions.SymDifference('mpoly', geom),
            union=functions.Union('mpoly', geom),
            intersection=functions.Intersection('mpoly', geom),
        )

        if oracle:
            # Should be able to execute the queries; however, they won't be
            # the same as GEOS (because Oracle doesn't use GEOS internally
            # like PostGIS or SpatiaLite).
            return
        for c in qs:
            self.assertTrue(c.mpoly.difference(geom).equals(c.difference))
            if not (spatialite or mysql):
                self.assertEqual(c.mpoly.intersection(geom), c.intersection)
            self.assertTrue(c.mpoly.sym_difference(geom).equals(c.sym_difference))
            self.assertTrue(c.mpoly.union(geom).equals(c.union))

    @skipUnlessDBFeature("has_Union_function")
    def test_union(self):
        """Union with all combinations of geometries/geometry fields."""
        geom = Point(-95.363151, 29.763374, srid=4326)

        union = City.objects.annotate(union=functions.Union('point', geom)).get(name='Dallas').union
        expected = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)', srid=4326)
        self.assertTrue(expected.equals(union))

        union = City.objects.annotate(union=functions.Union(geom, 'point')).get(name='Dallas').union
        self.assertTrue(expected.equals(union))

        union = City.objects.annotate(union=functions.Union('point', 'point')).get(name='Dallas').union
        expected = GEOSGeometry('POINT(-96.801611 32.782057)', srid=4326)
        self.assertTrue(expected.equals(union))

        union = City.objects.annotate(union=functions.Union(geom, geom)).get(name='Dallas').union
        self.assertTrue(geom.equals(union))

    @skipUnlessDBFeature("has_Union_function", "has_Transform_function")
    def test_union_mixed_srid(self):
        """The result SRID depends on the order of parameters."""
        geom = Point(61.42915, 55.15402, srid=4326)
        geom_3857 = geom.transform(3857, clone=True)
        tol = 0.001

        for city in City.objects.annotate(union=functions.Union('point', geom_3857)):
            expected = city.point | geom
            self.assertTrue(city.union.equals_exact(expected, tol))
            self.assertEqual(city.union.srid, 4326)

        for city in City.objects.annotate(union=functions.Union(geom_3857, 'point')):
            expected = geom_3857 | city.point.transform(3857, clone=True)
            self.assertTrue(expected.equals_exact(city.union, tol))
            self.assertEqual(city.union.srid, 3857)

    def test_argument_validation(self):
        with self.assertRaisesMessage(ValueError, 'SRID is required for all geometries.'):
            City.objects.annotate(geo=functions.GeoFunc(Point(1, 1)))

        msg = 'GeoFunc function requires a GeometryField in position 1, got CharField.'
        with self.assertRaisesMessage(TypeError, msg):
            City.objects.annotate(geo=functions.GeoFunc('name'))

        msg = 'GeoFunc function requires a geometric argument in position 1.'
        with self.assertRaisesMessage(TypeError, msg):
            City.objects.annotate(union=functions.GeoFunc(1, 'point')).get(name='Dallas')
4dc4e89ad01f8a385b7b70ccb724872e2f7c334740dc615275551fc61b8c6bca
import tempfile
import unittest
from io import StringIO

from django.contrib.gis import gdal
from django.contrib.gis.db.models import Extent, MakeLine, Union, functions
from django.contrib.gis.geos import (
    GeometryCollection, GEOSGeometry, LinearRing, LineString, MultiLineString,
    MultiPoint, MultiPolygon, Point, Polygon, fromstr,
)
from django.core.management import call_command
from django.db import NotSupportedError, connection
from django.test import TestCase, skipUnlessDBFeature

from ..utils import (
    mariadb, mysql, no_oracle, oracle, postgis, skipUnlessGISLookup,
    spatialite,
)
from .models import (
    City, Country, Feature, MinusOneSRID, NonConcreteModel, PennsylvaniaCity,
    State, Track,
)


class GeoModelTest(TestCase):
    fixtures = ['initial']

    def test_fixtures(self):
        "Testing geographic model initialization from fixtures."
        # Ensuring that data was loaded from initial data fixtures.
        self.assertEqual(2, Country.objects.count())
        self.assertEqual(8, City.objects.count())
        self.assertEqual(2, State.objects.count())

    def test_proxy(self):
        "Testing Lazy-Geometry support (using the GeometryProxy)."
        # Testing on a Point
        pnt = Point(0, 0)
        nullcity = City(name='NullCity', point=pnt)
        nullcity.save()

        # Making sure TypeError is thrown when trying to set with an
        # incompatible type.
        for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
            with self.assertRaisesMessage(TypeError, 'Cannot set'):
                nullcity.point = bad

        # Now setting with a compatible GEOS Geometry, saving, and ensuring
        # the save took; notice no SRID is explicitly set.
        new = Point(5, 23)
        nullcity.point = new

        # Ensuring that the SRID is automatically set to that of the
        # field after assignment, but before saving.
        self.assertEqual(4326, nullcity.point.srid)
        nullcity.save()

        # Ensuring the point was saved correctly after saving
        self.assertEqual(new, City.objects.get(name='NullCity').point)

        # Setting the X and Y of the Point
        nullcity.point.x = 23
        nullcity.point.y = 5
        # Checking assignments pre & post-save.
        self.assertNotEqual(Point(23, 5, srid=4326), City.objects.get(name='NullCity').point)
        nullcity.save()
        self.assertEqual(Point(23, 5, srid=4326), City.objects.get(name='NullCity').point)
        nullcity.delete()

        # Testing on a Polygon
        shell = LinearRing((0, 0), (0, 90), (100, 90), (100, 0), (0, 0))
        inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))

        # Creating a State object using a built Polygon
        ply = Polygon(shell, inner)
        nullstate = State(name='NullState', poly=ply)
        self.assertEqual(4326, nullstate.poly.srid)  # SRID auto-set from None
        nullstate.save()

        ns = State.objects.get(name='NullState')
        self.assertEqual(ply, ns.poly)

        # Testing the `ogr` and `srs` lazy-geometry properties.
        self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
        self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
        self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
        self.assertEqual('WGS 84', ns.poly.srs.name)

        # Changing the interior ring on the poly attribute.
        new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
        ns.poly[1] = new_inner
        ply[1] = new_inner
        self.assertEqual(4326, ns.poly.srid)
        ns.save()
        self.assertEqual(ply, State.objects.get(name='NullState').poly)
        ns.delete()

    @skipUnlessDBFeature("supports_transform")
    def test_lookup_insert_transform(self):
        "Testing automatic transform for lookups and inserts."
        # San Antonio in 'WGS84' (SRID 4326)
        sa_4326 = 'POINT (-98.493183 29.424170)'
        wgs_pnt = fromstr(sa_4326, srid=4326)  # Our reference point in WGS84
        # San Antonio in 'WGS 84 / Pseudo-Mercator' (SRID 3857)
        other_srid_pnt = wgs_pnt.transform(3857, clone=True)
        # Constructing & querying with a point from a different SRID. Oracle
        # `SDO_OVERLAPBDYINTERSECT` operates differently from
        # `ST_Intersects`, so contains is used instead.
        if oracle:
            tx = Country.objects.get(mpoly__contains=other_srid_pnt)
        else:
            tx = Country.objects.get(mpoly__intersects=other_srid_pnt)
        self.assertEqual('Texas', tx.name)

        # Creating San Antonio. Remember the Alamo.
        sa = City.objects.create(name='San Antonio', point=other_srid_pnt)

        # Now verifying that San Antonio was transformed correctly
        sa = City.objects.get(name='San Antonio')
        self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
        self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)

        # If the GeometryField SRID is -1, then we shouldn't perform any
        # transformation if the SRID of the input geometry is different.
        m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
        m1.save()
        self.assertEqual(-1, m1.geom.srid)

    def test_createnull(self):
        "Testing creating a model instance and the geometry being None"
        c = City()
        self.assertIsNone(c.point)

    def test_geometryfield(self):
        "Testing the general GeometryField."
        Feature(name='Point', geom=Point(1, 1)).save()
        Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
        Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
        Feature(name='GeometryCollection',
                geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
                                        Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()

        f_1 = Feature.objects.get(name='Point')
        self.assertIsInstance(f_1.geom, Point)
        self.assertEqual((1.0, 1.0), f_1.geom.tuple)
        f_2 = Feature.objects.get(name='LineString')
        self.assertIsInstance(f_2.geom, LineString)
        self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)

        f_3 = Feature.objects.get(name='Polygon')
        self.assertIsInstance(f_3.geom, Polygon)
        f_4 = Feature.objects.get(name='GeometryCollection')
        self.assertIsInstance(f_4.geom, GeometryCollection)
        self.assertEqual(f_3.geom, f_4.geom[2])

    # TODO: fix on Oracle: ORA-22901: cannot compare nested table or VARRAY or
    # LOB attributes of an object type.
    @no_oracle
    @skipUnlessDBFeature("supports_transform")
    def test_inherited_geofields(self):
        "Database functions on inherited Geometry fields."
        # Creating a Pennsylvanian city.
        PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')

        # All transformation SQL will need to be performed on the
        # _parent_ table.
        qs = PennsylvaniaCity.objects.annotate(new_point=functions.Transform('point', srid=32128))

        self.assertEqual(1, qs.count())
        for pc in qs:
            self.assertEqual(32128, pc.new_point.srid)

    def test_raw_sql_query(self):
        "Testing raw SQL query."
        cities1 = City.objects.all()
        point_select = connection.ops.select % 'point'
        cities2 = list(City.objects.raw(
            'select id, name, %s as point from geoapp_city' % point_select
        ))
        self.assertEqual(len(cities1), len(cities2))
        with self.assertNumQueries(0):  # Ensure point isn't deferred.
            self.assertIsInstance(cities2[0].point, Point)

    def test_dumpdata_loaddata_cycle(self):
        """
        Test a dumpdata/loaddata cycle with geographic data.
        """
        out = StringIO()
        original_data = list(City.objects.all().order_by('name'))
        call_command('dumpdata', 'geoapp.City', stdout=out)
        result = out.getvalue()
        houston = City.objects.get(name='Houston')
        self.assertIn('"point": "%s"' % houston.point.ewkt, result)

        # Reload now dumped data
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json') as tmp:
            tmp.write(result)
            tmp.seek(0)
            call_command('loaddata', tmp.name, verbosity=0)
        self.assertEqual(original_data, list(City.objects.all().order_by('name')))

    @skipUnlessDBFeature("supports_empty_geometries")
    def test_empty_geometries(self):
        geometry_classes = [
            Point,
            LineString,
            LinearRing,
            Polygon,
            MultiPoint,
            MultiLineString,
            MultiPolygon,
            GeometryCollection,
        ]
        for klass in geometry_classes:
            g = klass(srid=4326)
            feature = Feature.objects.create(name='Empty %s' % klass.__name__, geom=g)
            feature.refresh_from_db()
            if klass is LinearRing:
                # LinearRing isn't representable in WKB, so GEOSGeometry.wkb
                # uses LineString instead.
                g = LineString(srid=4326)
            self.assertEqual(feature.geom, g)
            self.assertEqual(feature.geom.srid, g.srid)


class GeoLookupTest(TestCase):
    fixtures = ['initial']

    def test_disjoint_lookup(self):
        "Testing the `disjoint` lookup type."
        if mysql and not mariadb and connection.mysql_version < (8, 0, 0):
            raise unittest.SkipTest('MySQL < 8 gives different results.')
        ptown = City.objects.get(name='Pueblo')
        qs1 = City.objects.filter(point__disjoint=ptown.point)
        self.assertEqual(7, qs1.count())
        qs2 = State.objects.filter(poly__disjoint=ptown.point)
        self.assertEqual(1, qs2.count())
        self.assertEqual('Kansas', qs2[0].name)

    def test_contains_contained_lookups(self):
        "Testing the 'contained', 'contains', and 'bbcontains' lookup types."
        # Getting Texas, yes we were a country -- once ;)
        texas = Country.objects.get(name='Texas')

        # Seeing what cities are in Texas, should get Houston and Dallas,
        # and Oklahoma City because 'contained' only checks on the
        # _bounding box_ of the Geometries.
        if connection.features.supports_contained_lookup:
            qs = City.objects.filter(point__contained=texas.mpoly)
            self.assertEqual(3, qs.count())
            cities = ['Houston', 'Dallas', 'Oklahoma City']
            for c in qs:
                self.assertIn(c.name, cities)

        # Pulling out some cities.
        houston = City.objects.get(name='Houston')
        wellington = City.objects.get(name='Wellington')
        pueblo = City.objects.get(name='Pueblo')
        okcity = City.objects.get(name='Oklahoma City')
        lawrence = City.objects.get(name='Lawrence')

        # Now testing contains on the countries using the points for
        # Houston and Wellington.
        tx = Country.objects.get(mpoly__contains=houston.point)  # Query w/GEOSGeometry
        nz = Country.objects.get(mpoly__contains=wellington.point.hex)  # Query w/EWKBHEX
        self.assertEqual('Texas', tx.name)
        self.assertEqual('New Zealand', nz.name)

        # Testing `contains` on the states using the point for Lawrence.
        ks = State.objects.get(poly__contains=lawrence.point)
        self.assertEqual('Kansas', ks.name)

        # Pueblo and Oklahoma City (even though OK City is within the bounding
        # box of Texas) are not contained in Texas or New Zealand.
        self.assertEqual(len(Country.objects.filter(mpoly__contains=pueblo.point)), 0)  # Query w/GEOSGeometry object
        self.assertEqual(len(Country.objects.filter(mpoly__contains=okcity.point.wkt)), 0)  # Query w/WKT

        # OK City is contained w/in bounding box of Texas.
        if connection.features.supports_bbcontains_lookup:
            qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
            self.assertEqual(1, len(qs))
            self.assertEqual('Texas', qs[0].name)

    @skipUnlessDBFeature("supports_crosses_lookup")
    def test_crosses_lookup(self):
        Track.objects.create(
            name='Line1',
            line=LineString([(-95, 29), (-60, 0)])
        )
        self.assertEqual(
            Track.objects.filter(line__crosses=LineString([(-95, 0), (-60, 29)])).count(),
            1
        )
        self.assertEqual(
            Track.objects.filter(line__crosses=LineString([(-95, 30), (0, 30)])).count(),
            0
        )

    @skipUnlessDBFeature("supports_isvalid_lookup")
    def test_isvalid_lookup(self):
        invalid_geom = fromstr('POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))')
        State.objects.create(name='invalid', poly=invalid_geom)
        qs = State.objects.all()
        if oracle or (mysql and connection.mysql_version < (8, 0, 0)):
            # Kansas has adjacent vertices with distance 6.99244813842e-12
            # which is smaller than the default Oracle tolerance.
            # It's invalid on MySQL < 8 also.
            qs = qs.exclude(name='Kansas')
            self.assertEqual(State.objects.filter(name='Kansas', poly__isvalid=False).count(), 1)
        self.assertEqual(qs.filter(poly__isvalid=False).count(), 1)
        self.assertEqual(qs.filter(poly__isvalid=True).count(), qs.count() - 1)

    @skipUnlessDBFeature("supports_left_right_lookups")
    def test_left_right_lookups(self):
        "Testing the 'left' and 'right' lookup types."
        # Left: A << B => true if xmax(A) < xmin(B)
        # Right: A >> B => true if xmin(A) > xmax(B)
        # See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.

        # Getting the borders for Colorado & Kansas
        co_border = State.objects.get(name='Colorado').poly
        ks_border = State.objects.get(name='Kansas').poly

        # These cities should be strictly to the right of the CO border.
        cities = ['Houston', 'Dallas', 'Oklahoma City',
                  'Lawrence', 'Chicago', 'Wellington']
        qs = City.objects.filter(point__right=co_border)
        self.assertEqual(6, len(qs))
        for c in qs:
            self.assertIn(c.name, cities)

        # These cities should be strictly to the right of the KS border.
        cities = ['Chicago', 'Wellington']
        qs = City.objects.filter(point__right=ks_border)
        self.assertEqual(2, len(qs))
        for c in qs:
            self.assertIn(c.name, cities)

        # Note: Wellington has an 'X' value of 174, so it will not be considered
        # to the left of CO.
        vic = City.objects.get(point__left=co_border)
        self.assertEqual('Victoria', vic.name)

        cities = ['Pueblo', 'Victoria']
        qs = City.objects.filter(point__left=ks_border)
        self.assertEqual(2, len(qs))
        for c in qs:
            self.assertIn(c.name, cities)

    @skipUnlessGISLookup("strictly_above", "strictly_below")
    def test_strictly_above_below_lookups(self):
        dallas = City.objects.get(name='Dallas')
        self.assertQuerysetEqual(
            City.objects.filter(point__strictly_above=dallas.point).order_by('name'),
            ['Chicago', 'Lawrence', 'Oklahoma City', 'Pueblo', 'Victoria'],
            lambda b: b.name
        )
        self.assertQuerysetEqual(
            City.objects.filter(point__strictly_below=dallas.point).order_by('name'),
            ['Houston', 'Wellington'],
            lambda b: b.name
        )

    def test_equals_lookups(self):
        "Testing the 'same_as' and 'equals' lookup types."
        pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
        c1 = City.objects.get(point=pnt)
        c2 = City.objects.get(point__same_as=pnt)
        c3 = City.objects.get(point__equals=pnt)
        for c in [c1, c2, c3]:
            self.assertEqual('Houston', c.name)

    @skipUnlessDBFeature("supports_null_geometries")
    def test_null_geometries(self):
        "Testing NULL geometry support, and the `isnull` lookup type."
        # Creating a state with a NULL boundary.
        State.objects.create(name='Puerto Rico')

        # Querying for both NULL and Non-NULL values.
        nullqs = State.objects.filter(poly__isnull=True)
        validqs = State.objects.filter(poly__isnull=False)

        # Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
        self.assertEqual(1, len(nullqs))
        self.assertEqual('Puerto Rico', nullqs[0].name)
        # GeometryField=None is an alias for __isnull=True.
        self.assertCountEqual(State.objects.filter(poly=None), nullqs)
        self.assertCountEqual(State.objects.exclude(poly=None), validqs)

        # The valid states should be Colorado & Kansas
        self.assertEqual(2, len(validqs))
        state_names = [s.name for s in validqs]
        self.assertIn('Colorado', state_names)
        self.assertIn('Kansas', state_names)

        # Saving another commonwealth w/a NULL geometry.
        nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
        self.assertIsNone(nmi.poly)

        # Assigning a geometry and saving -- then UPDATE back to NULL.
        nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
        nmi.save()
        State.objects.filter(name='Northern Mariana Islands').update(poly=None)
        self.assertIsNone(State.objects.get(name='Northern Mariana Islands').poly)

    @skipUnlessDBFeature('supports_null_geometries', 'supports_crosses_lookup', 'supports_relate_lookup')
    def test_null_geometries_excluded_in_lookups(self):
        """NULL features are excluded in spatial lookup functions."""
        null = State.objects.create(name='NULL', poly=None)
        queries = [
            ('equals', Point(1, 1)),
            ('disjoint', Point(1, 1)),
            ('touches', Point(1, 1)),
            ('crosses', LineString((0, 0), (1, 1), (5, 5))),
            ('within', Point(1, 1)),
            ('overlaps', LineString((0, 0), (1, 1), (5, 5))),
            ('contains', LineString((0, 0), (1, 1), (5, 5))),
            ('intersects', LineString((0, 0), (1, 1), (5, 5))),
            ('relate', (Point(1, 1), 'T*T***FF*')),
            ('same_as', Point(1, 1)),
            ('exact', Point(1, 1)),
            ('coveredby', Point(1, 1)),
            ('covers', Point(1, 1)),
        ]
        for lookup, geom in queries:
            with self.subTest(lookup=lookup):
                self.assertNotIn(null, State.objects.filter(**{'poly__%s' % lookup: geom}))

    @skipUnlessDBFeature("supports_relate_lookup")
    def test_relate_lookup(self):
        "Testing the 'relate' lookup type."
        # To make things more interesting, we will have our Texas reference point in
        # different SRIDs.
        pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
        pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)

        # Not passing in a geometry as first param raises a ValueError when
        # initializing the QuerySet.
        with self.assertRaises(ValueError):
            Country.objects.filter(mpoly__relate=(23, 'foo'))

        # Making sure the right exception is raised for the given
        # bad arguments.
        for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
            qs = Country.objects.filter(mpoly__relate=bad_args)
            with self.assertRaises(e):
                qs.count()

        # Relate works differently for the different backends.
        if postgis or spatialite or mariadb:
            contains_mask = 'T*T***FF*'
            within_mask = 'T*F**F***'
            intersects_mask = 'T********'
        elif oracle:
            contains_mask = 'contains'
            within_mask = 'inside'
            # TODO: This is not quite the same as the PostGIS mask above
            intersects_mask = 'overlapbdyintersect'

        # Testing contains relation mask.
        if connection.features.supports_transform:
            self.assertEqual(
                Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name,
                'Texas',
            )
        self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)

        # Testing within relation mask.
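        # On PostGIS/SpatiaLite, 'T*F**F***' is the DE-9IM pattern for
        # "within": the interiors intersect, and neither the interior nor the
        # boundary of the point intersects the exterior of the polygon.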
ks = State.objects.get(name='Kansas') self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name) # Testing intersection relation mask. if not oracle: if connection.features.supports_transform: self.assertEqual( Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name, 'Texas', ) self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name) self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name) # With a complex geometry expression mask = 'anyinteract' if oracle else within_mask self.assertFalse(City.objects.exclude(point__relate=(functions.Union('point', 'point'), mask))) def test_gis_lookups_with_complex_expressions(self): multiple_arg_lookups = {'dwithin', 'relate'} # These lookups are tested elsewhere. lookups = connection.ops.gis_operators.keys() - multiple_arg_lookups self.assertTrue(lookups, 'No lookups found') for lookup in lookups: with self.subTest(lookup): City.objects.filter(**{'point__' + lookup: functions.Union('point', 'point')}).exists() class GeoQuerySetTest(TestCase): # TODO: GeoQuerySet is removed, organize these test better. fixtures = ['initial'] @skipUnlessDBFeature("supports_extent_aggr") def test_extent(self): """ Testing the `Extent` aggregate. """ # Reference query: # `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');` # => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203) expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820) qs = City.objects.filter(name__in=('Houston', 'Dallas')) extent = qs.aggregate(Extent('point'))['point__extent'] for val, exp in zip(extent, expected): self.assertAlmostEqual(exp, val, 4) self.assertIsNone(City.objects.filter(name=('Smalltown')).aggregate(Extent('point'))['point__extent']) @skipUnlessDBFeature("supports_extent_aggr") def test_extent_with_limit(self): """ Testing if extent supports limit. """ extent1 = City.objects.all().aggregate(Extent('point'))['point__extent'] extent2 = City.objects.all()[:3].aggregate(Extent('point'))['point__extent'] self.assertNotEqual(extent1, extent2) def test_make_line(self): """ Testing the `MakeLine` aggregate. """ if not connection.features.supports_make_line_aggr: with self.assertRaises(NotSupportedError): City.objects.all().aggregate(MakeLine('point')) return # MakeLine on an inappropriate field returns simply None self.assertIsNone(State.objects.aggregate(MakeLine('poly'))['poly__makeline']) # Reference query: # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city; ref_line = GEOSGeometry( 'LINESTRING(-95.363151 29.763374,-96.801611 32.782057,' '-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,' '-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326 ) # We check for equality with a tolerance of 10e-5 which is a lower bound # of the precisions of ref_line coordinates line = City.objects.aggregate(MakeLine('point'))['point__makeline'] self.assertTrue( ref_line.equals_exact(line, tolerance=10e-5), "%s != %s" % (ref_line, line) ) @skipUnlessDBFeature('supports_union_aggr') def test_unionagg(self): """ Testing the `Union` aggregate. """ tx = Country.objects.get(name='Texas').mpoly # Houston, Dallas -- Ordering may differ depending on backend or GEOS version. 
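        # equals() is used below instead of == because the member order of the
        # aggregated MULTIPOINT is backend-dependent; equals() checks
        # geometric equality regardless of ordering.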
union = GEOSGeometry('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)') qs = City.objects.filter(point__within=tx) with self.assertRaises(ValueError): qs.aggregate(Union('name')) # Using `field_name` keyword argument in one query and specifying an # order in the other (which should not be used because this is # an aggregate method on a spatial column) u1 = qs.aggregate(Union('point'))['point__union'] u2 = qs.order_by('name').aggregate(Union('point'))['point__union'] self.assertTrue(union.equals(u1)) self.assertTrue(union.equals(u2)) qs = City.objects.filter(name='NotACity') self.assertIsNone(qs.aggregate(Union('point'))['point__union']) def test_within_subquery(self): """ Using a queryset inside a geo lookup is working (using a subquery) (#14483). """ tex_cities = City.objects.filter( point__within=Country.objects.filter(name='Texas').values('mpoly')).order_by('name') self.assertEqual(list(tex_cities.values_list('name', flat=True)), ['Dallas', 'Houston']) def test_non_concrete_field(self): NonConcreteModel.objects.create(point=Point(0, 0), name='name') list(NonConcreteModel.objects.all()) def test_values_srid(self): for c, v in zip(City.objects.all(), City.objects.values()): self.assertEqual(c.point.srid, v['point'].srid)
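
# A minimal illustrative sketch (not part of the suite) of the subquery
# pattern exercised by test_within_subquery above: any values() QuerySet of
# geometries is accepted as the right-hand side of a spatial lookup and is
# compiled to a subquery by the backend.
#
#   texas = Country.objects.filter(name='Texas').values('mpoly')
#   City.objects.filter(point__within=texas)  # single query with a subselect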
import os import sys from distutils.sysconfig import get_python_lib from setuptools import find_packages, setup CURRENT_PYTHON = sys.version_info[:2] REQUIRED_PYTHON = (3, 6) # This check and everything above must remain compatible with Python 2.7. if CURRENT_PYTHON < REQUIRED_PYTHON: sys.stderr.write(""" ========================== Unsupported Python version ========================== This version of Django requires Python {}.{}, but you're trying to install it on Python {}.{}. This may be because you are using a version of pip that doesn't understand the python_requires classifier. Make sure you have pip >= 9.0 and setuptools >= 24.2, then try again: $ python -m pip install --upgrade pip setuptools $ python -m pip install django This will install the latest version of Django which works on your version of Python. If you can't upgrade your pip (or Python), request an older version of Django: $ python -m pip install "django<2" """.format(*(REQUIRED_PYTHON + CURRENT_PYTHON))) sys.exit(1) # Warn if we are installing over top of an existing installation. This can # cause issues where files that were deleted from a more recent Django are # still present in site-packages. See #18115. overlay_warning = False if "install" in sys.argv: lib_paths = [get_python_lib()] if lib_paths[0].startswith("/usr/lib/"): # We have to try also with an explicit prefix of /usr/local in order to # catch Debian's custom user site-packages directory. lib_paths.append(get_python_lib(prefix="/usr/local")) for lib_path in lib_paths: existing_path = os.path.abspath(os.path.join(lib_path, "django")) if os.path.exists(existing_path): # We note the need for the warning here, but present it after the # command is run, so it's more likely to be seen. overlay_warning = True break EXCLUDE_FROM_PACKAGES = ['django.conf.project_template', 'django.conf.app_template', 'django.bin'] # Dynamically calculate the version based on django.VERSION. 
version = __import__('django').get_version() def read(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as f: return f.read() setup( name='Django', version=version, python_requires='>={}.{}'.format(*REQUIRED_PYTHON), url='https://www.djangoproject.com/', author='Django Software Foundation', author_email='[email protected]', description=('A high-level Python Web framework that encourages ' 'rapid development and clean, pragmatic design.'), long_description=read('README.rst'), license='BSD', packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES), include_package_data=True, scripts=['django/bin/django-admin.py'], entry_points={'console_scripts': [ 'django-admin = django.core.management:execute_from_command_line', ]}, install_requires=['pytz', 'sqlparse', 'asgiref'], extras_require={ "bcrypt": ["bcrypt"], "argon2": ["argon2-cffi >= 16.1.0"], }, zip_safe=False, classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3 :: Only', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Internet :: WWW/HTTP :: WSGI', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: Software Development :: Libraries :: Python Modules', ], project_urls={ 'Documentation': 'https://docs.djangoproject.com/', 'Funding': 'https://www.djangoproject.com/fundraising/', 'Source': 'https://github.com/django/django', 'Tracker': 'https://code.djangoproject.com/', }, ) if overlay_warning: sys.stderr.write(""" ======== WARNING! ======== You have just installed Django over top of an existing installation, without removing it first. Because of this, your install may now include extraneous files from a previous version that have since been removed from Django. This is known to cause a variety of problems. You should manually remove the %(existing_path)s directory and re-install Django. """ % {"existing_path": existing_path})
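
# Illustrative note on the overlay check above: with Debian's system Python,
# get_python_lib() returns something like /usr/lib/python3/dist-packages,
# while pip installs into /usr/local/lib/python3.X/dist-packages, so both
# prefixes must be probed before warning about an overlaid install.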
""" Cross Site Request Forgery Middleware. This module provides a middleware that implements protection against request forgeries from other sites. """ import logging import re import string from urllib.parse import urlparse from django.conf import settings from django.core.exceptions import DisallowedHost, ImproperlyConfigured from django.urls import get_callable from django.utils.cache import patch_vary_headers from django.utils.crypto import constant_time_compare, get_random_string from django.utils.deprecation import MiddlewareMixin from django.utils.http import is_same_domain from django.utils.log import log_response logger = logging.getLogger('django.security.csrf') REASON_NO_REFERER = "Referer checking failed - no Referer." REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins." REASON_NO_CSRF_COOKIE = "CSRF cookie not set." REASON_BAD_TOKEN = "CSRF token missing or incorrect." REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed." REASON_INSECURE_REFERER = "Referer checking failed - Referer is insecure while host is secure." CSRF_SECRET_LENGTH = 32 CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits CSRF_SESSION_KEY = '_csrftoken' def _get_failure_view(): """Return the view to be used for CSRF rejections.""" return get_callable(settings.CSRF_FAILURE_VIEW) def _get_new_csrf_string(): return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS) def _salt_cipher_secret(secret): """ Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a token by adding a salt and using it to encrypt the secret. """ salt = _get_new_csrf_string() chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in salt)) cipher = ''.join(chars[(x + y) % len(chars)] for x, y in pairs) return salt + cipher def _unsalt_cipher_token(token): """ Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length CSRF_TOKEN_LENGTH, and that its first half is a salt), use it to decrypt the second half to produce the original secret. """ salt = token[:CSRF_SECRET_LENGTH] token = token[CSRF_SECRET_LENGTH:] chars = CSRF_ALLOWED_CHARS pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in salt)) return ''.join(chars[x - y] for x, y in pairs) # Note negative values are ok def _get_new_csrf_token(): return _salt_cipher_secret(_get_new_csrf_string()) def get_token(request): """ Return the CSRF token required for a POST form. The token is an alphanumeric value. A new token is created if one is not already set. A side effect of calling this function is to make the csrf_protect decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie' header to the outgoing response. For this reason, you may need to use this function lazily, as is done by the csrf context processor. """ if "CSRF_COOKIE" not in request.META: csrf_secret = _get_new_csrf_string() request.META["CSRF_COOKIE"] = _salt_cipher_secret(csrf_secret) else: csrf_secret = _unsalt_cipher_token(request.META["CSRF_COOKIE"]) request.META["CSRF_COOKIE_USED"] = True return _salt_cipher_secret(csrf_secret) def rotate_token(request): """ Change the CSRF token in use for a request - should be done on login for security purposes. 
""" request.META.update({ "CSRF_COOKIE_USED": True, "CSRF_COOKIE": _get_new_csrf_token(), }) request.csrf_cookie_needs_reset = True def _sanitize_token(token): # Allow only ASCII alphanumerics if re.search('[^a-zA-Z0-9]', token): return _get_new_csrf_token() elif len(token) == CSRF_TOKEN_LENGTH: return token elif len(token) == CSRF_SECRET_LENGTH: # Older Django versions set cookies to values of CSRF_SECRET_LENGTH # alphanumeric characters. For backwards compatibility, accept # such values as unsalted secrets. # It's easier to salt here and be consistent later, rather than add # different code paths in the checks, although that might be a tad more # efficient. return _salt_cipher_secret(token) return _get_new_csrf_token() def _compare_salted_tokens(request_csrf_token, csrf_token): # Assume both arguments are sanitized -- that is, strings of # length CSRF_TOKEN_LENGTH, all CSRF_ALLOWED_CHARS. return constant_time_compare( _unsalt_cipher_token(request_csrf_token), _unsalt_cipher_token(csrf_token), ) class CsrfViewMiddleware(MiddlewareMixin): """ Require a present and correct csrfmiddlewaretoken for POST requests that have a CSRF cookie, and set an outgoing CSRF cookie. This middleware should be used in conjunction with the {% csrf_token %} template tag. """ # The _accept and _reject methods currently only exist for the sake of the # requires_csrf_token decorator. def _accept(self, request): # Avoid checking the request twice by adding a custom attribute to # request. This will be relevant when both decorator and middleware # are used. request.csrf_processing_done = True return None def _reject(self, request, reason): response = _get_failure_view()(request, reason=reason) log_response( 'Forbidden (%s): %s', reason, request.path, response=response, request=request, logger=logger, ) return response def _get_token(self, request): if settings.CSRF_USE_SESSIONS: try: return request.session.get(CSRF_SESSION_KEY) except AttributeError: raise ImproperlyConfigured( 'CSRF_USE_SESSIONS is enabled, but request.session is not ' 'set. SessionMiddleware must appear before CsrfViewMiddleware ' 'in MIDDLEWARE.' ) else: try: cookie_token = request.COOKIES[settings.CSRF_COOKIE_NAME] except KeyError: return None csrf_token = _sanitize_token(cookie_token) if csrf_token != cookie_token: # Cookie token needed to be replaced; # the cookie needs to be reset. request.csrf_cookie_needs_reset = True return csrf_token def _set_token(self, request, response): if settings.CSRF_USE_SESSIONS: if request.session.get(CSRF_SESSION_KEY) != request.META['CSRF_COOKIE']: request.session[CSRF_SESSION_KEY] = request.META['CSRF_COOKIE'] else: response.set_cookie( settings.CSRF_COOKIE_NAME, request.META['CSRF_COOKIE'], max_age=settings.CSRF_COOKIE_AGE, domain=settings.CSRF_COOKIE_DOMAIN, path=settings.CSRF_COOKIE_PATH, secure=settings.CSRF_COOKIE_SECURE, httponly=settings.CSRF_COOKIE_HTTPONLY, samesite=settings.CSRF_COOKIE_SAMESITE, ) # Set the Vary header since content varies with the CSRF cookie. patch_vary_headers(response, ('Cookie',)) def process_request(self, request): csrf_token = self._get_token(request) if csrf_token is not None: # Use same token next time. 
            request.META['CSRF_COOKIE'] = csrf_token

    def process_view(self, request, callback, callback_args, callback_kwargs):
        if getattr(request, 'csrf_processing_done', False):
            return None

        # Wait until request.META["CSRF_COOKIE"] has been manipulated before
        # bailing out, so that get_token still works
        if getattr(callback, 'csrf_exempt', False):
            return None

        # Assume that anything not defined as 'safe' by RFC7231 needs protection
        if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
            if getattr(request, '_dont_enforce_csrf_checks', False):
                # Mechanism to turn off CSRF checks for test suite.
                # It comes after the creation of CSRF cookies, so that
                # everything else continues to work exactly the same
                # (e.g. cookies are sent, etc.), but before any
                # branches that call reject().
                return self._accept(request)

            if request.is_secure():
                # Suppose user visits http://example.com/
                # An active network attacker (man-in-the-middle, MITM) sends a
                # POST form that targets https://example.com/detonate-bomb/ and
                # submits it via JavaScript.
                #
                # The attacker will need to provide a CSRF cookie and token, but
                # that's no problem for a MITM and the session-independent
                # secret we're using. So the MITM can circumvent the CSRF
                # protection. This is true for any HTTP connection, but anyone
                # using HTTPS expects better! For this reason, for
                # https://example.com/ we need additional protection that treats
                # http://example.com/ as completely untrusted. Under HTTPS,
                # Barth et al. found that the Referer header is missing for
                # same-domain requests in only about 0.2% of cases or less, so
                # we can use strict Referer checking.
                referer = request.META.get('HTTP_REFERER')
                if referer is None:
                    return self._reject(request, REASON_NO_REFERER)

                referer = urlparse(referer)

                # Make sure we have a valid URL for Referer.
                if '' in (referer.scheme, referer.netloc):
                    return self._reject(request, REASON_MALFORMED_REFERER)

                # Ensure that our Referer is also secure.
                if referer.scheme != 'https':
                    return self._reject(request, REASON_INSECURE_REFERER)

                # If there isn't a CSRF_COOKIE_DOMAIN, require an exact match
                # on host:port. If not, obey the cookie rules (or those for
                # the session cookie, if CSRF_USE_SESSIONS).
                good_referer = (
                    settings.SESSION_COOKIE_DOMAIN
                    if settings.CSRF_USE_SESSIONS
                    else settings.CSRF_COOKIE_DOMAIN
                )
                if good_referer is not None:
                    server_port = request.get_port()
                    if server_port not in ('443', '80'):
                        good_referer = '%s:%s' % (good_referer, server_port)
                else:
                    try:
                        # request.get_host() includes the port.
                        good_referer = request.get_host()
                    except DisallowedHost:
                        pass

                # Create a list of all acceptable HTTP referers, including the
                # current host if it's permitted by ALLOWED_HOSTS.
                good_hosts = list(settings.CSRF_TRUSTED_ORIGINS)
                if good_referer is not None:
                    good_hosts.append(good_referer)

                if not any(is_same_domain(referer.netloc, host) for host in good_hosts):
                    reason = REASON_BAD_REFERER % referer.geturl()
                    return self._reject(request, reason)

            csrf_token = request.META.get('CSRF_COOKIE')
            if csrf_token is None:
                # No CSRF cookie. For POST requests, we insist on a CSRF cookie,
                # and in this way we can avoid all CSRF attacks, including login
                # CSRF.
                return self._reject(request, REASON_NO_CSRF_COOKIE)

            # Check non-cookie token for match.
            request_csrf_token = ""
            if request.method == "POST":
                try:
                    request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
                except OSError:
                    # Handle a broken connection before we've completed reading
                    # the POST data.
                    # process_view shouldn't raise any
                    # exceptions, so we'll ignore and serve the user a 403
                    # (assuming they're still listening, which they probably
                    # aren't because of the error).
                    pass

            if request_csrf_token == "":
                # Fall back to X-CSRFToken, to make things easier for AJAX,
                # and possible for PUT/DELETE.
                request_csrf_token = request.META.get(settings.CSRF_HEADER_NAME, '')

            request_csrf_token = _sanitize_token(request_csrf_token)
            if not _compare_salted_tokens(request_csrf_token, csrf_token):
                return self._reject(request, REASON_BAD_TOKEN)

        return self._accept(request)

    def process_response(self, request, response):
        if not getattr(request, 'csrf_cookie_needs_reset', False):
            if getattr(response, 'csrf_cookie_set', False):
                return response

        if not request.META.get("CSRF_COOKIE_USED", False):
            return response

        # Set the CSRF cookie even if it's already set, so we renew
        # the expiry timer.
        self._set_token(request, response)
        response.csrf_cookie_set = True
        return response
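
# A minimal sketch (not public API) of the masking scheme implemented by
# _salt_cipher_secret()/_unsalt_cipher_token() above. A token is
# salt + cipher, where the cipher shifts each secret character by the
# corresponding salt character over CSRF_ALLOWED_CHARS (a Vigenere-style
# substitution), so unsalting a freshly salted secret always round-trips:
#
#   secret = _get_new_csrf_string()
#   token = _salt_cipher_secret(secret)
#   assert len(token) == CSRF_TOKEN_LENGTH
#   assert _unsalt_cipher_token(token) == secret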
""" The main QuerySet implementation. This provides the public API for the ORM. """ import copy import operator import warnings from collections import namedtuple from functools import lru_cache from itertools import chain from django.conf import settings from django.core import exceptions from django.db import ( DJANGO_VERSION_PICKLE_KEY, IntegrityError, connections, router, transaction, ) from django.db.models import DateField, DateTimeField, sql from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import Collector from django.db.models.expressions import Case, Expression, F, Value, When from django.db.models.fields import AutoField from django.db.models.functions import Cast, Trunc from django.db.models.query_utils import FilteredRelation, InvalidQuery, Q from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE from django.db.utils import NotSupportedError from django.utils import timezone from django.utils.functional import cached_property, partition from django.utils.version import get_version # The maximum number of results to fetch in a get() query. MAX_GET_RESULTS = 21 # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 class BaseIterable: def __init__(self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): self.queryset = queryset self.chunked_fetch = chunked_fetch self.chunk_size = chunk_size class ModelIterable(BaseIterable): """Iterable that yields a model instance for each row.""" def __iter__(self): queryset = self.queryset db = queryset.db compiler = queryset.query.get_compiler(using=db) # Execute the query. This will also fill compiler.select, klass_info, # and annotations. results = compiler.execute_sql(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size) select, klass_info, annotation_col_map = (compiler.select, compiler.klass_info, compiler.annotation_col_map) model_cls = klass_info['model'] select_fields = klass_info['select_fields'] model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1 init_list = [f[0].target.attname for f in select[model_fields_start:model_fields_end]] related_populators = get_related_populators(klass_info, select, db) known_related_objects = [ (field, related_objs, operator.attrgetter(*[ field.attname if from_field == 'self' else queryset.model._meta.get_field(from_field).attname for from_field in field.from_fields ])) for field, related_objs in queryset._known_related_objects.items() ] for row in compiler.results_iter(results): obj = model_cls.from_db(db, init_list, row[model_fields_start:model_fields_end]) for rel_populator in related_populators: rel_populator.populate(row, obj) if annotation_col_map: for attr_name, col_pos in annotation_col_map.items(): setattr(obj, attr_name, row[col_pos]) # Add the known related objects to the model. for field, rel_objs, rel_getter in known_related_objects: # Avoid overwriting objects loaded by, e.g., select_related(). if field.is_cached(obj): continue rel_obj_id = rel_getter(obj) try: rel_obj = rel_objs[rel_obj_id] except KeyError: pass # May happen in qs1 | qs2 scenarios. else: setattr(obj, field.name, rel_obj) yield obj class ValuesIterable(BaseIterable): """ Iterable returned by QuerySet.values() that yields a dict for each row. """ def __iter__(self): queryset = self.queryset query = queryset.query compiler = query.get_compiler(queryset.db) # extra(select=...) cols are always at the start of the row. 
names = [ *query.extra_select, *query.values_select, *query.annotation_select, ] indexes = range(len(names)) for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size): yield {names[i]: row[i] for i in indexes} class ValuesListIterable(BaseIterable): """ Iterable returned by QuerySet.values_list(flat=False) that yields a tuple for each row. """ def __iter__(self): queryset = self.queryset query = queryset.query compiler = query.get_compiler(queryset.db) if queryset._fields: # extra(select=...) cols are always at the start of the row. names = [ *query.extra_select, *query.values_select, *query.annotation_select, ] fields = [*queryset._fields, *(f for f in query.annotation_select if f not in queryset._fields)] if fields != names: # Reorder according to fields. index_map = {name: idx for idx, name in enumerate(names)} rowfactory = operator.itemgetter(*[index_map[f] for f in fields]) return map( rowfactory, compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size) ) return compiler.results_iter(tuple_expected=True, chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size) class NamedValuesListIterable(ValuesListIterable): """ Iterable returned by QuerySet.values_list(named=True) that yields a namedtuple for each row. """ @staticmethod @lru_cache() def create_namedtuple_class(*names): # Cache namedtuple() with @lru_cache() since it's too slow to be # called for every QuerySet evaluation. return namedtuple('Row', names) def __iter__(self): queryset = self.queryset if queryset._fields: names = queryset._fields else: query = queryset.query names = [*query.extra_select, *query.values_select, *query.annotation_select] tuple_class = self.create_namedtuple_class(*names) new = tuple.__new__ for row in super().__iter__(): yield new(tuple_class, row) class FlatValuesListIterable(BaseIterable): """ Iterable returned by QuerySet.values_list(flat=True) that yields single values. """ def __iter__(self): queryset = self.queryset compiler = queryset.query.get_compiler(queryset.db) for row in compiler.results_iter(chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size): yield row[0] class QuerySet: """Represent a lazy database lookup for a set of objects.""" def __init__(self, model=None, query=None, using=None, hints=None): self.model = model self._db = using self._hints = hints or {} self._query = query or sql.Query(self.model) self._result_cache = None self._sticky_filter = False self._for_write = False self._prefetch_related_lookups = () self._prefetch_done = False self._known_related_objects = {} # {rel_field: {pk: rel_obj}} self._iterable_class = ModelIterable self._fields = None self._defer_next_filter = False self._deferred_filter = None @property def query(self): if self._deferred_filter: negate, args, kwargs = self._deferred_filter self._filter_or_exclude_inplace(negate, *args, **kwargs) self._deferred_filter = None return self._query @query.setter def query(self, value): self._query = value def as_manager(cls): # Address the circular dependency between `Queryset` and `Manager`. 
from django.db.models.manager import Manager manager = Manager.from_queryset(cls)() manager._built_with_as_manager = True return manager as_manager.queryset_only = True as_manager = classmethod(as_manager) ######################## # PYTHON MAGIC METHODS # ######################## def __deepcopy__(self, memo): """Don't populate the QuerySet's cache.""" obj = self.__class__() for k, v in self.__dict__.items(): if k == '_result_cache': obj.__dict__[k] = None else: obj.__dict__[k] = copy.deepcopy(v, memo) return obj def __getstate__(self): # Force the cache to be fully populated. self._fetch_all() return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: get_version()} def __setstate__(self, state): msg = None pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: current_version = get_version() if current_version != pickled_version: msg = ( "Pickled queryset instance's Django version %s does not " "match the current version %s." % (pickled_version, current_version) ) else: msg = "Pickled queryset instance's Django version is not specified." if msg: warnings.warn(msg, RuntimeWarning, stacklevel=2) self.__dict__.update(state) def __repr__(self): data = list(self[:REPR_OUTPUT_SIZE + 1]) if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." return '<%s %r>' % (self.__class__.__name__, data) def __len__(self): self._fetch_all() return len(self._result_cache) def __iter__(self): """ The queryset iterator protocol uses three nested iterators in the default case: 1. sql.compiler.execute_sql() - Returns 100 rows at time (constants.GET_ITERATOR_CHUNK_SIZE) using cursor.fetchmany(). This part is responsible for doing some column masking, and returning the rows in chunks. 2. sql.compiler.results_iter() - Returns one row at time. At this point the rows are still just tuples. In some cases the return values are converted to Python values at this location. 3. self.iterator() - Responsible for turning the rows into model objects. """ self._fetch_all() return iter(self._result_cache) def __bool__(self): self._fetch_all() return bool(self._result_cache) def __getitem__(self, k): """Retrieve an item or slice from the set of results.""" if not isinstance(k, (int, slice)): raise TypeError( 'QuerySet indices must be integers or slices, not %s.' % type(k).__name__ ) assert ((not isinstance(k, slice) and (k >= 0)) or (isinstance(k, slice) and (k.start is None or k.start >= 0) and (k.stop is None or k.stop >= 0))), \ "Negative indexing is not supported." 
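        # Slicing an unevaluated QuerySet maps to LIMIT/OFFSET on the SQL
        # query; a slice step cannot be expressed in SQL, so it forces
        # evaluation (list(qs)[::k.step]) below.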
if self._result_cache is not None: return self._result_cache[k] if isinstance(k, slice): qs = self._chain() if k.start is not None: start = int(k.start) else: start = None if k.stop is not None: stop = int(k.stop) else: stop = None qs.query.set_limits(start, stop) return list(qs)[::k.step] if k.step else qs qs = self._chain() qs.query.set_limits(k, k + 1) qs._fetch_all() return qs._result_cache[0] def __and__(self, other): self._merge_sanity_check(other) if isinstance(other, EmptyQuerySet): return other if isinstance(self, EmptyQuerySet): return self combined = self._chain() combined._merge_known_related_objects(other) combined.query.combine(other.query, sql.AND) return combined def __or__(self, other): self._merge_sanity_check(other) if isinstance(self, EmptyQuerySet): return other if isinstance(other, EmptyQuerySet): return self query = self if self.query.can_filter() else self.model._base_manager.filter(pk__in=self.values('pk')) combined = query._chain() combined._merge_known_related_objects(other) if not other.query.can_filter(): other = other.model._base_manager.filter(pk__in=other.values('pk')) combined.query.combine(other.query, sql.OR) return combined #################################### # METHODS THAT DO DATABASE QUERIES # #################################### def _iterator(self, use_chunked_fetch, chunk_size): yield from self._iterable_class(self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size) def iterator(self, chunk_size=2000): """ An iterator over the results from applying this QuerySet to the database. """ if chunk_size <= 0: raise ValueError('Chunk size must be strictly positive.') use_chunked_fetch = not connections[self.db].settings_dict.get('DISABLE_SERVER_SIDE_CURSORS') return self._iterator(use_chunked_fetch, chunk_size) def aggregate(self, *args, **kwargs): """ Return a dictionary containing the calculations (aggregation) over the current queryset. If args is present the expression is passed as a kwarg using the Aggregate object's default alias. """ if self.query.distinct_fields: raise NotImplementedError("aggregate() + distinct(fields) not implemented.") self._validate_values_are_expressions((*args, *kwargs.values()), method_name='aggregate') for arg in args: # The default_alias property raises TypeError if default_alias # can't be set automatically or AttributeError if it isn't an # attribute. try: arg.default_alias except (AttributeError, TypeError): raise TypeError("Complex aggregates require an alias") kwargs[arg.default_alias] = arg query = self.query.chain() for (alias, aggregate_expr) in kwargs.items(): query.add_annotation(aggregate_expr, alias, is_summary=True) if not query.annotations[alias].contains_aggregate: raise TypeError("%s is not an aggregate expression" % alias) return query.get_aggregation(self.db, kwargs) def count(self): """ Perform a SELECT COUNT() and return the number of records as an integer. If the QuerySet is already fully cached, return the length of the cached results set to avoid multiple SELECT COUNT(*) calls. """ if self._result_cache is not None: return len(self._result_cache) return self.query.get_count(using=self.db) def get(self, *args, **kwargs): """ Perform the query and return a single object matching the given keyword arguments. 
""" clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs) if self.query.can_filter() and not self.query.distinct_fields: clone = clone.order_by() limit = None if not clone.query.select_for_update or connections[clone.db].features.supports_select_for_update_with_limit: limit = MAX_GET_RESULTS clone.query.set_limits(high=limit) num = len(clone) if num == 1: return clone._result_cache[0] if not num: raise self.model.DoesNotExist( "%s matching query does not exist." % self.model._meta.object_name ) raise self.model.MultipleObjectsReturned( 'get() returned more than one %s -- it returned %s!' % ( self.model._meta.object_name, num if not limit or num < limit else 'more than %s' % (limit - 1), ) ) def create(self, **kwargs): """ Create a new object with the given kwargs, saving it to the database and returning the created object. """ obj = self.model(**kwargs) self._for_write = True obj.save(force_insert=True, using=self.db) return obj def _populate_pk_values(self, objs): for obj in objs: if obj.pk is None: obj.pk = obj._meta.pk.get_pk_value_on_save(obj) def bulk_create(self, objs, batch_size=None, ignore_conflicts=False): """ Insert each of the instances into the database. Do *not* call save() on each of the instances, do not send any pre/post_save signals, and do not set the primary key attribute if it is an autoincrement field (except if features.can_return_rows_from_bulk_insert=True). Multi-table models are not supported. """ # When you bulk insert you don't get the primary keys back (if it's an # autoincrement, except if can_return_rows_from_bulk_insert=True), so # you can't insert into the child tables which references this. There # are two workarounds: # 1) This could be implemented if you didn't have an autoincrement pk # 2) You could do it by doing O(n) normal inserts into the parent # tables to get the primary keys back and then doing a single bulk # insert into the childmost table. # We currently set the primary keys on the objects when using # PostgreSQL via the RETURNING ID clause. It should be possible for # Oracle as well, but the semantics for extracting the primary keys is # trickier so it's not done yet. assert batch_size is None or batch_size > 0 # Check that the parents share the same concrete model with the our # model to detect the inheritance pattern ConcreteGrandParent -> # MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy # would not identify that case as involving multiple tables. 
for parent in self.model._meta.get_parent_list(): if parent._meta.concrete_model is not self.model._meta.concrete_model: raise ValueError("Can't bulk create a multi-table inherited model") if not objs: return objs self._for_write = True connection = connections[self.db] opts = self.model._meta fields = opts.concrete_fields objs = list(objs) self._populate_pk_values(objs) with transaction.atomic(using=self.db, savepoint=False): objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs) if objs_with_pk: returned_columns = self._batched_insert( objs_with_pk, fields, batch_size, ignore_conflicts=ignore_conflicts, ) for obj_with_pk, results in zip(objs_with_pk, returned_columns): for result, field in zip(results, opts.db_returning_fields): if field != opts.pk: setattr(obj_with_pk, field.attname, result) for obj_with_pk in objs_with_pk: obj_with_pk._state.adding = False obj_with_pk._state.db = self.db if objs_without_pk: fields = [f for f in fields if not isinstance(f, AutoField)] returned_columns = self._batched_insert( objs_without_pk, fields, batch_size, ignore_conflicts=ignore_conflicts, ) if connection.features.can_return_rows_from_bulk_insert and not ignore_conflicts: assert len(returned_columns) == len(objs_without_pk) for obj_without_pk, results in zip(objs_without_pk, returned_columns): for result, field in zip(results, opts.db_returning_fields): setattr(obj_without_pk, field.attname, result) obj_without_pk._state.adding = False obj_without_pk._state.db = self.db return objs def bulk_update(self, objs, fields, batch_size=None): """ Update the given fields in each of the given objects in the database. """ if batch_size is not None and batch_size < 0: raise ValueError('Batch size must be a positive integer.') if not fields: raise ValueError('Field names must be given to bulk_update().') objs = tuple(objs) if any(obj.pk is None for obj in objs): raise ValueError('All bulk_update() objects must have a primary key set.') fields = [self.model._meta.get_field(name) for name in fields] if any(not f.concrete or f.many_to_many for f in fields): raise ValueError('bulk_update() can only be used with concrete fields.') if any(f.primary_key for f in fields): raise ValueError('bulk_update() cannot be used with primary key fields.') if not objs: return # PK is used twice in the resulting update query, once in the filter # and once in the WHEN. Each field will also have one CAST. 
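        # Hence ['pk', 'pk'] + fields below: two pk parameters per row (filter
        # and WHEN) plus one parameter per updated field when sizing batches.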
max_batch_size = connections[self.db].ops.bulk_batch_size(['pk', 'pk'] + fields, objs) batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size requires_casting = connections[self.db].features.requires_casted_case_in_updates batches = (objs[i:i + batch_size] for i in range(0, len(objs), batch_size)) updates = [] for batch_objs in batches: update_kwargs = {} for field in fields: when_statements = [] for obj in batch_objs: attr = getattr(obj, field.attname) if not isinstance(attr, Expression): attr = Value(attr, output_field=field) when_statements.append(When(pk=obj.pk, then=attr)) case_statement = Case(*when_statements, output_field=field) if requires_casting: case_statement = Cast(case_statement, output_field=field) update_kwargs[field.attname] = case_statement updates.append(([obj.pk for obj in batch_objs], update_kwargs)) with transaction.atomic(using=self.db, savepoint=False): for pks, update_kwargs in updates: self.filter(pk__in=pks).update(**update_kwargs) bulk_update.alters_data = True def get_or_create(self, defaults=None, **kwargs): """ Look up an object with the given kwargs, creating one if necessary. Return a tuple of (object, created), where created is a boolean specifying whether an object was created. """ # The get() needs to be targeted at the write database in order # to avoid potential transaction consistency problems. self._for_write = True try: return self.get(**kwargs), False except self.model.DoesNotExist: params = self._extract_model_params(defaults, **kwargs) return self._create_object_from_params(kwargs, params) def update_or_create(self, defaults=None, **kwargs): """ Look up an object with the given kwargs, updating one with defaults if it exists, otherwise create a new one. Return a tuple (object, created), where created is a boolean specifying whether an object was created. """ defaults = defaults or {} self._for_write = True with transaction.atomic(using=self.db): try: obj = self.select_for_update().get(**kwargs) except self.model.DoesNotExist: params = self._extract_model_params(defaults, **kwargs) # Lock the row so that a concurrent update is blocked until # after update_or_create() has performed its save. obj, created = self._create_object_from_params(kwargs, params, lock=True) if created: return obj, created for k, v in defaults.items(): setattr(obj, k, v() if callable(v) else v) obj.save(using=self.db) return obj, False def _create_object_from_params(self, lookup, params, lock=False): """ Try to create an object using passed params. Used by get_or_create() and update_or_create(). """ try: with transaction.atomic(using=self.db): params = {k: v() if callable(v) else v for k, v in params.items()} obj = self.create(**params) return obj, True except IntegrityError as e: try: qs = self.select_for_update() if lock else self return qs.get(**lookup), False except self.model.DoesNotExist: pass raise e def _extract_model_params(self, defaults, **kwargs): """ Prepare `params` for creating a model instance based on the given kwargs; for use by get_or_create() and update_or_create(). """ defaults = defaults or {} params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k} params.update(defaults) property_names = self.model._meta._property_names invalid_params = [] for param in params: try: self.model._meta.get_field(param) except exceptions.FieldDoesNotExist: # It's okay to use a model's property if it has a setter. 
if not (param in property_names and getattr(self.model, param).fset): invalid_params.append(param) if invalid_params: raise exceptions.FieldError( "Invalid field name(s) for model %s: '%s'." % ( self.model._meta.object_name, "', '".join(sorted(invalid_params)), )) return params def _earliest(self, *fields): """ Return the earliest object according to fields (if given) or by the model's Meta.get_latest_by. """ if fields: order_by = fields else: order_by = getattr(self.model._meta, 'get_latest_by') if order_by and not isinstance(order_by, (tuple, list)): order_by = (order_by,) if order_by is None: raise ValueError( "earliest() and latest() require either fields as positional " "arguments or 'get_latest_by' in the model's Meta." ) assert not self.query.is_sliced, \ "Cannot change a query once a slice has been taken." obj = self._chain() obj.query.set_limits(high=1) obj.query.clear_ordering(force_empty=True) obj.query.add_ordering(*order_by) return obj.get() def earliest(self, *fields): return self._earliest(*fields) def latest(self, *fields): return self.reverse()._earliest(*fields) def first(self): """Return the first object of a query or None if no match is found.""" for obj in (self if self.ordered else self.order_by('pk'))[:1]: return obj def last(self): """Return the last object of a query or None if no match is found.""" for obj in (self.reverse() if self.ordered else self.order_by('-pk'))[:1]: return obj def in_bulk(self, id_list=None, *, field_name='pk'): """ Return a dictionary mapping each of the given IDs to the object with that ID. If `id_list` isn't provided, evaluate the entire QuerySet. """ assert not self.query.is_sliced, \ "Cannot use 'limit' or 'offset' with in_bulk" if field_name != 'pk' and not self.model._meta.get_field(field_name).unique: raise ValueError("in_bulk()'s field_name must be a unique field but %r isn't." % field_name) if id_list is not None: if not id_list: return {} filter_key = '{}__in'.format(field_name) batch_size = connections[self.db].features.max_query_params id_list = tuple(id_list) # If the database has a limit on the number of query parameters # (e.g. SQLite), retrieve objects in batches if necessary. if batch_size and batch_size < len(id_list): qs = () for offset in range(0, len(id_list), batch_size): batch = id_list[offset:offset + batch_size] qs += tuple(self.filter(**{filter_key: batch}).order_by()) else: qs = self.filter(**{filter_key: id_list}).order_by() else: qs = self._chain() return {getattr(obj, field_name): obj for obj in qs} def delete(self): """Delete the records in the current QuerySet.""" assert not self.query.is_sliced, \ "Cannot use 'limit' or 'offset' with delete." if self._fields is not None: raise TypeError("Cannot call delete() after .values() or .values_list()") del_query = self._chain() # The delete is actually 2 queries - one to find related objects, # and one to delete. Make sure that the discovery of related # objects is performed on the same database as the deletion. del_query._for_write = True # Disable non-supported fields. del_query.query.select_for_update = False del_query.query.select_related = False del_query.query.clear_ordering(force_empty=True) collector = Collector(using=del_query.db) collector.collect(del_query) deleted, _rows_count = collector.delete() # Clear the result cache, in case this QuerySet gets reused. 
self._result_cache = None return deleted, _rows_count delete.alters_data = True delete.queryset_only = True def _raw_delete(self, using): """ Delete objects found from the given queryset in single direct SQL query. No signals are sent and there is no protection for cascades. """ query = self.query.clone() query.__class__ = sql.DeleteQuery cursor = query.get_compiler(using).execute_sql(CURSOR) return cursor.rowcount if cursor else 0 _raw_delete.alters_data = True def update(self, **kwargs): """ Update all elements in the current QuerySet, setting all the given fields to the appropriate values. """ assert not self.query.is_sliced, \ "Cannot update a query once a slice has been taken." self._for_write = True query = self.query.chain(sql.UpdateQuery) query.add_update_values(kwargs) # Clear any annotations so that they won't be present in subqueries. query.annotations = {} with transaction.mark_for_rollback_on_error(using=self.db): rows = query.get_compiler(self.db).execute_sql(CURSOR) self._result_cache = None return rows update.alters_data = True def _update(self, values): """ A version of update() that accepts field objects instead of field names. Used primarily for model saving and not intended for use by general code (it requires too much poking around at model internals to be useful at that level). """ assert not self.query.is_sliced, \ "Cannot update a query once a slice has been taken." query = self.query.chain(sql.UpdateQuery) query.add_update_fields(values) # Clear any annotations so that they won't be present in subqueries. query.annotations = {} self._result_cache = None return query.get_compiler(self.db).execute_sql(CURSOR) _update.alters_data = True _update.queryset_only = False def exists(self): if self._result_cache is None: return self.query.has_results(using=self.db) return bool(self._result_cache) def _prefetch_related_objects(self): # This method can only be called once the result cache has been filled. 
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups) self._prefetch_done = True def explain(self, *, format=None, **options): return self.query.explain(using=self.db, format=format, **options) ################################################## # PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS # ################################################## def raw(self, raw_query, params=None, translations=None, using=None): if using is None: using = self.db qs = RawQuerySet(raw_query, model=self.model, params=params, translations=translations, using=using) qs._prefetch_related_lookups = self._prefetch_related_lookups[:] return qs def _values(self, *fields, **expressions): clone = self._chain() if expressions: clone = clone.annotate(**expressions) clone._fields = fields clone.query.set_values(fields) return clone def values(self, *fields, **expressions): fields += tuple(expressions) clone = self._values(*fields, **expressions) clone._iterable_class = ValuesIterable return clone def values_list(self, *fields, flat=False, named=False): if flat and named: raise TypeError("'flat' and 'named' can't be used together.") if flat and len(fields) > 1: raise TypeError("'flat' is not valid when values_list is called with more than one field.") field_names = {f for f in fields if not hasattr(f, 'resolve_expression')} _fields = [] expressions = {} counter = 1 for field in fields: if hasattr(field, 'resolve_expression'): field_id_prefix = getattr(field, 'default_alias', field.__class__.__name__.lower()) while True: field_id = field_id_prefix + str(counter) counter += 1 if field_id not in field_names: break expressions[field_id] = field _fields.append(field_id) else: _fields.append(field) clone = self._values(*_fields, **expressions) clone._iterable_class = ( NamedValuesListIterable if named else FlatValuesListIterable if flat else ValuesListIterable ) return clone def dates(self, field_name, kind, order='ASC'): """ Return a list of date objects representing all available dates for the given field_name, scoped to 'kind'. """ assert kind in ('year', 'month', 'week', 'day'), \ "'kind' must be one of 'year', 'month', 'week', or 'day'." assert order in ('ASC', 'DESC'), \ "'order' must be either 'ASC' or 'DESC'." return self.annotate( datefield=Trunc(field_name, kind, output_field=DateField()), plain_field=F(field_name) ).values_list( 'datefield', flat=True ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datefield') def datetimes(self, field_name, kind, order='ASC', tzinfo=None): """ Return a list of datetime objects representing all available datetimes for the given field_name, scoped to 'kind'. """ assert kind in ('year', 'month', 'week', 'day', 'hour', 'minute', 'second'), \ "'kind' must be one of 'year', 'month', 'week', 'day', 'hour', 'minute', or 'second'." assert order in ('ASC', 'DESC'), \ "'order' must be either 'ASC' or 'DESC'." 
if settings.USE_TZ: if tzinfo is None: tzinfo = timezone.get_current_timezone() else: tzinfo = None return self.annotate( datetimefield=Trunc(field_name, kind, output_field=DateTimeField(), tzinfo=tzinfo), plain_field=F(field_name) ).values_list( 'datetimefield', flat=True ).distinct().filter(plain_field__isnull=False).order_by(('-' if order == 'DESC' else '') + 'datetimefield') def none(self): """Return an empty QuerySet.""" clone = self._chain() clone.query.set_empty() return clone ################################################################## # PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET # ################################################################## def all(self): """ Return a new QuerySet that is a copy of the current one. This allows a QuerySet to proxy for a model manager in some cases. """ return self._chain() def filter(self, *args, **kwargs): """ Return a new QuerySet instance with the args ANDed to the existing set. """ self._not_support_combined_queries('filter') return self._filter_or_exclude(False, *args, **kwargs) def exclude(self, *args, **kwargs): """ Return a new QuerySet instance with NOT (args) ANDed to the existing set. """ self._not_support_combined_queries('exclude') return self._filter_or_exclude(True, *args, **kwargs) def _filter_or_exclude(self, negate, *args, **kwargs): if args or kwargs: assert not self.query.is_sliced, \ "Cannot filter a query once a slice has been taken." clone = self._chain() if self._defer_next_filter: self._defer_next_filter = False clone._deferred_filter = negate, args, kwargs else: clone._filter_or_exclude_inplace(negate, *args, **kwargs) return clone def _filter_or_exclude_inplace(self, negate, *args, **kwargs): if negate: self._query.add_q(~Q(*args, **kwargs)) else: self._query.add_q(Q(*args, **kwargs)) def complex_filter(self, filter_obj): """ Return a new QuerySet instance with filter_obj added to the filters. filter_obj can be a Q object or a dictionary of keyword lookup arguments. This exists to support framework features such as 'limit_choices_to', and usually it will be more natural to use other methods. """ if isinstance(filter_obj, Q): clone = self._chain() clone.query.add_q(filter_obj) return clone else: return self._filter_or_exclude(False, **filter_obj) def _combinator_query(self, combinator, *other_qs, all=False): # Clone the query to inherit the select list and everything clone = self._chain() # Clear limits and ordering so they can be reapplied clone.query.clear_ordering(True) clone.query.clear_limits() clone.query.combined_queries = (self.query,) + tuple(qs.query for qs in other_qs) clone.query.combinator = combinator clone.query.combinator_all = all return clone def union(self, *other_qs, all=False): # If the query is an EmptyQuerySet, combine all nonempty querysets. if isinstance(self, EmptyQuerySet): qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)] return qs[0]._combinator_query('union', *qs[1:], all=all) if qs else self return self._combinator_query('union', *other_qs, all=all) def intersection(self, *other_qs): # If any query is an EmptyQuerySet, return it. if isinstance(self, EmptyQuerySet): return self for other in other_qs: if isinstance(other, EmptyQuerySet): return other return self._combinator_query('intersection', *other_qs) def difference(self, *other_qs): # If the query is an EmptyQuerySet, return it. 
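        # For example, qs.none().difference(other) stays empty, so the
        # EXCEPT combinator query can be skipped entirely.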
if isinstance(self, EmptyQuerySet): return self return self._combinator_query('difference', *other_qs) def select_for_update(self, nowait=False, skip_locked=False, of=()): """ Return a new QuerySet instance that will select objects with a FOR UPDATE lock. """ if nowait and skip_locked: raise ValueError('The nowait option cannot be used with skip_locked.') obj = self._chain() obj._for_write = True obj.query.select_for_update = True obj.query.select_for_update_nowait = nowait obj.query.select_for_update_skip_locked = skip_locked obj.query.select_for_update_of = of return obj def select_related(self, *fields): """ Return a new QuerySet instance that will select related objects. If fields are specified, they must be ForeignKey fields and only those related objects are included in the selection. If select_related(None) is called, clear the list. """ self._not_support_combined_queries('select_related') if self._fields is not None: raise TypeError("Cannot call select_related() after .values() or .values_list()") obj = self._chain() if fields == (None,): obj.query.select_related = False elif fields: obj.query.add_select_related(fields) else: obj.query.select_related = True return obj def prefetch_related(self, *lookups): """ Return a new QuerySet instance that will prefetch the specified Many-To-One and Many-To-Many related objects when the QuerySet is evaluated. When prefetch_related() is called more than once, append to the list of prefetch lookups. If prefetch_related(None) is called, clear the list. """ self._not_support_combined_queries('prefetch_related') clone = self._chain() if lookups == (None,): clone._prefetch_related_lookups = () else: for lookup in lookups: if isinstance(lookup, Prefetch): lookup = lookup.prefetch_to lookup = lookup.split(LOOKUP_SEP, 1)[0] if lookup in self.query._filtered_relations: raise ValueError('prefetch_related() is not supported with FilteredRelation.') clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups return clone def annotate(self, *args, **kwargs): """ Return a query set in which the returned objects have been annotated with extra data or aggregations. """ self._not_support_combined_queries('annotate') self._validate_values_are_expressions(args + tuple(kwargs.values()), method_name='annotate') annotations = {} for arg in args: # The default_alias property may raise a TypeError. try: if arg.default_alias in kwargs: raise ValueError("The named annotation '%s' conflicts with the " "default name for another annotation." % arg.default_alias) except TypeError: raise TypeError("Complex annotations require an alias") annotations[arg.default_alias] = arg annotations.update(kwargs) clone = self._chain() names = self._fields if names is None: names = set(chain.from_iterable( (field.name, field.attname) if hasattr(field, 'attname') else (field.name,) for field in self.model._meta.get_fields() )) for alias, annotation in annotations.items(): if alias in names: raise ValueError("The annotation '%s' conflicts with a field on " "the model." 
                                 % alias)
            if isinstance(annotation, FilteredRelation):
                clone.query.add_filtered_relation(annotation, alias)
            else:
                clone.query.add_annotation(annotation, alias, is_summary=False)

        for alias, annotation in clone.query.annotations.items():
            if alias in annotations and annotation.contains_aggregate:
                if clone._fields is None:
                    clone.query.group_by = True
                else:
                    clone.query.set_group_by()
                break

        return clone

    def order_by(self, *field_names):
        """Return a new QuerySet instance with the ordering changed."""
        assert not self.query.is_sliced, \
            "Cannot reorder a query once a slice has been taken."
        obj = self._chain()
        obj.query.clear_ordering(force_empty=False)
        obj.query.add_ordering(*field_names)
        return obj

    def distinct(self, *field_names):
        """
        Return a new QuerySet instance that will select only distinct results.
        """
        assert not self.query.is_sliced, \
            "Cannot create distinct fields once a slice has been taken."
        obj = self._chain()
        obj.query.add_distinct_fields(*field_names)
        return obj

    def extra(self, select=None, where=None, params=None, tables=None,
              order_by=None, select_params=None):
        """Add extra SQL fragments to the query."""
        self._not_support_combined_queries('extra')
        assert not self.query.is_sliced, \
            "Cannot change a query once a slice has been taken"
        clone = self._chain()
        clone.query.add_extra(select, select_params, where, params, tables, order_by)
        return clone

    def reverse(self):
        """Reverse the ordering of the QuerySet."""
        if self.query.is_sliced:
            raise TypeError('Cannot reverse a query once a slice has been taken.')
        clone = self._chain()
        clone.query.standard_ordering = not clone.query.standard_ordering
        return clone

    def defer(self, *fields):
        """
        Defer the loading of data for certain fields until they are accessed.
        Add the set of deferred fields to any existing set of deferred fields.
        The only exception to this is if None is passed in as the only
        parameter, in which case all deferrals are removed.
        """
        self._not_support_combined_queries('defer')
        if self._fields is not None:
            raise TypeError("Cannot call defer() after .values() or .values_list()")
        clone = self._chain()
        if fields == (None,):
            clone.query.clear_deferred_loading()
        else:
            clone.query.add_deferred_loading(fields)
        return clone

    def only(self, *fields):
        """
        Essentially, the opposite of defer(). Only the fields passed into this
        method and that are not already specified as deferred are loaded
        immediately when the queryset is evaluated.
        """
        self._not_support_combined_queries('only')
        if self._fields is not None:
            raise TypeError("Cannot call only() after .values() or .values_list()")
        if fields == (None,):
            # Can only pass None to defer(), not only(), as the rest option.
            # That won't stop people trying to do this, so let's be explicit.
            raise TypeError("Cannot pass None as an argument to only().")
        for field in fields:
            field = field.split(LOOKUP_SEP, 1)[0]
            if field in self.query._filtered_relations:
                raise ValueError('only() is not supported with FilteredRelation.')
        clone = self._chain()
        clone.query.add_immediate_loading(fields)
        return clone

    def using(self, alias):
        """Select which database this QuerySet should execute against."""
        clone = self._chain()
        clone._db = alias
        return clone

    ###################################
    # PUBLIC INTROSPECTION ATTRIBUTES #
    ###################################

    @property
    def ordered(self):
        """
        Return True if the QuerySet is ordered -- i.e. has an order_by()
        clause or a default ordering on the model (or is empty).
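
        For example, assuming a hypothetical ``Entry`` model::

            Entry.objects.order_by('pub_date').ordered  # True
            Entry.objects.all().ordered  # True only if Meta.ordering is set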
""" if isinstance(self, EmptyQuerySet): return True if self.query.extra_order_by or self.query.order_by: return True elif self.query.default_ordering and self.query.get_meta().ordering: return True else: return False @property def db(self): """Return the database used if this query is executed now.""" if self._for_write: return self._db or router.db_for_write(self.model, **self._hints) return self._db or router.db_for_read(self.model, **self._hints) ################### # PRIVATE METHODS # ################### def _insert(self, objs, fields, returning_fields=None, raw=False, using=None, ignore_conflicts=False): """ Insert a new record for the given model. This provides an interface to the InsertQuery class and is how Model.save() is implemented. """ self._for_write = True if using is None: using = self.db query = sql.InsertQuery(self.model, ignore_conflicts=ignore_conflicts) query.insert_values(fields, objs, raw=raw) return query.get_compiler(using=using).execute_sql(returning_fields) _insert.alters_data = True _insert.queryset_only = False def _batched_insert(self, objs, fields, batch_size, ignore_conflicts=False): """ Helper method for bulk_create() to insert objs one batch at a time. """ if ignore_conflicts and not connections[self.db].features.supports_ignore_conflicts: raise NotSupportedError('This database backend does not support ignoring conflicts.') ops = connections[self.db].ops max_batch_size = max(ops.bulk_batch_size(fields, objs), 1) batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size inserted_rows = [] bulk_return = connections[self.db].features.can_return_rows_from_bulk_insert for item in [objs[i:i + batch_size] for i in range(0, len(objs), batch_size)]: if bulk_return and not ignore_conflicts: inserted_columns = self._insert( item, fields=fields, using=self.db, returning_fields=self.model._meta.db_returning_fields, ignore_conflicts=ignore_conflicts, ) if isinstance(inserted_columns, list): inserted_rows.extend(inserted_columns) else: inserted_rows.append(inserted_columns) else: self._insert(item, fields=fields, using=self.db, ignore_conflicts=ignore_conflicts) return inserted_rows def _chain(self, **kwargs): """ Return a copy of the current QuerySet that's ready for another operation. """ obj = self._clone() if obj._sticky_filter: obj.query.filter_is_sticky = True obj._sticky_filter = False obj.__dict__.update(kwargs) return obj def _clone(self): """ Return a copy of the current QuerySet. A lightweight alternative to deepcopy(). """ c = self.__class__(model=self.model, query=self.query.chain(), using=self._db, hints=self._hints) c._sticky_filter = self._sticky_filter c._for_write = self._for_write c._prefetch_related_lookups = self._prefetch_related_lookups[:] c._known_related_objects = self._known_related_objects c._iterable_class = self._iterable_class c._fields = self._fields return c def _fetch_all(self): if self._result_cache is None: self._result_cache = list(self._iterable_class(self)) if self._prefetch_related_lookups and not self._prefetch_done: self._prefetch_related_objects() def _next_is_sticky(self): """ Indicate that the next filter call and the one following that should be treated as a single filter. This is only important when it comes to determining when to reuse tables for many-to-many filters. Required so that we can filter naturally on the results of related managers. This doesn't return a clone of the current QuerySet (it returns "self"). 
        The method is only used internally and should be immediately followed
        by a filter() that does create a clone.
        """
        self._sticky_filter = True
        return self

    def _merge_sanity_check(self, other):
        """Check that two QuerySet classes may be merged."""
        if self._fields is not None and (
                set(self.query.values_select) != set(other.query.values_select) or
                set(self.query.extra_select) != set(other.query.extra_select) or
                set(self.query.annotation_select) != set(other.query.annotation_select)):
            raise TypeError(
                "Merging '%s' classes must involve the same values in each case."
                % self.__class__.__name__
            )

    def _merge_known_related_objects(self, other):
        """
        Keep track of all known related objects from either QuerySet instance.
        """
        for field, objects in other._known_related_objects.items():
            self._known_related_objects.setdefault(field, {}).update(objects)

    def resolve_expression(self, *args, **kwargs):
        if self._fields and len(self._fields) > 1:
            # values() queryset can only be used as nested queries
            # if they are set up to select only a single field.
            raise TypeError('Cannot use multi-field values as a filter value.')
        query = self.query.resolve_expression(*args, **kwargs)
        query._db = self._db
        return query
    resolve_expression.queryset_only = True

    def _add_hints(self, **hints):
        """
        Update hinting information for use by routers. Add new key/values or
        overwrite existing key/values.
        """
        self._hints.update(hints)

    def _has_filters(self):
        """
        Check if this QuerySet has any filtering going on. This isn't
        equivalent to checking if all objects are present in results, for
        example, qs[1:]._has_filters() -> False.
        """
        return self.query.has_filters()

    @staticmethod
    def _validate_values_are_expressions(values, method_name):
        invalid_args = sorted(str(arg) for arg in values if not hasattr(arg, 'resolve_expression'))
        if invalid_args:
            raise TypeError(
                'QuerySet.%s() received non-expression(s): %s.' % (
                    method_name,
                    ', '.join(invalid_args),
                )
            )

    def _not_support_combined_queries(self, operation_name):
        if self.query.combinator:
            raise NotSupportedError(
                'Calling QuerySet.%s() after %s() is not supported.'
                % (operation_name, self.query.combinator)
            )


class InstanceCheckMeta(type):
    def __instancecheck__(self, instance):
        return isinstance(instance, QuerySet) and instance.query.is_empty()


class EmptyQuerySet(metaclass=InstanceCheckMeta):
    """
    Marker class for checking whether a queryset is empty via .none():
        isinstance(qs.none(), EmptyQuerySet) -> True
    """

    def __init__(self, *args, **kwargs):
        raise TypeError("EmptyQuerySet can't be instantiated")


class RawQuerySet:
    """
    Provide an iterator which converts the results of raw SQL queries into
    annotated model instances.
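
    For example, assuming a hypothetical ``Person`` model whose table
    includes the primary key column::

        for p in Person.objects.raw('SELECT * FROM myapp_person'):
            print(p.first_name)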
""" def __init__(self, raw_query, model=None, query=None, params=None, translations=None, using=None, hints=None): self.raw_query = raw_query self.model = model self._db = using self._hints = hints or {} self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params) self.params = params or () self.translations = translations or {} self._result_cache = None self._prefetch_related_lookups = () self._prefetch_done = False def resolve_model_init_order(self): """Resolve the init field names and value positions.""" converter = connections[self.db].introspection.identifier_converter model_init_fields = [f for f in self.model._meta.fields if converter(f.column) in self.columns] annotation_fields = [(column, pos) for pos, column in enumerate(self.columns) if column not in self.model_fields] model_init_order = [self.columns.index(converter(f.column)) for f in model_init_fields] model_init_names = [f.attname for f in model_init_fields] return model_init_names, model_init_order, annotation_fields def prefetch_related(self, *lookups): """Same as QuerySet.prefetch_related()""" clone = self._clone() if lookups == (None,): clone._prefetch_related_lookups = () else: clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups return clone def _prefetch_related_objects(self): prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups) self._prefetch_done = True def _clone(self): """Same as QuerySet._clone()""" c = self.__class__( self.raw_query, model=self.model, query=self.query, params=self.params, translations=self.translations, using=self._db, hints=self._hints ) c._prefetch_related_lookups = self._prefetch_related_lookups[:] return c def _fetch_all(self): if self._result_cache is None: self._result_cache = list(self.iterator()) if self._prefetch_related_lookups and not self._prefetch_done: self._prefetch_related_objects() def __len__(self): self._fetch_all() return len(self._result_cache) def __bool__(self): self._fetch_all() return bool(self._result_cache) def __iter__(self): self._fetch_all() return iter(self._result_cache) def iterator(self): # Cache some things for performance reasons outside the loop. db = self.db compiler = connections[db].ops.compiler('SQLCompiler')( self.query, connections[db], db ) query = iter(self.query) try: model_init_names, model_init_pos, annotation_fields = self.resolve_model_init_order() if self.model._meta.pk.attname not in model_init_names: raise InvalidQuery('Raw query must include the primary key') model_cls = self.model fields = [self.model_fields.get(c) for c in self.columns] converters = compiler.get_converters([ f.get_col(f.model._meta.db_table) if f else None for f in fields ]) if converters: query = compiler.apply_converters(query, converters) for values in query: # Associate fields to values model_init_values = [values[pos] for pos in model_init_pos] instance = model_cls.from_db(db, model_init_names, model_init_values) if annotation_fields: for column, pos in annotation_fields: setattr(instance, column, values[pos]) yield instance finally: # Done iterating the Query. If it has its own cursor, close it. 
if hasattr(self.query, 'cursor') and self.query.cursor: self.query.cursor.close() def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self.query) def __getitem__(self, k): return list(self)[k] @property def db(self): """Return the database used if this query is executed now.""" return self._db or router.db_for_read(self.model, **self._hints) def using(self, alias): """Select the database this RawQuerySet should execute against.""" return RawQuerySet( self.raw_query, model=self.model, query=self.query.chain(using=alias), params=self.params, translations=self.translations, using=alias, ) @cached_property def columns(self): """ A list of model field names in the order they'll appear in the query results. """ columns = self.query.get_columns() # Adjust any column names which don't match field names for (query_name, model_name) in self.translations.items(): # Ignore translations for nonexistent column names try: index = columns.index(query_name) except ValueError: pass else: columns[index] = model_name return columns @cached_property def model_fields(self): """A dict mapping column names to model field names.""" converter = connections[self.db].introspection.identifier_converter model_fields = {} for field in self.model._meta.fields: name, column = field.get_attname_column() model_fields[converter(column)] = field return model_fields class Prefetch: def __init__(self, lookup, queryset=None, to_attr=None): # `prefetch_through` is the path we traverse to perform the prefetch. self.prefetch_through = lookup # `prefetch_to` is the path to the attribute that stores the result. self.prefetch_to = lookup if queryset is not None and not issubclass(queryset._iterable_class, ModelIterable): raise ValueError('Prefetch querysets cannot use values().') if to_attr: self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr]) self.queryset = queryset self.to_attr = to_attr def __getstate__(self): obj_dict = self.__dict__.copy() if self.queryset is not None: # Prevent the QuerySet from being evaluated obj_dict['queryset'] = self.queryset._chain( _result_cache=[], _prefetch_done=True, ) return obj_dict def add_prefix(self, prefix): self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to def get_current_prefetch_to(self, level): return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[:level + 1]) def get_current_to_attr(self, level): parts = self.prefetch_to.split(LOOKUP_SEP) to_attr = parts[level] as_attr = self.to_attr and level == len(parts) - 1 return to_attr, as_attr def get_current_queryset(self, level): if self.get_current_prefetch_to(level) == self.prefetch_to: return self.queryset return None def __eq__(self, other): if not isinstance(other, Prefetch): return NotImplemented return self.prefetch_to == other.prefetch_to def __hash__(self): return hash((self.__class__, self.prefetch_to)) def normalize_prefetch_lookups(lookups, prefix=None): """Normalize lookups into Prefetch objects.""" ret = [] for lookup in lookups: if not isinstance(lookup, Prefetch): lookup = Prefetch(lookup) if prefix: lookup.add_prefix(prefix) ret.append(lookup) return ret def prefetch_related_objects(model_instances, *related_lookups): """ Populate prefetched object caches for a list of model instances based on the lookups/Prefetch instances given. """ if not model_instances: return # nothing to do # We need to be able to dynamically add to the list of prefetch_related # lookups that we look up (see below). 
So we need some bookkeeping to
    # ensure we don't do duplicate work.
    done_queries = {}    # dictionary of things like 'foo__bar': [results]

    auto_lookups = set()  # we add to this as we go through.
    followed_descriptors = set()  # recursion protection

    all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
    while all_lookups:
        lookup = all_lookups.pop()
        if lookup.prefetch_to in done_queries:
            if lookup.queryset is not None:
                raise ValueError("'%s' lookup was already seen with a different queryset. "
                                 "You may need to adjust the ordering of your lookups." % lookup.prefetch_to)

            continue

        # Top level, the list of objects to decorate is the result cache
        # from the primary QuerySet. It won't be for deeper levels.
        obj_list = model_instances

        through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
        for level, through_attr in enumerate(through_attrs):
            # Prepare main instances
            if not obj_list:
                break

            prefetch_to = lookup.get_current_prefetch_to(level)
            if prefetch_to in done_queries:
                # Skip any prefetching, and any object preparation
                obj_list = done_queries[prefetch_to]
                continue

            # Prepare objects:
            good_objects = True
            for obj in obj_list:
                # Since prefetching can re-use instances, it is possible to have
                # the same instance multiple times in obj_list, so obj might
                # already be prepared.
                if not hasattr(obj, '_prefetched_objects_cache'):
                    try:
                        obj._prefetched_objects_cache = {}
                    except (AttributeError, TypeError):
                        # Must be an immutable object from
                        # values_list(flat=True), for example (TypeError) or
                        # a QuerySet subclass that isn't returning Model
                        # instances (AttributeError), either in Django or a 3rd
                        # party. prefetch_related() doesn't make sense, so quit.
                        good_objects = False
                        break
            if not good_objects:
                break

            # Descend down tree

            # We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to first object applies to all.
            first_obj = obj_list[0]
            to_attr = lookup.get_current_to_attr(level)[0]
            prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr, to_attr)

            if not attr_found:
                raise AttributeError("Cannot find '%s' on %s object, '%s' is an invalid "
                                     "parameter to prefetch_related()" %
                                     (through_attr, first_obj.__class__.__name__, lookup.prefetch_through))

            if level == len(through_attrs) - 1 and prefetcher is None:
                # Last one, this *must* resolve to something that supports
                # prefetching, otherwise there is no point adding it and the
                # developer asking for it has made a mistake.
                raise ValueError("'%s' does not resolve to an item that supports "
                                 "prefetching - this is an invalid parameter to "
                                 "prefetch_related()." % lookup.prefetch_through)

            if prefetcher is not None and not is_fetched:
                obj_list, additional_lookups = prefetch_one_level(obj_list, prefetcher, lookup, level)
                # We need to ensure we don't keep adding lookups from the
                # same relationships to stop infinite recursion. So, if we
                # are already on an automatically added lookup, don't add
                # the new lookups from relationships we've seen already.
                if not (prefetch_to in done_queries and lookup in auto_lookups and descriptor in followed_descriptors):
                    done_queries[prefetch_to] = obj_list
                    new_lookups = normalize_prefetch_lookups(reversed(additional_lookups), prefetch_to)
                    auto_lookups.update(new_lookups)
                    all_lookups.extend(new_lookups)
                followed_descriptors.add(descriptor)
            else:
                # Either a singly related object that has already been fetched
                # (e.g. via select_related), or hopefully some other property
                # that doesn't support prefetching but needs to be traversed.
# We replace the current list of parent objects with the list # of related objects, filtering out empty or missing values so # that we can continue with nullable or reverse relations. new_obj_list = [] for obj in obj_list: if through_attr in getattr(obj, '_prefetched_objects_cache', ()): # If related objects have been prefetched, use the # cache rather than the object's through_attr. new_obj = list(obj._prefetched_objects_cache.get(through_attr)) else: try: new_obj = getattr(obj, through_attr) except exceptions.ObjectDoesNotExist: continue if new_obj is None: continue # We special-case `list` rather than something more generic # like `Iterable` because we don't want to accidentally match # user models that define __iter__. if isinstance(new_obj, list): new_obj_list.extend(new_obj) else: new_obj_list.append(new_obj) obj_list = new_obj_list def get_prefetcher(instance, through_attr, to_attr): """ For the attribute 'through_attr' on the given instance, find an object that has a get_prefetch_queryset(). Return a 4 tuple containing: (the object with get_prefetch_queryset (or None), the descriptor object representing this relationship (or None), a boolean that is False if the attribute was not found at all, a boolean that is True if the attribute has already been fetched) """ prefetcher = None is_fetched = False # For singly related objects, we have to avoid getting the attribute # from the object, as this will trigger the query. So we first try # on the class, in order to get the descriptor object. rel_obj_descriptor = getattr(instance.__class__, through_attr, None) if rel_obj_descriptor is None: attr_found = hasattr(instance, through_attr) else: attr_found = True if rel_obj_descriptor: # singly related object, descriptor object has the # get_prefetch_queryset() method. if hasattr(rel_obj_descriptor, 'get_prefetch_queryset'): prefetcher = rel_obj_descriptor if rel_obj_descriptor.is_cached(instance): is_fetched = True else: # descriptor doesn't support prefetching, so we go ahead and get # the attribute on the instance rather than the class to # support many related managers rel_obj = getattr(instance, through_attr) if hasattr(rel_obj, 'get_prefetch_queryset'): prefetcher = rel_obj if through_attr != to_attr: # Special case cached_property instances because hasattr # triggers attribute computation and assignment. if isinstance(getattr(instance.__class__, to_attr, None), cached_property): is_fetched = to_attr in instance.__dict__ else: is_fetched = hasattr(instance, to_attr) else: is_fetched = through_attr in instance._prefetched_objects_cache return prefetcher, rel_obj_descriptor, attr_found, is_fetched def prefetch_one_level(instances, prefetcher, lookup, level): """ Helper function for prefetch_related_objects(). Run prefetches on all instances using the prefetcher object, assigning results to relevant caches in instance. Return the prefetched objects along with any additional prefetches that must be done due to prefetch_related lookups found from default managers. """ # prefetcher must have a method get_prefetch_queryset() which takes a list # of instances, and returns a tuple: # (queryset of instances of self.model that are related to passed in instances, # callable that gets value to be matched for returned instances, # callable that gets value to be matched for passed in instances, # boolean that is True for singly related objects, # cache or field name to assign to, # boolean that is True when the previous argument is a cache name vs a field name). 
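    # For a hypothetical reverse foreign key prefetch such as
    # Author.objects.prefetch_related('books'), this would be roughly
    # (Book queryset filtered with author__in=instances,
    #  attrgetter('author_id'), attrgetter('pk'), False, 'books', False).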
# The 'values to be matched' must be hashable as they will be used # in a dictionary. rel_qs, rel_obj_attr, instance_attr, single, cache_name, is_descriptor = ( prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level))) # We have to handle the possibility that the QuerySet we just got back # contains some prefetch_related lookups. We don't want to trigger the # prefetch_related functionality by evaluating the query. Rather, we need # to merge in the prefetch_related lookups. # Copy the lookups in case it is a Prefetch object which could be reused # later (happens in nested prefetch_related). additional_lookups = [ copy.copy(additional_lookup) for additional_lookup in getattr(rel_qs, '_prefetch_related_lookups', ()) ] if additional_lookups: # Don't need to clone because the manager should have given us a fresh # instance, so we access an internal instead of using public interface # for performance reasons. rel_qs._prefetch_related_lookups = () all_related_objects = list(rel_qs) rel_obj_cache = {} for rel_obj in all_related_objects: rel_attr_val = rel_obj_attr(rel_obj) rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj) to_attr, as_attr = lookup.get_current_to_attr(level) # Make sure `to_attr` does not conflict with a field. if as_attr and instances: # We assume that objects retrieved are homogeneous (which is the premise # of prefetch_related), so what applies to first object applies to all. model = instances[0].__class__ try: model._meta.get_field(to_attr) except exceptions.FieldDoesNotExist: pass else: msg = 'to_attr={} conflicts with a field on the {} model.' raise ValueError(msg.format(to_attr, model.__name__)) # Whether or not we're prefetching the last part of the lookup. leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level for obj in instances: instance_attr_val = instance_attr(obj) vals = rel_obj_cache.get(instance_attr_val, []) if single: val = vals[0] if vals else None if as_attr: # A to_attr has been given for the prefetch. setattr(obj, to_attr, val) elif is_descriptor: # cache_name points to a field name in obj. # This field is a descriptor for a related object. setattr(obj, cache_name, val) else: # No to_attr has been given for this prefetch operation and the # cache_name does not point to a descriptor. Store the value of # the field in the object's field cache. obj._state.fields_cache[cache_name] = val else: if as_attr: setattr(obj, to_attr, vals) else: manager = getattr(obj, to_attr) if leaf and lookup.queryset is not None: qs = manager._apply_rel_filters(lookup.queryset) else: qs = manager.get_queryset() qs._result_cache = vals # We don't want the individual qs doing prefetch_related now, # since we have merged this into the current work. qs._prefetch_done = True obj._prefetched_objects_cache[cache_name] = qs return all_related_objects, additional_lookups class RelatedPopulator: """ RelatedPopulator is used for select_related() object instantiation. The idea is that each select_related() model will be populated by a different RelatedPopulator instance. The RelatedPopulator instances get klass_info and select (computed in SQLCompiler) plus the used db as input for initialization. That data is used to compute which columns to use, how to instantiate the model, and how to populate the links between the objects. The actual creation of the objects is done in populate() method. This method gets row and from_obj as input and populates the select_related() model instance. 
""" def __init__(self, klass_info, select, db): self.db = db # Pre-compute needed attributes. The attributes are: # - model_cls: the possibly deferred model class to instantiate # - either: # - cols_start, cols_end: usually the columns in the row are # in the same order model_cls.__init__ expects them, so we # can instantiate by model_cls(*row[cols_start:cols_end]) # - reorder_for_init: When select_related descends to a child # class, then we want to reuse the already selected parent # data. However, in this case the parent data isn't necessarily # in the same order that Model.__init__ expects it to be, so # we have to reorder the parent data. The reorder_for_init # attribute contains a function used to reorder the field data # in the order __init__ expects it. # - pk_idx: the index of the primary key field in the reordered # model data. Used to check if a related object exists at all. # - init_list: the field attnames fetched from the database. For # deferred models this isn't the same as all attnames of the # model's fields. # - related_populators: a list of RelatedPopulator instances if # select_related() descends to related models from this model. # - local_setter, remote_setter: Methods to set cached values on # the object being populated and on the remote object. Usually # these are Field.set_cached_value() methods. select_fields = klass_info['select_fields'] from_parent = klass_info['from_parent'] if not from_parent: self.cols_start = select_fields[0] self.cols_end = select_fields[-1] + 1 self.init_list = [ f[0].target.attname for f in select[self.cols_start:self.cols_end] ] self.reorder_for_init = None else: attname_indexes = {select[idx][0].target.attname: idx for idx in select_fields} model_init_attnames = (f.attname for f in klass_info['model']._meta.concrete_fields) self.init_list = [attname for attname in model_init_attnames if attname in attname_indexes] self.reorder_for_init = operator.itemgetter(*[attname_indexes[attname] for attname in self.init_list]) self.model_cls = klass_info['model'] self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname) self.related_populators = get_related_populators(klass_info, select, self.db) self.local_setter = klass_info['local_setter'] self.remote_setter = klass_info['remote_setter'] def populate(self, row, from_obj): if self.reorder_for_init: obj_data = self.reorder_for_init(row) else: obj_data = row[self.cols_start:self.cols_end] if obj_data[self.pk_idx] is None: obj = None else: obj = self.model_cls.from_db(self.db, self.init_list, obj_data) for rel_iter in self.related_populators: rel_iter.populate(row, obj) self.local_setter(from_obj, obj) if obj is not None: self.remote_setter(obj, from_obj) def get_related_populators(klass_info, select, db): iterators = [] related_klass_infos = klass_info.get('related_klass_infos', []) for rel_klass_info in related_klass_infos: rel_cls = RelatedPopulator(rel_klass_info, select, db) iterators.append(rel_cls) return iterators
""" Helpers to manipulate deferred DDL statements that might need to be adjusted or discarded within when executing a migration. """ class Reference: """Base class that defines the reference interface.""" def references_table(self, table): """ Return whether or not this instance references the specified table. """ return False def references_column(self, table, column): """ Return whether or not this instance references the specified column. """ return False def rename_table_references(self, old_table, new_table): """ Rename all references to the old_name to the new_table. """ pass def rename_column_references(self, table, old_column, new_column): """ Rename all references to the old_column to the new_column. """ pass def __repr__(self): return '<%s %r>' % (self.__class__.__name__, str(self)) def __str__(self): raise NotImplementedError('Subclasses must define how they should be converted to string.') class Table(Reference): """Hold a reference to a table.""" def __init__(self, table, quote_name): self.table = table self.quote_name = quote_name def references_table(self, table): return self.table == table def rename_table_references(self, old_table, new_table): if self.table == old_table: self.table = new_table def __str__(self): return self.quote_name(self.table) class TableColumns(Table): """Base class for references to multiple columns of a table.""" def __init__(self, table, columns): self.table = table self.columns = columns def references_column(self, table, column): return self.table == table and column in self.columns def rename_column_references(self, table, old_column, new_column): if self.table == table: for index, column in enumerate(self.columns): if column == old_column: self.columns[index] = new_column class Columns(TableColumns): """Hold a reference to one or many columns.""" def __init__(self, table, columns, quote_name, col_suffixes=()): self.quote_name = quote_name self.col_suffixes = col_suffixes super().__init__(table, columns) def __str__(self): def col_str(column, idx): try: return self.quote_name(column) + self.col_suffixes[idx] except IndexError: return self.quote_name(column) return ', '.join(col_str(column, idx) for idx, column in enumerate(self.columns)) class IndexName(TableColumns): """Hold a reference to an index name.""" def __init__(self, table, columns, suffix, create_index_name): self.suffix = suffix self.create_index_name = create_index_name super().__init__(table, columns) def __str__(self): return self.create_index_name(self.table, self.columns, self.suffix) class IndexColumns(Columns): def __init__(self, table, columns, quote_name, col_suffixes=(), opclasses=()): self.opclasses = opclasses super().__init__(table, columns, quote_name, col_suffixes) def __str__(self): def col_str(column, idx): # Index.__init__() guarantees that self.opclasses is the same # length as self.columns. 
col = '{} {}'.format(self.quote_name(column), self.opclasses[idx]) try: col = '{} {}'.format(col, self.col_suffixes[idx]) except IndexError: pass return col return ', '.join(col_str(column, idx) for idx, column in enumerate(self.columns)) class ForeignKeyName(TableColumns): """Hold a reference to a foreign key name.""" def __init__(self, from_table, from_columns, to_table, to_columns, suffix_template, create_fk_name): self.to_reference = TableColumns(to_table, to_columns) self.suffix_template = suffix_template self.create_fk_name = create_fk_name super().__init__(from_table, from_columns,) def references_table(self, table): return super().references_table(table) or self.to_reference.references_table(table) def references_column(self, table, column): return ( super().references_column(table, column) or self.to_reference.references_column(table, column) ) def rename_table_references(self, old_table, new_table): super().rename_table_references(old_table, new_table) self.to_reference.rename_table_references(old_table, new_table) def rename_column_references(self, table, old_column, new_column): super().rename_column_references(table, old_column, new_column) self.to_reference.rename_column_references(table, old_column, new_column) def __str__(self): suffix = self.suffix_template % { 'to_table': self.to_reference.table, 'to_column': self.to_reference.columns[0], } return self.create_fk_name(self.table, self.columns, suffix) class Statement(Reference): """ Statement template and formatting parameters container. Allows keeping a reference to a statement without interpolating identifiers that might have to be adjusted if they're referencing a table or column that is removed """ def __init__(self, template, **parts): self.template = template self.parts = parts def references_table(self, table): return any( hasattr(part, 'references_table') and part.references_table(table) for part in self.parts.values() ) def references_column(self, table, column): return any( hasattr(part, 'references_column') and part.references_column(table, column) for part in self.parts.values() ) def rename_table_references(self, old_table, new_table): for part in self.parts.values(): if hasattr(part, 'rename_table_references'): part.rename_table_references(old_table, new_table) def rename_column_references(self, table, old_column, new_column): for part in self.parts.values(): if hasattr(part, 'rename_column_references'): part.rename_column_references(table, old_column, new_column) def __str__(self): return self.template % self.parts
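

# A minimal sketch of how the references above compose. The quote_name()
# helper is a stand-in assumption; schema editors normally pass
# connection.ops.quote_name. The function is illustrative and never called
# at import time.
def _statement_reference_sketch():
    def quote_name(name):
        return '"%s"' % name

    table = Table('author', quote_name)
    columns = Columns('author', ['name'], quote_name)
    statement = Statement(
        'CREATE INDEX idx ON %(table)s (%(columns)s)',
        table=table,
        columns=columns,
    )
    assert statement.references_table('author')
    # Renaming propagates into every stored part, so the deferred SQL stays
    # valid after the table itself is renamed.
    statement.rename_table_references('author', 'writer')
    return str(statement)  # 'CREATE INDEX idx ON "writer" ("name")'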
""" Accessors for related objects. When a field defines a relation between two models, each model class provides an attribute to access related instances of the other model class (unless the reverse accessor has been disabled with related_name='+'). Accessors are implemented as descriptors in order to customize access and assignment. This module defines the descriptor classes. Forward accessors follow foreign keys. Reverse accessors trace them back. For example, with the following models:: class Parent(Model): pass class Child(Model): parent = ForeignKey(Parent, related_name='children') ``child.parent`` is a forward many-to-one relation. ``parent.children`` is a reverse many-to-one relation. There are three types of relations (many-to-one, one-to-one, and many-to-many) and two directions (forward and reverse) for a total of six combinations. 1. Related instance on the forward side of a many-to-one relation: ``ForwardManyToOneDescriptor``. Uniqueness of foreign key values is irrelevant to accessing the related instance, making the many-to-one and one-to-one cases identical as far as the descriptor is concerned. The constraint is checked upstream (unicity validation in forms) or downstream (unique indexes in the database). 2. Related instance on the forward side of a one-to-one relation: ``ForwardOneToOneDescriptor``. It avoids querying the database when accessing the parent link field in a multi-table inheritance scenario. 3. Related instance on the reverse side of a one-to-one relation: ``ReverseOneToOneDescriptor``. One-to-one relations are asymmetrical, despite the apparent symmetry of the name, because they're implemented in the database with a foreign key from one table to another. As a consequence ``ReverseOneToOneDescriptor`` is slightly different from ``ForwardManyToOneDescriptor``. 4. Related objects manager for related instances on the reverse side of a many-to-one relation: ``ReverseManyToOneDescriptor``. Unlike the previous two classes, this one provides access to a collection of objects. It returns a manager rather than an instance. 5. Related objects manager for related instances on the forward or reverse sides of a many-to-many relation: ``ManyToManyDescriptor``. Many-to-many relations are symmetrical. The syntax of Django models requires declaring them on one side but that's an implementation detail. They could be declared on the other side without any change in behavior. Therefore the forward and reverse descriptors can be the same. If you're looking for ``ForwardManyToManyDescriptor`` or ``ReverseManyToManyDescriptor``, use ``ManyToManyDescriptor`` instead. """ from django.core.exceptions import FieldError from django.db import connections, router, transaction from django.db.models import Q, signals from django.db.models.query import QuerySet from django.db.models.query_utils import DeferredAttribute from django.utils.functional import cached_property class ForeignKeyDeferredAttribute(DeferredAttribute): def __set__(self, instance, value): if instance.__dict__.get(self.field.attname) != value and self.field.is_cached(instance): self.field.delete_cached_value(instance) instance.__dict__[self.field.attname] = value class ForwardManyToOneDescriptor: """ Accessor to the related object on the forward side of a many-to-one or one-to-one (via ForwardOneToOneDescriptor subclass) relation. In the example:: class Child(Model): parent = ForeignKey(Parent, related_name='children') ``Child.parent`` is a ``ForwardManyToOneDescriptor`` instance. 
""" def __init__(self, field_with_rel): self.field = field_with_rel @cached_property def RelatedObjectDoesNotExist(self): # The exception can't be created at initialization time since the # related model might not be resolved yet; `self.field.model` might # still be a string model reference. return type( 'RelatedObjectDoesNotExist', (self.field.remote_field.model.DoesNotExist, AttributeError), { '__module__': self.field.model.__module__, '__qualname__': '%s.%s.RelatedObjectDoesNotExist' % ( self.field.model.__qualname__, self.field.name, ), } ) def is_cached(self, instance): return self.field.is_cached(instance) def get_queryset(self, **hints): return self.field.remote_field.model._base_manager.db_manager(hints=hints).all() def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = self.get_queryset() queryset._add_hints(instance=instances[0]) rel_obj_attr = self.field.get_foreign_related_value instance_attr = self.field.get_local_related_value instances_dict = {instance_attr(inst): inst for inst in instances} related_field = self.field.foreign_related_fields[0] remote_field = self.field.remote_field # FIXME: This will need to be revisited when we introduce support for # composite fields. In the meantime we take this practical approach to # solve a regression on 1.6 when the reverse manager in hidden # (related_name ends with a '+'). Refs #21410. # The check for len(...) == 1 is a special case that allows the query # to be join-less and smaller. Refs #21760. if remote_field.is_hidden() or len(self.field.foreign_related_fields) == 1: query = {'%s__in' % related_field.name: {instance_attr(inst)[0] for inst in instances}} else: query = {'%s__in' % self.field.related_query_name(): instances} queryset = queryset.filter(**query) # Since we're going to assign directly in the cache, # we must manage the reverse relation cache manually. if not remote_field.multiple: for rel_obj in queryset: instance = instances_dict[rel_obj_attr(rel_obj)] remote_field.set_cached_value(rel_obj, instance) return queryset, rel_obj_attr, instance_attr, True, self.field.get_cache_name(), False def get_object(self, instance): qs = self.get_queryset(instance=instance) # Assuming the database enforces foreign keys, this won't fail. return qs.get(self.field.get_reverse_related_filter(instance)) def __get__(self, instance, cls=None): """ Get the related instance through the forward relation. With the example above, when getting ``child.parent``: - ``self`` is the descriptor managing the ``parent`` attribute - ``instance`` is the ``child`` instance - ``cls`` is the ``Child`` class (we don't need it) """ if instance is None: return self # The related instance is loaded from the database and then cached # by the field on the model instance state. It can also be pre-cached # by the reverse accessor (ReverseOneToOneDescriptor). try: rel_obj = self.field.get_cached_value(instance) except KeyError: has_value = None not in self.field.get_local_related_value(instance) ancestor_link = instance._meta.get_ancestor_link(self.field.model) if has_value else None if ancestor_link and ancestor_link.is_cached(instance): # An ancestor link will exist if this field is defined on a # multi-table inheritance parent of the instance's class. ancestor = ancestor_link.get_cached_value(instance) # The value might be cached on an ancestor if the instance # originated from walking down the inheritance chain. 
rel_obj = self.field.get_cached_value(ancestor, default=None) else: rel_obj = None if rel_obj is None and has_value: rel_obj = self.get_object(instance) remote_field = self.field.remote_field # If this is a one-to-one relation, set the reverse accessor # cache on the related object to the current instance to avoid # an extra SQL query if it's accessed later on. if not remote_field.multiple: remote_field.set_cached_value(rel_obj, instance) self.field.set_cached_value(instance, rel_obj) if rel_obj is None and not self.field.null: raise self.RelatedObjectDoesNotExist( "%s has no %s." % (self.field.model.__name__, self.field.name) ) else: return rel_obj def __set__(self, instance, value): """ Set the related instance through the forward relation. With the example above, when setting ``child.parent = parent``: - ``self`` is the descriptor managing the ``parent`` attribute - ``instance`` is the ``child`` instance - ``value`` is the ``parent`` instance on the right of the equal sign """ # An object must be an instance of the related class. if value is not None and not isinstance(value, self.field.remote_field.model._meta.concrete_model): raise ValueError( 'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % ( value, instance._meta.object_name, self.field.name, self.field.remote_field.model._meta.object_name, ) ) elif value is not None: if instance._state.db is None: instance._state.db = router.db_for_write(instance.__class__, instance=value) if value._state.db is None: value._state.db = router.db_for_write(value.__class__, instance=instance) if not router.allow_relation(value, instance): raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value) remote_field = self.field.remote_field # If we're setting the value of a OneToOneField to None, we need to clear # out the cache on any old related object. Otherwise, deleting the # previously-related object will also cause this object to be deleted, # which is wrong. if value is None: # Look up the previously-related object, which may still be available # since we've not yet cleared out the related field. # Use the cache directly, instead of the accessor; if we haven't # populated the cache, then we don't care - we're only accessing # the object to invalidate the accessor cache, so there's no # need to populate the cache just to expire it again. related = self.field.get_cached_value(instance, default=None) # If we've got an old related object, we need to clear out its # cache. This cache also might not exist if the related object # hasn't been accessed yet. if related is not None: remote_field.set_cached_value(related, None) for lh_field, rh_field in self.field.related_fields: setattr(instance, lh_field.attname, None) # Set the values of the related field. else: for lh_field, rh_field in self.field.related_fields: setattr(instance, lh_field.attname, getattr(value, rh_field.attname)) # Set the related instance cache used by __get__ to avoid an SQL query # when accessing the attribute we just set. self.field.set_cached_value(instance, value) # If this is a one-to-one relation, set the reverse accessor cache on # the related object to the current instance to avoid an extra SQL # query if it's accessed later on. if value is not None and not remote_field.multiple: remote_field.set_cached_value(value, instance) def __reduce__(self): """ Pickling should return the instance attached by self.field on the model, not a new copy of that descriptor. Use getattr() to retrieve the instance directly from the model. 
""" return getattr, (self.field.model, self.field.name) class ForwardOneToOneDescriptor(ForwardManyToOneDescriptor): """ Accessor to the related object on the forward side of a one-to-one relation. In the example:: class Restaurant(Model): place = OneToOneField(Place, related_name='restaurant') ``Restaurant.place`` is a ``ForwardOneToOneDescriptor`` instance. """ def get_object(self, instance): if self.field.remote_field.parent_link: deferred = instance.get_deferred_fields() # Because it's a parent link, all the data is available in the # instance, so populate the parent model with this data. rel_model = self.field.remote_field.model fields = [field.attname for field in rel_model._meta.concrete_fields] # If any of the related model's fields are deferred, fallback to # fetching all fields from the related model. This avoids a query # on the related model for every deferred field. if not any(field in fields for field in deferred): kwargs = {field: getattr(instance, field) for field in fields} obj = rel_model(**kwargs) obj._state.adding = instance._state.adding obj._state.db = instance._state.db return obj return super().get_object(instance) def __set__(self, instance, value): super().__set__(instance, value) # If the primary key is a link to a parent model and a parent instance # is being set, update the value of the inherited pk(s). if self.field.primary_key and self.field.remote_field.parent_link: opts = instance._meta # Inherited primary key fields from this object's base classes. inherited_pk_fields = [ field for field in opts.concrete_fields if field.primary_key and field.remote_field ] for field in inherited_pk_fields: rel_model_pk_name = field.remote_field.model._meta.pk.attname raw_value = getattr(value, rel_model_pk_name) if value is not None else None setattr(instance, rel_model_pk_name, raw_value) class ReverseOneToOneDescriptor: """ Accessor to the related object on the reverse side of a one-to-one relation. In the example:: class Restaurant(Model): place = OneToOneField(Place, related_name='restaurant') ``Place.restaurant`` is a ``ReverseOneToOneDescriptor`` instance. """ def __init__(self, related): # Following the example above, `related` is an instance of OneToOneRel # which represents the reverse restaurant field (place.restaurant). self.related = related @cached_property def RelatedObjectDoesNotExist(self): # The exception isn't created at initialization time for the sake of # consistency with `ForwardManyToOneDescriptor`. return type( 'RelatedObjectDoesNotExist', (self.related.related_model.DoesNotExist, AttributeError), { '__module__': self.related.model.__module__, '__qualname__': '%s.%s.RelatedObjectDoesNotExist' % ( self.related.model.__qualname__, self.related.name, ) }, ) def is_cached(self, instance): return self.related.is_cached(instance) def get_queryset(self, **hints): return self.related.related_model._base_manager.db_manager(hints=hints).all() def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = self.get_queryset() queryset._add_hints(instance=instances[0]) rel_obj_attr = self.related.field.get_local_related_value instance_attr = self.related.field.get_foreign_related_value instances_dict = {instance_attr(inst): inst for inst in instances} query = {'%s__in' % self.related.field.name: instances} queryset = queryset.filter(**query) # Since we're going to assign directly in the cache, # we must manage the reverse relation cache manually. 
for rel_obj in queryset: instance = instances_dict[rel_obj_attr(rel_obj)] self.related.field.set_cached_value(rel_obj, instance) return queryset, rel_obj_attr, instance_attr, True, self.related.get_cache_name(), False def __get__(self, instance, cls=None): """ Get the related instance through the reverse relation. With the example above, when getting ``place.restaurant``: - ``self`` is the descriptor managing the ``restaurant`` attribute - ``instance`` is the ``place`` instance - ``cls`` is the ``Place`` class (unused) Keep in mind that ``Restaurant`` holds the foreign key to ``Place``. """ if instance is None: return self # The related instance is loaded from the database and then cached # by the field on the model instance state. It can also be pre-cached # by the forward accessor (ForwardManyToOneDescriptor). try: rel_obj = self.related.get_cached_value(instance) except KeyError: related_pk = instance.pk if related_pk is None: rel_obj = None else: filter_args = self.related.field.get_forward_related_filter(instance) try: rel_obj = self.get_queryset(instance=instance).get(**filter_args) except self.related.related_model.DoesNotExist: rel_obj = None else: # Set the forward accessor cache on the related object to # the current instance to avoid an extra SQL query if it's # accessed later on. self.related.field.set_cached_value(rel_obj, instance) self.related.set_cached_value(instance, rel_obj) if rel_obj is None: raise self.RelatedObjectDoesNotExist( "%s has no %s." % ( instance.__class__.__name__, self.related.get_accessor_name() ) ) else: return rel_obj def __set__(self, instance, value): """ Set the related instance through the reverse relation. With the example above, when setting ``place.restaurant = restaurant``: - ``self`` is the descriptor managing the ``restaurant`` attribute - ``instance`` is the ``place`` instance - ``value`` is the ``restaurant`` instance on the right of the equal sign Keep in mind that ``Restaurant`` holds the foreign key to ``Place``. """ # The similarity of the code below to the code in # ForwardManyToOneDescriptor is annoying, but there's a bunch # of small differences that would make a common base class convoluted. if value is None: # Update the cached related instance (if any) & clear the cache. # Following the example above, this would be the cached # ``restaurant`` instance (if any). rel_obj = self.related.get_cached_value(instance, default=None) if rel_obj is not None: # Remove the ``restaurant`` instance from the ``place`` # instance cache. self.related.delete_cached_value(instance) # Set the ``place`` field on the ``restaurant`` # instance to None. setattr(rel_obj, self.related.field.name, None) elif not isinstance(value, self.related.related_model): # An object must be an instance of the related class. raise ValueError( 'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % ( value, instance._meta.object_name, self.related.get_accessor_name(), self.related.related_model._meta.object_name, ) ) else: if instance._state.db is None: instance._state.db = router.db_for_write(instance.__class__, instance=value) if value._state.db is None: value._state.db = router.db_for_write(value.__class__, instance=instance) if not router.allow_relation(value, instance): raise ValueError('Cannot assign "%r": the current database router prevents this relation.' 
% value) related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields) # Set the value of the related field to the value of the related object's related field for index, field in enumerate(self.related.field.local_related_fields): setattr(value, field.attname, related_pk[index]) # Set the related instance cache used by __get__ to avoid an SQL query # when accessing the attribute we just set. self.related.set_cached_value(instance, value) # Set the forward accessor cache on the related object to the current # instance to avoid an extra SQL query if it's accessed later on. self.related.field.set_cached_value(value, instance) def __reduce__(self): # Same purpose as ForwardManyToOneDescriptor.__reduce__(). return getattr, (self.related.model, self.related.name) class ReverseManyToOneDescriptor: """ Accessor to the related objects manager on the reverse side of a many-to-one relation. In the example:: class Child(Model): parent = ForeignKey(Parent, related_name='children') ``Parent.children`` is a ``ReverseManyToOneDescriptor`` instance. Most of the implementation is delegated to a dynamically defined manager class built by ``create_forward_many_to_many_manager()`` defined below. """ def __init__(self, rel): self.rel = rel self.field = rel.field @cached_property def related_manager_cls(self): related_model = self.rel.related_model return create_reverse_many_to_one_manager( related_model._default_manager.__class__, self.rel, ) def __get__(self, instance, cls=None): """ Get the related objects through the reverse relation. With the example above, when getting ``parent.children``: - ``self`` is the descriptor managing the ``children`` attribute - ``instance`` is the ``parent`` instance - ``cls`` is the ``Parent`` class (unused) """ if instance is None: return self return self.related_manager_cls(instance) def _get_set_deprecation_msg_params(self): return ( 'reverse side of a related set', self.rel.get_accessor_name(), ) def __set__(self, instance, value): raise TypeError( 'Direct assignment to the %s is prohibited. Use %s.set() instead.' % self._get_set_deprecation_msg_params(), ) def create_reverse_many_to_one_manager(superclass, rel): """ Create a manager for the reverse side of a many-to-one relation. This manager subclasses another manager, generally the default manager of the related model, and adds behaviors specific to many-to-one relations. """ class RelatedManager(superclass): def __init__(self, instance): super().__init__() self.instance = instance self.model = rel.related_model self.field = rel.field self.core_filters = {self.field.name: instance} def __call__(self, *, manager): manager = getattr(self.model, manager) manager_class = create_reverse_many_to_one_manager(manager.__class__, rel) return manager_class(self.instance) do_not_call_in_templates = True def _apply_rel_filters(self, queryset): """ Filter the queryset for the instance this manager is bound to. 
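            Besides applying ``self.core_filters``, register the instance as
            a known related object so that reading the foreign key on a result
            (e.g. ``child.parent``) doesn't hit the database again.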
""" db = self._db or router.db_for_read(self.model, instance=self.instance) empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls queryset._add_hints(instance=self.instance) if self._db: queryset = queryset.using(self._db) queryset = queryset.filter(**self.core_filters) for field in self.field.foreign_related_fields: val = getattr(self.instance, field.attname) if val is None or (val == '' and empty_strings_as_null): return queryset.none() if self.field.many_to_one: # Guard against field-like objects such as GenericRelation # that abuse create_reverse_many_to_one_manager() with reverse # one-to-many relationships instead and break known related # objects assignment. try: target_field = self.field.target_field except FieldError: # The relationship has multiple target fields. Use a tuple # for related object id. rel_obj_id = tuple([ getattr(self.instance, target_field.attname) for target_field in self.field.get_path_info()[-1].target_fields ]) else: rel_obj_id = getattr(self.instance, target_field.attname) queryset._known_related_objects = {self.field: {rel_obj_id: self.instance}} return queryset def _remove_prefetched_objects(self): try: self.instance._prefetched_objects_cache.pop(self.field.remote_field.get_cache_name()) except (AttributeError, KeyError): pass # nothing to clear from cache def get_queryset(self): try: return self.instance._prefetched_objects_cache[self.field.remote_field.get_cache_name()] except (AttributeError, KeyError): queryset = super().get_queryset() return self._apply_rel_filters(queryset) def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = super().get_queryset() queryset._add_hints(instance=instances[0]) queryset = queryset.using(queryset._db or self._db) rel_obj_attr = self.field.get_local_related_value instance_attr = self.field.get_foreign_related_value instances_dict = {instance_attr(inst): inst for inst in instances} query = {'%s__in' % self.field.name: instances} queryset = queryset.filter(**query) # Since we just bypassed this class' get_queryset(), we must manage # the reverse relation manually. for rel_obj in queryset: instance = instances_dict[rel_obj_attr(rel_obj)] setattr(rel_obj, self.field.name, instance) cache_name = self.field.remote_field.get_cache_name() return queryset, rel_obj_attr, instance_attr, False, cache_name, False def add(self, *objs, bulk=True): self._remove_prefetched_objects() objs = list(objs) db = router.db_for_write(self.model, instance=self.instance) def check_and_update_obj(obj): if not isinstance(obj, self.model): raise TypeError("'%s' instance expected, got %r" % ( self.model._meta.object_name, obj, )) setattr(obj, self.field.name, self.instance) if bulk: pks = [] for obj in objs: check_and_update_obj(obj) if obj._state.adding or obj._state.db != db: raise ValueError( "%r instance isn't saved. Use bulk=False or save " "the object first." 
% obj ) pks.append(obj.pk) self.model._base_manager.using(db).filter(pk__in=pks).update(**{ self.field.name: self.instance, }) else: with transaction.atomic(using=db, savepoint=False): for obj in objs: check_and_update_obj(obj) obj.save() add.alters_data = True def create(self, **kwargs): kwargs[self.field.name] = self.instance db = router.db_for_write(self.model, instance=self.instance) return super(RelatedManager, self.db_manager(db)).create(**kwargs) create.alters_data = True def get_or_create(self, **kwargs): kwargs[self.field.name] = self.instance db = router.db_for_write(self.model, instance=self.instance) return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs) get_or_create.alters_data = True def update_or_create(self, **kwargs): kwargs[self.field.name] = self.instance db = router.db_for_write(self.model, instance=self.instance) return super(RelatedManager, self.db_manager(db)).update_or_create(**kwargs) update_or_create.alters_data = True # remove() and clear() are only provided if the ForeignKey can have a value of null. if rel.field.null: def remove(self, *objs, bulk=True): if not objs: return val = self.field.get_foreign_related_value(self.instance) old_ids = set() for obj in objs: # Is obj actually part of this descriptor set? if self.field.get_local_related_value(obj) == val: old_ids.add(obj.pk) else: raise self.field.remote_field.model.DoesNotExist( "%r is not related to %r." % (obj, self.instance) ) self._clear(self.filter(pk__in=old_ids), bulk) remove.alters_data = True def clear(self, *, bulk=True): self._clear(self, bulk) clear.alters_data = True def _clear(self, queryset, bulk): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) queryset = queryset.using(db) if bulk: # `QuerySet.update()` is intrinsically atomic. queryset.update(**{self.field.name: None}) else: with transaction.atomic(using=db, savepoint=False): for obj in queryset: setattr(obj, self.field.name, None) obj.save(update_fields=[self.field.name]) _clear.alters_data = True def set(self, objs, *, bulk=True, clear=False): # Force evaluation of `objs` in case it's a queryset whose value # could be affected by `manager.clear()`. Refs #19816. objs = tuple(objs) if self.field.null: db = router.db_for_write(self.model, instance=self.instance) with transaction.atomic(using=db, savepoint=False): if clear: self.clear(bulk=bulk) self.add(*objs, bulk=bulk) else: old_objs = set(self.using(db).all()) new_objs = [] for obj in objs: if obj in old_objs: old_objs.remove(obj) else: new_objs.append(obj) self.remove(*old_objs, bulk=bulk) self.add(*new_objs, bulk=bulk) else: self.add(*objs, bulk=bulk) set.alters_data = True return RelatedManager class ManyToManyDescriptor(ReverseManyToOneDescriptor): """ Accessor to the related objects manager on the forward and reverse sides of a many-to-many relation. In the example:: class Pizza(Model): toppings = ManyToManyField(Topping, related_name='pizzas') ``Pizza.toppings`` and ``Topping.pizzas`` are ``ManyToManyDescriptor`` instances. Most of the implementation is delegated to a dynamically defined manager class built by ``create_forward_many_to_many_manager()`` defined below. """ def __init__(self, rel, reverse=False): super().__init__(rel) self.reverse = reverse @property def through(self): # through is provided so that you have easy access to the through # model (Book.authors.through) for inlines, etc. This is done as # a property to ensure that the fully resolved value is returned. 
return self.rel.through @cached_property def related_manager_cls(self): related_model = self.rel.related_model if self.reverse else self.rel.model return create_forward_many_to_many_manager( related_model._default_manager.__class__, self.rel, reverse=self.reverse, ) def _get_set_deprecation_msg_params(self): return ( '%s side of a many-to-many set' % ('reverse' if self.reverse else 'forward'), self.rel.get_accessor_name() if self.reverse else self.field.name, ) def create_forward_many_to_many_manager(superclass, rel, reverse): """ Create a manager for the either side of a many-to-many relation. This manager subclasses another manager, generally the default manager of the related model, and adds behaviors specific to many-to-many relations. """ class ManyRelatedManager(superclass): def __init__(self, instance=None): super().__init__() self.instance = instance if not reverse: self.model = rel.model self.query_field_name = rel.field.related_query_name() self.prefetch_cache_name = rel.field.name self.source_field_name = rel.field.m2m_field_name() self.target_field_name = rel.field.m2m_reverse_field_name() self.symmetrical = rel.symmetrical else: self.model = rel.related_model self.query_field_name = rel.field.name self.prefetch_cache_name = rel.field.related_query_name() self.source_field_name = rel.field.m2m_reverse_field_name() self.target_field_name = rel.field.m2m_field_name() self.symmetrical = False self.through = rel.through self.reverse = reverse self.source_field = self.through._meta.get_field(self.source_field_name) self.target_field = self.through._meta.get_field(self.target_field_name) self.core_filters = {} self.pk_field_names = {} for lh_field, rh_field in self.source_field.related_fields: core_filter_key = '%s__%s' % (self.query_field_name, rh_field.name) self.core_filters[core_filter_key] = getattr(instance, rh_field.attname) self.pk_field_names[lh_field.name] = rh_field.name self.related_val = self.source_field.get_foreign_related_value(instance) if None in self.related_val: raise ValueError('"%r" needs to have a value for field "%s" before ' 'this many-to-many relationship can be used.' % (instance, self.pk_field_names[self.source_field_name])) # Even if this relation is not to pk, we require still pk value. # The wish is that the instance has been already saved to DB, # although having a pk value isn't a guarantee of that. if instance.pk is None: raise ValueError("%r instance needs to have a primary key value before " "a many-to-many relationship can be used." % instance.__class__.__name__) def __call__(self, *, manager): manager = getattr(self.model, manager) manager_class = create_forward_many_to_many_manager(manager.__class__, rel, reverse) return manager_class(instance=self.instance) do_not_call_in_templates = True def _build_remove_filters(self, removed_vals): filters = Q(**{self.source_field_name: self.related_val}) # No need to add a subquery condition if removed_vals is a QuerySet without # filters. removed_vals_filters = (not isinstance(removed_vals, QuerySet) or removed_vals._has_filters()) if removed_vals_filters: filters &= Q(**{'%s__in' % self.target_field_name: removed_vals}) if self.symmetrical: symmetrical_filters = Q(**{self.target_field_name: self.related_val}) if removed_vals_filters: symmetrical_filters &= Q( **{'%s__in' % self.source_field_name: removed_vals}) filters |= symmetrical_filters return filters def _apply_rel_filters(self, queryset): """ Filter the queryset for the instance this manager is bound to. 
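            The filter is driven by ``self.core_filters``, built in
            ``__init__`` from the relation's target fields, e.g.
            ``{'pizzas__id': <pizza pk>}`` when this manager was reached
            through ``pizza.toppings``.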
""" queryset._add_hints(instance=self.instance) if self._db: queryset = queryset.using(self._db) queryset._defer_next_filter = True return queryset._next_is_sticky().filter(**self.core_filters) def _remove_prefetched_objects(self): try: self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name) except (AttributeError, KeyError): pass # nothing to clear from cache def get_queryset(self): try: return self.instance._prefetched_objects_cache[self.prefetch_cache_name] except (AttributeError, KeyError): queryset = super().get_queryset() return self._apply_rel_filters(queryset) def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = super().get_queryset() queryset._add_hints(instance=instances[0]) queryset = queryset.using(queryset._db or self._db) query = {'%s__in' % self.query_field_name: instances} queryset = queryset._next_is_sticky().filter(**query) # M2M: need to annotate the query in order to get the primary model # that the secondary model was actually related to. We know that # there will already be a join on the join table, so we can just add # the select. # For non-autocreated 'through' models, can't assume we are # dealing with PK values. fk = self.through._meta.get_field(self.source_field_name) join_table = fk.model._meta.db_table connection = connections[queryset.db] qn = connection.ops.quote_name queryset = queryset.extra(select={ '_prefetch_related_val_%s' % f.attname: '%s.%s' % (qn(join_table), qn(f.column)) for f in fk.local_related_fields}) return ( queryset, lambda result: tuple( getattr(result, '_prefetch_related_val_%s' % f.attname) for f in fk.local_related_fields ), lambda inst: tuple( f.get_db_prep_value(getattr(inst, f.attname), connection) for f in fk.foreign_related_fields ), False, self.prefetch_cache_name, False, ) def add(self, *objs, through_defaults=None): self._remove_prefetched_objects() db = router.db_for_write(self.through, instance=self.instance) with transaction.atomic(using=db, savepoint=False): self._add_items( self.source_field_name, self.target_field_name, *objs, through_defaults=through_defaults, ) # If this is a symmetrical m2m relation to self, add the mirror # entry in the m2m table. if self.symmetrical: self._add_items( self.target_field_name, self.source_field_name, *objs, through_defaults=through_defaults, ) add.alters_data = True def remove(self, *objs): self._remove_prefetched_objects() self._remove_items(self.source_field_name, self.target_field_name, *objs) remove.alters_data = True def clear(self): db = router.db_for_write(self.through, instance=self.instance) with transaction.atomic(using=db, savepoint=False): signals.m2m_changed.send( sender=self.through, action="pre_clear", instance=self.instance, reverse=self.reverse, model=self.model, pk_set=None, using=db, ) self._remove_prefetched_objects() filters = self._build_remove_filters(super().get_queryset().using(db)) self.through._default_manager.using(db).filter(filters).delete() signals.m2m_changed.send( sender=self.through, action="post_clear", instance=self.instance, reverse=self.reverse, model=self.model, pk_set=None, using=db, ) clear.alters_data = True def set(self, objs, *, clear=False, through_defaults=None): # Force evaluation of `objs` in case it's a queryset whose value # could be affected by `manager.clear()`. Refs #19816. 
objs = tuple(objs) db = router.db_for_write(self.through, instance=self.instance) with transaction.atomic(using=db, savepoint=False): if clear: self.clear() self.add(*objs, through_defaults=through_defaults) else: old_ids = set(self.using(db).values_list(self.target_field.target_field.attname, flat=True)) new_objs = [] for obj in objs: fk_val = ( self.target_field.get_foreign_related_value(obj)[0] if isinstance(obj, self.model) else obj ) if fk_val in old_ids: old_ids.remove(fk_val) else: new_objs.append(obj) self.remove(*old_ids) self.add(*new_objs, through_defaults=through_defaults) set.alters_data = True def create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs) self.add(new_obj, through_defaults=through_defaults) return new_obj create.alters_data = True def get_or_create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs) # We only need to add() if created because if we got an object back # from get() then the relationship already exists. if created: self.add(obj, through_defaults=through_defaults) return obj, created get_or_create.alters_data = True def update_or_create(self, *, through_defaults=None, **kwargs): db = router.db_for_write(self.instance.__class__, instance=self.instance) obj, created = super(ManyRelatedManager, self.db_manager(db)).update_or_create(**kwargs) # We only need to add() if created because if we got an object back # from get() then the relationship already exists. if created: self.add(obj, through_defaults=through_defaults) return obj, created update_or_create.alters_data = True def _get_target_ids(self, target_field_name, objs): """ Return the set of ids of `objs` that the target field references. """ from django.db.models import Model target_ids = set() target_field = self.through._meta.get_field(target_field_name) for obj in objs: if isinstance(obj, self.model): if not router.allow_relation(obj, self.instance): raise ValueError( 'Cannot add "%r": instance is on database "%s", ' 'value is on database "%s"' % (obj, self.instance._state.db, obj._state.db) ) target_id = target_field.get_foreign_related_value(obj)[0] if target_id is None: raise ValueError( 'Cannot add "%r": the value for field "%s" is None' % (obj, target_field_name) ) target_ids.add(target_id) elif isinstance(obj, Model): raise TypeError( "'%s' instance expected, got %r" % (self.model._meta.object_name, obj) ) else: target_ids.add(obj) return target_ids def _get_missing_target_ids(self, source_field_name, target_field_name, db, target_ids): """ Return the subset of ids of `objs` that aren't already assigned to this relationship. """ vals = self.through._default_manager.using(db).values_list( target_field_name, flat=True ).filter(**{ source_field_name: self.related_val[0], '%s__in' % target_field_name: target_ids, }) return target_ids.difference(vals) def _get_add_plan(self, db, source_field_name): """ Return a boolean triple of the way the add should be performed. The first element is whether or not bulk_create(ignore_conflicts) can be used, the second whether or not signals must be sent, and the third element is whether or not the immediate bulk insertion with conflicts ignored can be performed. 
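            Callers unpack the triple as
            ``can_ignore_conflicts, must_send_signals, can_fast_add``.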
""" # Conflicts can be ignored when the intermediary model is # auto-created as the only possible collision is on the # (source_id, target_id) tuple. The same assertion doesn't hold for # user-defined intermediary models as they could have other fields # causing conflicts which must be surfaced. can_ignore_conflicts = ( connections[db].features.supports_ignore_conflicts and self.through._meta.auto_created is not False ) # Don't send the signal when inserting duplicate data row # for symmetrical reverse entries. must_send_signals = (self.reverse or source_field_name == self.source_field_name) and ( signals.m2m_changed.has_listeners(self.through) ) # Fast addition through bulk insertion can only be performed # if no m2m_changed listeners are connected for self.through # as they require the added set of ids to be provided via # pk_set. return can_ignore_conflicts, must_send_signals, (can_ignore_conflicts and not must_send_signals) def _add_items(self, source_field_name, target_field_name, *objs, through_defaults=None): # source_field_name: the PK fieldname in join table for the source object # target_field_name: the PK fieldname in join table for the target object # *objs - objects to add. Either object instances, or primary keys of object instances. through_defaults = through_defaults or {} # If there aren't any objects, there is nothing to do. if objs: target_ids = self._get_target_ids(target_field_name, objs) db = router.db_for_write(self.through, instance=self.instance) can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(db, source_field_name) if can_fast_add: self.through._default_manager.using(db).bulk_create([ self.through(**{ '%s_id' % source_field_name: self.related_val[0], '%s_id' % target_field_name: target_id, }) for target_id in target_ids ], ignore_conflicts=True) return missing_target_ids = self._get_missing_target_ids( source_field_name, target_field_name, db, target_ids ) with transaction.atomic(using=db, savepoint=False): if must_send_signals: signals.m2m_changed.send( sender=self.through, action='pre_add', instance=self.instance, reverse=self.reverse, model=self.model, pk_set=missing_target_ids, using=db, ) # Add the ones that aren't there already. self.through._default_manager.using(db).bulk_create([ self.through(**through_defaults, **{ '%s_id' % source_field_name: self.related_val[0], '%s_id' % target_field_name: target_id, }) for target_id in missing_target_ids ], ignore_conflicts=can_ignore_conflicts) if must_send_signals: signals.m2m_changed.send( sender=self.through, action='post_add', instance=self.instance, reverse=self.reverse, model=self.model, pk_set=missing_target_ids, using=db, ) def _remove_items(self, source_field_name, target_field_name, *objs): # source_field_name: the PK colname in join table for the source object # target_field_name: the PK colname in join table for the target object # *objs - objects to remove. Either object instances, or primary # keys of object instances. if not objs: return # Check that all the objects are of the right type old_ids = set() for obj in objs: if isinstance(obj, self.model): fk_val = self.target_field.get_foreign_related_value(obj)[0] old_ids.add(fk_val) else: old_ids.add(obj) db = router.db_for_write(self.through, instance=self.instance) with transaction.atomic(using=db, savepoint=False): # Send a signal to the other end if need be. 
signals.m2m_changed.send( sender=self.through, action="pre_remove", instance=self.instance, reverse=self.reverse, model=self.model, pk_set=old_ids, using=db, ) target_model_qs = super().get_queryset() if target_model_qs._has_filters(): old_vals = target_model_qs.using(db).filter(**{ '%s__in' % self.target_field.target_field.attname: old_ids}) else: old_vals = old_ids filters = self._build_remove_filters(old_vals) self.through._default_manager.using(db).filter(filters).delete() signals.m2m_changed.send( sender=self.through, action="post_remove", instance=self.instance, reverse=self.reverse, model=self.model, pk_set=old_ids, using=db, ) return ManyRelatedManager
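

# A minimal usage sketch (illustrative only, not part of this module) tying
# the managers above together. The Pizza/Topping models are hypothetical:
#
#     class Topping(models.Model):
#         name = models.CharField(max_length=50)
#
#     class Pizza(models.Model):
#         toppings = models.ManyToManyField(Topping, related_name='pizzas')
#
#     pizza.toppings.add(topping)    # ManyRelatedManager._add_items()
#     pizza.toppings.set([topping])  # diffing remove()/add() when clear=False
#     topping.pizzas.all()           # reverse side of ManyToManyDescriptor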
97576633a7fafd8a0bf6a87baaca2252fcb6f5dbeeb1d50c1ba97e9781b108d4
""" "Rel objects" for related fields. "Rel objects" (for lack of a better name) carry information about the relation modeled by a related field and provide some utility functions. They're stored in the ``remote_field`` attribute of the field. They also act as reverse fields for the purposes of the Meta API because they're the closest concept currently available. """ from django.core import exceptions from django.utils.functional import cached_property from . import BLANK_CHOICE_DASH from .mixins import FieldCacheMixin class ForeignObjectRel(FieldCacheMixin): """ Used by ForeignObject to store information about the relation. ``_meta.get_fields()`` returns this class to provide access to the field flags for the reverse relation. """ # Field flags auto_created = True concrete = False editable = False is_relation = True # Reverse relations are always nullable (Django can't enforce that a # foreign key on the related model points to this model). null = True def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None, parent_link=False, on_delete=None): self.field = field self.model = to self.related_name = related_name self.related_query_name = related_query_name self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to self.parent_link = parent_link self.on_delete = on_delete self.symmetrical = False self.multiple = True # Some of the following cached_properties can't be initialized in # __init__ as the field doesn't have its model yet. Calling these methods # before field.contribute_to_class() has been called will result in # AttributeError @cached_property def hidden(self): return self.is_hidden() @cached_property def name(self): return self.field.related_query_name() @property def remote_field(self): return self.field @property def target_field(self): """ When filtering against this relation, return the field on the remote model against which the filtering should happen. """ target_fields = self.get_path_info()[-1].target_fields if len(target_fields) > 1: raise exceptions.FieldError("Can't use target_field for multicolumn relations.") return target_fields[0] @cached_property def related_model(self): if not self.field.model: raise AttributeError( "This property can't be accessed before self.field.contribute_to_class has been called.") return self.field.model @cached_property def many_to_many(self): return self.field.many_to_many @cached_property def many_to_one(self): return self.field.one_to_many @cached_property def one_to_many(self): return self.field.many_to_one @cached_property def one_to_one(self): return self.field.one_to_one def get_lookup(self, lookup_name): return self.field.get_lookup(lookup_name) def get_internal_type(self): return self.field.get_internal_type() @property def db_type(self): return self.field.db_type def __repr__(self): return '<%s: %s.%s>' % ( type(self).__name__, self.related_model._meta.app_label, self.related_model._meta.model_name, ) def get_choices( self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=(), ): """ Return choices with a default blank choices included, for use as <select> choices for this field. Analog of django.db.models.fields.Field.get_choices(), provided initially for utilization by RelatedFieldListFilter. 
""" limit_choices_to = limit_choices_to or self.limit_choices_to qs = self.related_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (x.pk, str(x)) for x in qs ] def is_hidden(self): """Should the related object be hidden?""" return bool(self.related_name) and self.related_name[-1] == '+' def get_joining_columns(self): return self.field.get_reverse_joining_columns() def get_extra_restriction(self, where_class, alias, related_alias): return self.field.get_extra_restriction(where_class, related_alias, alias) def set_field_name(self): """ Set the related field's name, this is not available until later stages of app loading, so set_field_name is called from set_attributes_from_rel() """ # By default foreign object doesn't relate to any remote field (for # example custom multicolumn joins currently have no remote field). self.field_name = None def get_accessor_name(self, model=None): # This method encapsulates the logic that decides what name to give an # accessor descriptor that retrieves related many-to-one or # many-to-many objects. It uses the lowercased object_name + "_set", # but this can be overridden with the "related_name" option. Due to # backwards compatibility ModelForms need to be able to provide an # alternate model. See BaseInlineFormSet.get_default_prefix(). opts = model._meta if model else self.related_model._meta model = model or self.related_model if self.multiple: # If this is a symmetrical m2m relation on self, there is no reverse accessor. if self.symmetrical and model == self.model: return None if self.related_name: return self.related_name return opts.model_name + ('_set' if self.multiple else '') def get_path_info(self, filtered_relation=None): return self.field.get_reverse_path_info(filtered_relation) def get_cache_name(self): """ Return the name of the cache key to use for storing an instance of the forward model on the reverse model. """ return self.get_accessor_name() class ManyToOneRel(ForeignObjectRel): """ Used by the ForeignKey field to store information about the relation. ``_meta.get_fields()`` returns this class to provide access to the field flags for the reverse relation. Note: Because we somewhat abuse the Rel objects by using them as reverse fields we get the funny situation where ``ManyToOneRel.many_to_one == False`` and ``ManyToOneRel.one_to_many == True``. This is unfortunate but the actual ManyToOneRel class is a private API and there is work underway to turn reverse relations into actual fields. """ def __init__(self, field, to, field_name, related_name=None, related_query_name=None, limit_choices_to=None, parent_link=False, on_delete=None): super().__init__( field, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, parent_link=parent_link, on_delete=on_delete, ) self.field_name = field_name def __getstate__(self): state = self.__dict__.copy() state.pop('related_model', None) return state def get_related_field(self): """ Return the Field in the 'to' object to which this relationship is tied. """ field = self.model._meta.get_field(self.field_name) if not field.concrete: raise exceptions.FieldDoesNotExist("No related field named '%s'" % self.field_name) return field def set_field_name(self): self.field_name = self.field_name or self.model._meta.pk.name class OneToOneRel(ManyToOneRel): """ Used by OneToOneField to store information about the relation. 
``_meta.get_fields()`` returns this class to provide access to the field flags for the reverse relation. """ def __init__(self, field, to, field_name, related_name=None, related_query_name=None, limit_choices_to=None, parent_link=False, on_delete=None): super().__init__( field, to, field_name, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, parent_link=parent_link, on_delete=on_delete, ) self.multiple = False class ManyToManyRel(ForeignObjectRel): """ Used by ManyToManyField to store information about the relation. ``_meta.get_fields()`` returns this class to provide access to the field flags for the reverse relation. """ def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None, symmetrical=True, through=None, through_fields=None, db_constraint=True): super().__init__( field, to, related_name=related_name, related_query_name=related_query_name, limit_choices_to=limit_choices_to, ) if through and not db_constraint: raise ValueError("Can't supply a through model and db_constraint=False") self.through = through if through_fields and not through: raise ValueError("Cannot specify through_fields without a through model") self.through_fields = through_fields self.symmetrical = symmetrical self.db_constraint = db_constraint def get_related_field(self): """ Return the field in the 'to' object to which this relationship is tied. Provided for symmetry with ManyToOneRel. """ opts = self.through._meta if self.through_fields: field = opts.get_field(self.through_fields[0]) else: for field in opts.fields: rel = getattr(field, 'remote_field', None) if rel and rel.model == self.model: break return field.foreign_related_fields[0]
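

# Illustrative only (not part of this module): given the hypothetical models
#
#     class Parent(models.Model):
#         pass
#
#     class Child(models.Model):
#         parent = models.ForeignKey(Parent, on_delete=models.CASCADE)
#
# ``Child._meta.get_field('parent').remote_field`` is a ManyToOneRel, and the
# same object is returned as a reverse "field" by ``Parent._meta.get_fields()``,
# which is how the Meta API exposes the relation from the other side.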
0c4d1319ea46a5972d1ef1dc39441d714093cf451be9f832367d428aa9aa145f
import collections import re from functools import partial from itertools import chain from django.core.exceptions import EmptyResultSet, FieldError from django.db.models.constants import LOOKUP_SEP from django.db.models.expressions import OrderBy, Random, RawSQL, Ref, Value from django.db.models.functions import Cast from django.db.models.query_utils import ( Q, QueryWrapper, select_related_descend, ) from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE, ) from django.db.models.sql.query import Query, get_order_dir from django.db.transaction import TransactionManagementError from django.db.utils import DatabaseError, NotSupportedError from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLCompiler: def __init__(self, query, connection, using): self.query = query self.connection = connection self.using = using self.quote_cache = {'*': '*'} # The select, klass_info, and annotations are needed by QuerySet.iterator() # these are set as a side-effect of executing the query. Note that we calculate # separately a list of extra select columns needed for grammatical correctness # of the query, but these columns are not included in self.select. self.select = None self.annotation_col_map = None self.klass_info = None # Multiline ordering SQL clause may appear from RawSQL. self.ordering_parts = re.compile(r'^(.*)\s(ASC|DESC)(.*)', re.MULTILINE | re.DOTALL) self._meta_ordering = None def setup_query(self): if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map): self.query.get_initial_alias() self.select, self.klass_info, self.annotation_col_map = self.get_select() self.col_count = len(self.select) def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. 
Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. for expr, _, _ in select: cols = expr.get_group_by_cols() for col in cols: expressions.append(col) for expr, (sql, params, is_ref) in order_by: # Skip References to the select clause, as all expressions in the # select clause are already part of the group by. if not expr.contains_aggregate and not is_ref: expressions.extend(expr.get_source_expressions()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result def collapse_group_by(self, expressions, having): # If the DB can group by primary key, then group by the primary key of # query's main model. Note that for PostgreSQL the GROUP BY clause must # include the primary key of every table, but for MySQL it is enough to # have the main table's primary key. if self.connection.features.allows_group_by_pk: # Determine if the main model's primary key is in the query. pk = None for expr in expressions: # Is this a reference to query's base table primary key? If the # expression isn't a Col-like, then skip the expression. if (getattr(expr, 'target', None) == self.query.model._meta.pk and getattr(expr, 'alias', None) == self.query.base_table): pk = expr break # If the main model's primary key is in the query, group by that # field, HAVING expressions, and expressions associated with tables # that don't have a primary key included in the grouped columns. if pk: pk_aliases = { expr.alias for expr in expressions if hasattr(expr, 'target') and expr.target.primary_key } expressions = [pk] + [ expr for expr in expressions if expr in having or ( getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases ) ] elif self.connection.features.allows_group_by_selected_pks: # Filter out all expressions associated with a table's primary key # present in the grouped columns. This is done by identifying all # tables that have their primary key included in the grouped # columns and removing non-primary key columns referring to them. 
# Unmanaged models are excluded because they could be representing # database views on which the optimization might not be allowed. pks = { expr for expr in expressions if ( hasattr(expr, 'target') and expr.target.primary_key and self.connection.features.allows_group_by_selected_pks_on_model(expr.target.model) ) } aliases = {expr.alias for expr in pks} expressions = [ expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases ] return expressions def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations def get_order_by(self): """ Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: asc, desc = ORDER_DIR['ASC'] else: asc, desc = ORDER_DIR['DESC'] order_by = [] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. 
field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() order_by.append((field, False)) continue if field == '?': # random order_by.append((OrderBy(Random()), False)) continue col, order = get_order_dir(field, asc) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause order_by.append(( OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending), True)) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) order_by.append((OrderBy(expr, descending=descending), False)) continue if '.' in field: # This came in through an extra(order_by=...) addition. Pass it # on verbatim. table, col = col.split('.', 1) order_by.append(( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending ), False)) continue if not self.query.extra or col not in self.query.extra: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. order_by.extend(self.find_ordering_name( field, self.query.get_meta(), default_order=asc)) else: if col not in self.query.extra_select: order_by.append(( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False)) else: order_by.append(( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True)) result = [] seen = set() for expr, is_ref in order_by: resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator: src = resolved.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias: continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause without an alias to # the selected columns. self.query.add_select_col(src) resolved.set_source_expressions([RawSQL('%d' % len(self.query.select), ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. 
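            # E.g. '"name" ASC' and '"name" DESC' both strip to '"name"'
            # here, so a second occurrence of the column is skipped below as
            # a duplicate.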
without_ordering = self.ordering_parts.search(sql).group(1) params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result def get_extra_select(self, order_by, select): extra_select = [] if self.query.distinct and not self.query.distinct_fields: select_sql = [t[1] for t in select] for expr, (sql, params, is_ref) in order_by: without_ordering = self.ordering_parts.search(sql).group(1) if not is_ref and (without_ordering, params) not in select_sql: extra_select.append((expr, (without_ordering, params), None)) return extra_select def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( name in self.query.external_aliases and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r def compile(self, node): vendor_impl = getattr(node, 'as_' + self.connection.vendor, None) if vendor_impl: sql, params = vendor_impl(self, self.connection) else: sql, params = node.as_sql(self, self.connection) return sql, params def get_combinator_sql(self, combinator, all): features = self.connection.features compilers = [ query.get_compiler(self.using, self.connection) for query in self.query.combined_queries if not query.is_empty() ] if not features.supports_slicing_ordering_in_compound: for query, compiler in zip(self.query.combined_queries, compilers): if query.low_mark or query.high_mark: raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.') if compiler.get_order_by(): raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.') parts = () for compiler in compilers: try: # If the columns list is limited, then all combined queries # must have the same columns list. Set the selects defined on # the query on all combined queries, if not already set. if not compiler.query.values_select and self.query.values_select: compiler.query = compiler.query.clone() compiler.query.set_values(( *self.query.extra_select, *self.query.values_select, *self.query.annotation_select, )) part_sql, part_args = compiler.as_sql() if compiler.query.combinator: # Wrap in a subquery if wrapping in parentheses isn't # supported. if not features.supports_parentheses_in_compound: part_sql = 'SELECT * FROM ({})'.format(part_sql) # Add parentheses when combining with compound query if not # already added for all compound queries. elif not features.supports_slicing_ordering_in_compound: part_sql = '({})'.format(part_sql) parts += ((part_sql, part_args),) except EmptyResultSet: # Omit the empty queryset with UNION and with DIFFERENCE if the # first queryset is nonempty. 
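                # E.g. ``qs.union(Model.objects.filter(pk__in=[]))`` should
                # still yield the rows of ``qs``; EmptyResultSet propagates
                # only when every part of the union turns out to be empty.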
if combinator == 'union' or (combinator == 'difference' and parts): continue raise if not parts: raise EmptyResultSet combinator_sql = self.connection.ops.set_operators[combinator] if all and combinator == 'union': combinator_sql += ' ALL' braces = '({})' if features.supports_slicing_ordering_in_compound else '{}' sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts)) result = [' {} '.format(combinator_sql).join(sql_parts)] params = [] for part in args_parts: params.extend(part) return result, params def as_sql(self, with_limits=True, with_col_aliases=False): """ Create the SQL for this query. Return the SQL string and list of parameters. If 'with_limits' is False, any limit/offset information is not included in the query. """ refcounts_before = self.query.alias_refcount.copy() try: extra_select, order_by, group_by = self.pre_sql_setup() for_update_part = None # Is a LIMIT/OFFSET clause needed? with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark) combinator = self.query.combinator features = self.connection.features if combinator: if not getattr(features, 'supports_select_{}'.format(combinator)): raise NotSupportedError('{} is not supported on this database backend.'.format(combinator)) result, params = self.get_combinator_sql(combinator, self.query.combinator_all) else: distinct_fields, distinct_params = self.get_distinct() # This must come after 'select', 'ordering', and 'distinct' # (see docstring of get_from_clause() for details). from_, f_params = self.get_from_clause() where, w_params = self.compile(self.where) if self.where is not None else ("", []) having, h_params = self.compile(self.having) if self.having is not None else ("", []) result = ['SELECT'] params = [] if self.query.distinct: distinct_result, distinct_params = self.connection.ops.distinct_sql( distinct_fields, distinct_params, ) result += distinct_result params += distinct_params out_cols = [] col_idx = 1 for _, (s_sql, s_params), alias in self.select + extra_select: if alias: s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias)) elif with_col_aliases: s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx) col_idx += 1 params.extend(s_params) out_cols.append(s_sql) result += [', '.join(out_cols), 'FROM', *from_] params.extend(f_params) if self.query.select_for_update and self.connection.features.has_select_for_update: if self.connection.get_autocommit(): raise TransactionManagementError('select_for_update cannot be used outside of a transaction.') if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit: raise NotSupportedError( 'LIMIT/OFFSET is not supported with ' 'select_for_update on this database backend.' ) nowait = self.query.select_for_update_nowait skip_locked = self.query.select_for_update_skip_locked of = self.query.select_for_update_of # If it's a NOWAIT/SKIP LOCKED/OF query but the backend # doesn't support it, raise NotSupportedError to prevent a # possible deadlock. 
if nowait and not self.connection.features.has_select_for_update_nowait: raise NotSupportedError('NOWAIT is not supported on this database backend.') elif skip_locked and not self.connection.features.has_select_for_update_skip_locked: raise NotSupportedError('SKIP LOCKED is not supported on this database backend.') elif of and not self.connection.features.has_select_for_update_of: raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.') for_update_part = self.connection.ops.for_update_sql( nowait=nowait, skip_locked=skip_locked, of=self.get_select_for_update_of_arguments(), ) if for_update_part and self.connection.features.for_update_after_from: result.append(for_update_part) if where: result.append('WHERE %s' % where) params.extend(w_params) grouping = [] for g_sql, g_params in group_by: grouping.append(g_sql) params.extend(g_params) if grouping: if distinct_fields: raise NotImplementedError('annotate() + distinct(fields) is not implemented.') order_by = order_by or self.connection.ops.force_no_ordering() result.append('GROUP BY %s' % ', '.join(grouping)) if self._meta_ordering: order_by = None if having: result.append('HAVING %s' % having) params.extend(h_params) if self.query.explain_query: result.insert(0, self.connection.ops.explain_query_prefix( self.query.explain_format, **self.query.explain_options )) if order_by: ordering = [] for _, (o_sql, o_params, _) in order_by: ordering.append(o_sql) params.extend(o_params) result.append('ORDER BY %s' % ', '.join(ordering)) if with_limit_offset: result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark)) if for_update_part and not self.connection.features.for_update_after_from: result.append(for_update_part) if self.query.subquery and extra_select: # If the query is used as a subquery, the extra selects would # result in more columns than the left-hand side expression is # expecting. This can happen when a subquery uses a combination # of order_by() and distinct(), forcing the ordering expressions # to be selected as well. Wrap the query in another subquery # to exclude extraneous selects. sub_selects = [] sub_params = [] for index, (select, _, alias) in enumerate(self.select, start=1): if not alias and with_col_aliases: alias = 'col%d' % index if alias: sub_selects.append("%s.%s" % ( self.connection.ops.quote_name('subquery'), self.connection.ops.quote_name(alias), )) else: select_clone = select.relabeled_clone({select.alias: 'subquery'}) subselect, subparams = select_clone.as_sql(self, self.connection) sub_selects.append(subselect) sub_params.extend(subparams) return 'SELECT %s FROM (%s) subquery' % ( ', '.join(sub_selects), ' '.join(result), ), tuple(sub_params + params) return ' '.join(result), tuple(params) finally: # Finally do cleanup - get rid of the joins we created above. self.query.reset_refcounts(refcounts_before) def get_default_columns(self, start_alias=None, opts=None, from_parent=None): """ Compute the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. Return a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, return a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). 
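        In practice the returned list contains column expressions; they are
        compiled into quoted SQL strings later, in get_select().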
""" result = [] if opts is None: opts = self.query.get_meta() only_load = self.deferred_to_columns() start_alias = start_alias or self.query.get_initial_alias() # The 'seen_models' is used to optimize checking the needed parent # alias for a given field. This also includes None -> start_alias to # be used by local fields. seen_models = {None: start_alias} for field in opts.concrete_fields: model = field.model._meta.concrete_model # A proxy model will have a different model and concrete_model. We # will assign None if the field belongs to this model. if model == opts.model: model = None if from_parent and model is not None and issubclass( from_parent._meta.concrete_model, model._meta.concrete_model): # Avoid loading data for already loaded parents. # We end up here in the case select_related() resolution # proceeds from parent model to child model. In that case the # parent model data is already present in the SELECT clause, # and we want to avoid reloading the same data again. continue if field.model in only_load and field.attname not in only_load[field.model]: continue alias = self.query.join_parent_model(opts, model, start_alias, seen_models) column = field.get_col(alias) result.append(column) return result def get_distinct(self): """ Return a quoted list of fields to use in DISTINCT ON part of the query. This method can alter the tables in the query, and thus it must be called before get_from_clause(). """ result = [] params = [] opts = self.query.get_meta() for name in self.query.distinct_fields: parts = name.split(LOOKUP_SEP) _, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None) targets, alias, _ = self.query.trim_joins(targets, joins, path) for target in targets: if name in self.query.annotation_select: result.append(name) else: r, p = self.compile(transform_function(target, alias)) result.append(r) params.append(p) return result, params def find_ordering_name(self, name, opts, alias=None, default_order='ASC', already_seen=None): """ Return the table alias (the name might be ambiguous, the alias will not be) and column name for ordering by the given 'name' parameter. The 'name' is of the form 'field1__field2__...__fieldN'. """ name, order = get_order_dir(name, default_order) descending = order == 'DESC' pieces = name.split(LOOKUP_SEP) field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias) # If we get to this point and the field is a relation to another model, # append the default ordering for that model unless the attribute name # of the field is specified. if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name: # Firstly, avoid infinite loops. already_seen = already_seen or set() join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins) if join_tuple in already_seen: raise FieldError('Infinite loop caused by ordering.') already_seen.add(join_tuple) results = [] for item in opts.ordering: if hasattr(item, 'resolve_expression') and not isinstance(item, OrderBy): item = item.desc() if descending else item.asc() if isinstance(item, OrderBy): results.append((item, False)) continue results.extend(self.find_ordering_name(item, opts, alias, order, already_seen)) return results targets, alias, _ = self.query.trim_joins(targets, joins, path) return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets] def _setup_joins(self, pieces, opts, alias): """ Helper method for get_order_by() and get_distinct(). 
get_ordering() and get_distinct() must produce same target columns on same input, as the prefixes of get_ordering() and get_distinct() must match. Executing SQL where this is not true is an error. """ alias = alias or self.query.get_initial_alias() field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias) alias = joins[-1] return field, targets, alias, joins, path, opts, transform_function def get_from_clause(self): """ Return a list of strings that are joined together to go after the "FROM" part of the query, as well as a list any extra parameters that need to be included. Subclasses, can override this to create a from-clause via a "select". This should only be called after any SQL construction methods that might change the tables that are needed. This means the select columns, ordering, and distinct must be done first. """ result = [] params = [] for alias in tuple(self.query.alias_map): if not self.query.alias_refcount[alias]: continue try: from_clause = self.query.alias_map[alias] except KeyError: # Extra tables can end up in self.tables, but not in the # alias_map if they aren't in a join. That's OK. We skip them. continue clause_sql, clause_params = self.compile(from_clause) result.append(clause_sql) params.extend(clause_params) for t in self.query.extra_tables: alias, _ = self.query.table_alias(t) # Only add the alias if it's not already present (the table_alias() # call increments the refcount, so an alias refcount of one means # this is the only reference). if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1: result.append(', %s' % self.quote_name_unless_alias(alias)) return result, params def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1, requested=None, restricted=None): """ Fill in the information needed for a select_related query. The current depth is measured as the number of connections away from the root model (for example, cur_depth=1 means we are looking at models with direct connections to the root model). """ def _get_field_choices(): direct_choices = (f.name for f in opts.fields if f.is_relation) reverse_choices = ( f.field.related_query_name() for f in opts.related_objects if f.field.unique ) return chain(direct_choices, reverse_choices, self.query._filtered_relations) related_klass_infos = [] if not restricted and cur_depth > self.query.max_depth: # We've recursed far enough; bail out. return related_klass_infos if not opts: opts = self.query.get_meta() root_alias = self.query.get_initial_alias() only_load = self.query.get_loaded_field_names() # Setup for the case when only particular related fields should be # included in the related selection. fields_found = set() if requested is None: restricted = isinstance(self.query.select_related, dict) if restricted: requested = self.query.select_related def get_related_klass_infos(klass_info, related_klass_infos): klass_info['related_klass_infos'] = related_klass_infos for f in opts.fields: field_model = f.model._meta.concrete_model fields_found.add(f.name) if restricted: next = requested.get(f.name, {}) if not f.is_relation: # If a non-related field is used like a relation, # or if a single non-relational field is given. if next or f.name in requested: raise FieldError( "Non-relational field given in select_related: '%s'. 
" "Choices are: %s" % ( f.name, ", ".join(_get_field_choices()) or '(none)', ) ) else: next = False if not select_related_descend(f, restricted, requested, only_load.get(field_model)): continue klass_info = { 'model': f.remote_field.model, 'field': f, 'reverse': False, 'local_setter': f.set_cached_value, 'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None, 'from_parent': False, } related_klass_infos.append(klass_info) select_fields = [] _, _, _, joins, _, _ = self.query.setup_joins( [f.name], opts, root_alias) alias = joins[-1] columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_klass_infos = self.get_related_selections( select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) if restricted: related_fields = [ (o.field, o.related_model) for o in opts.related_objects if o.field.unique and not o.many_to_many ] for f, model in related_fields: if not select_related_descend(f, restricted, requested, only_load.get(model), reverse=True): continue related_field_name = f.related_query_name() fields_found.add(related_field_name) join_info = self.query.setup_joins([related_field_name], opts, root_alias) alias = join_info.joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': f.remote_field.set_cached_value, 'remote_setter': f.set_cached_value, 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next = requested.get(f.related_query_name(), {}) next_klass_infos = self.get_related_selections( select, model._meta, alias, cur_depth + 1, next, restricted) get_related_klass_infos(klass_info, next_klass_infos) def local_setter(obj, from_obj): # Set a reverse fk object when relation is non-empty. if from_obj: f.remote_field.set_cached_value(from_obj, obj) def remote_setter(name, obj, from_obj): setattr(from_obj, name, obj) for name in list(requested): # Filtered relations work only on the topmost level. 
if cur_depth > 1: break if name in self.query._filtered_relations: fields_found.add(name) f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias) model = join_opts.model alias = joins[-1] from_parent = issubclass(model, opts.model) and model is not opts.model klass_info = { 'model': model, 'field': f, 'reverse': True, 'local_setter': local_setter, 'remote_setter': partial(remote_setter, name), 'from_parent': from_parent, } related_klass_infos.append(klass_info) select_fields = [] columns = self.get_default_columns( start_alias=alias, opts=model._meta, from_parent=opts.model, ) for col in columns: select_fields.append(len(select)) select.append((col, None)) klass_info['select_fields'] = select_fields next_requested = requested.get(name, {}) next_klass_infos = self.get_related_selections( select, opts=model._meta, root_alias=alias, cur_depth=cur_depth + 1, requested=next_requested, restricted=restricted, ) get_related_klass_infos(klass_info, next_klass_infos) fields_not_found = set(requested).difference(fields_found) if fields_not_found: invalid_fields = ("'%s'" % s for s in fields_not_found) raise FieldError( 'Invalid field name(s) given in select_related: %s. ' 'Choices are: %s' % ( ', '.join(invalid_fields), ', '.join(_get_field_choices()) or '(none)', ) ) return related_klass_infos def get_select_for_update_of_arguments(self): """ Return a quoted list of arguments for the SELECT FOR UPDATE OF part of the query. """ def _get_field_choices(): """Yield all allowed field paths in breadth-first search order.""" queue = collections.deque([(None, self.klass_info)]) while queue: parent_path, klass_info = queue.popleft() if parent_path is None: path = [] yield 'self' else: field = klass_info['field'] if klass_info['reverse']: field = field.remote_field path = parent_path + [field.name] yield LOOKUP_SEP.join(path) queue.extend( (path, klass_info) for klass_info in klass_info.get('related_klass_infos', []) ) result = [] invalid_names = [] for name in self.query.select_for_update_of: parts = [] if name == 'self' else name.split(LOOKUP_SEP) klass_info = self.klass_info for part in parts: for related_klass_info in klass_info.get('related_klass_infos', []): field = related_klass_info['field'] if related_klass_info['reverse']: field = field.remote_field if field.name == part: klass_info = related_klass_info break else: klass_info = None break if klass_info is None: invalid_names.append(name) continue select_index = klass_info['select_fields'][0] col = self.select[select_index][0] if self.connection.features.select_for_update_of_column: result.append(self.compile(col)[0]) else: result.append(self.quote_name_unless_alias(col.alias)) if invalid_names: raise FieldError( 'Invalid field name(s) given in select_for_update(of=(...)): %s. ' 'Only relational fields followed in the query are allowed. ' 'Choices are: %s.' % ( ', '.join(invalid_names), ', '.join(_get_field_choices()), ) ) return result def deferred_to_columns(self): """ Convert the self.deferred_loading data structure to mapping of table names to sets of column names which are to be loaded. Return the dictionary. 
""" columns = {} self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb) return columns def get_converters(self, expressions): converters = {} for i, expression in enumerate(expressions): if expression: backend_converters = self.connection.ops.get_db_converters(expression) field_converters = expression.get_db_converters(self.connection) if backend_converters or field_converters: converters[i] = (backend_converters + field_converters, expression) return converters def apply_converters(self, rows, converters): connection = self.connection converters = list(converters.items()) for row in map(list, rows): for pos, (convs, expression) in converters: value = row[pos] for converter in convs: value = converter(value, expression, connection) row[pos] = value yield row def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """Return an iterator over the results from executing this query.""" if results is None: results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size) fields = [s[0] for s in self.select[0:self.col_count]] converters = self.get_converters(fields) rows = chain.from_iterable(results) if converters: rows = self.apply_converters(rows, converters) if tuple_expected: rows = map(tuple, rows) return rows def has_results(self): """ Backends (e.g. NoSQL) can override this in order to use optimized versions of "query has any results." """ # This is always executed on a query clone, so we can modify self.query self.query.add_extra({'a': 1}, None, None, None, None, None) self.query.set_extra_mask(['a']) return bool(self.execute_sql(SINGLE)) def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE): """ Run the query against the database and return the result(s). The return value is a single data item if result_type is SINGLE, or an iterator over the results if the result_type is MULTI. result_type is either MULTI (use fetchmany() to retrieve all rows), SINGLE (only retrieve a single row), or None. In this last case, the cursor is returned if any query is executed, since it's used by subclasses such as InsertQuery). It's possible, however, that no query is needed, as the filters describe an empty set. In that case, None is returned, to avoid any unnecessary database interaction. """ result_type = result_type or NO_RESULTS try: sql, params = self.as_sql() if not sql: raise EmptyResultSet except EmptyResultSet: if result_type == MULTI: return iter([]) else: return if chunked_fetch: cursor = self.connection.chunked_cursor() else: cursor = self.connection.cursor() try: cursor.execute(sql, params) except Exception: # Might fail for server-side cursors (e.g. connection closed) cursor.close() raise if result_type == CURSOR: # Give the caller the cursor to process and close. return cursor if result_type == SINGLE: try: val = cursor.fetchone() if val: return val[0:self.col_count] return val finally: # done with the cursor cursor.close() if result_type == NO_RESULTS: cursor.close() return result = cursor_iter( cursor, self.connection.features.empty_fetchmany_value, self.col_count if self.has_extra_select else None, chunk_size, ) if not chunked_fetch or not self.connection.features.can_use_chunked_reads: try: # If we are using non-chunked reads, we return the same data # structure as normally, but ensure it is all read into memory # before going any further. Use chunked_fetch if requested, # unless the database doesn't support it. 
return list(result) finally: # done with the cursor cursor.close() return result def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name for index, select_col in enumerate(self.query.select): lhs_sql, lhs_params = self.compile(select_col) rhs = '%s.%s' % (qn(alias), qn2(columns[index])) self.query.where.add( QueryWrapper('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND') sql, params = self.as_sql() return 'EXISTS (%s)' % sql, params def explain_query(self): result = list(self.execute_sql()) # Some backends return 1 item tuples with strings, and others return # tuples with integers and strings. Flatten them out into strings. for row in result[0]: if not isinstance(row, str): yield ' '.join(str(c) for c in row) else: yield row class SQLInsertCompiler(SQLCompiler): returning_fields = None returning_params = tuple() def field_as_sql(self, field, val): """ Take a field and a value intended to be saved on that field, and return placeholder SQL and accompanying params. Check for raw values, expressions, and fields with get_placeholder() defined in that order. When field is None, consider the value raw and use it as the placeholder, with no corresponding parameters returned. """ if field is None: # A field value of None means the value is raw. sql, params = val, [] elif hasattr(val, 'as_sql'): # This is an expression, let's compile it. sql, params = self.compile(val) elif hasattr(field, 'get_placeholder'): # Some fields (e.g. geo fields) need special munging before # they can be inserted. sql, params = field.get_placeholder(val, self, self.connection), [val] else: # Return the common case for the placeholder sql, params = '%s', [val] # The following hook is only used by Oracle Spatial, which sometimes # needs to yield 'NULL' and [] as its placeholder and params instead # of '%s' and [None]. The 'NULL' placeholder is produced earlier by # OracleOperations.get_geom_placeholder(). The following line removes # the corresponding None parameter. See ticket #10888. params = self.connection.ops.modify_insert_params(sql, params) return sql, params def prepare_value(self, field, value): """ Prepare a value to be used in a query by resolving it if it is an expression and otherwise calling the field's get_db_prep_save(). """ if hasattr(value, 'resolve_expression'): value = value.resolve_expression(self.query, allow_joins=False, for_save=True) # Don't allow values containing Col expressions. They refer to # existing columns on a row, but in the case of insert the row # doesn't exist yet. if value.contains_column_references: raise ValueError( 'Failed to insert expression "%s" on %s. F() expressions ' 'can only be used to update, not to insert.' % (value, field) ) if value.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, value) ) if value.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query (%s=%r).' % (field.name, value) ) else: value = field.get_db_prep_save(value, connection=self.connection) return value def pre_save_val(self, field, obj): """ Get the given field's value off the given obj. pre_save() is used for things like auto_now on DateTimeField. Skip it if this is a raw query. 
""" if self.query.raw: return getattr(obj, field.attname) return field.pre_save(obj, add=True) def assemble_as_sql(self, fields, value_rows): """ Take a sequence of N fields and a sequence of M rows of values, and generate placeholder SQL and parameters for each field and value. Return a pair containing: * a sequence of M rows of N SQL placeholder strings, and * a sequence of M rows of corresponding parameter values. Each placeholder string may contain any number of '%s' interpolation strings, and each parameter row will contain exactly as many params as the total number of '%s's in the corresponding placeholder row. """ if not value_rows: return [], [] # list of (sql, [params]) tuples for each object to be saved # Shape: [n_objs][n_fields][2] rows_of_fields_as_sql = ( (self.field_as_sql(field, v) for field, v in zip(fields, row)) for row in value_rows ) # tuple like ([sqls], [[params]s]) for each object to be saved # Shape: [n_objs][2][n_fields] sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql) # Extract separate lists for placeholders and params. # Each of these has shape [n_objs][n_fields] placeholder_rows, param_rows = zip(*sql_and_param_pair_rows) # Params for each field are still lists, and need to be flattened. param_rows = [[p for ps in row for p in ps] for row in param_rows] return placeholder_rows, param_rows def as_sql(self): # We don't need quote_name_unless_alias() here, since these are all # going to be column names (so we can avoid the extra overhead). qn = self.connection.ops.quote_name opts = self.query.get_meta() insert_statement = self.connection.ops.insert_statement(ignore_conflicts=self.query.ignore_conflicts) result = ['%s %s' % (insert_statement, qn(opts.db_table))] fields = self.query.fields or [opts.pk] result.append('(%s)' % ', '.join(qn(f.column) for f in fields)) if self.query.fields: value_rows = [ [self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields] for obj in self.query.objs ] else: # An empty object. value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs] fields = [None] # Currently the backends just accept values when generating bulk # queries and generate their own placeholders. Doing that isn't # necessary and it should be possible to use placeholders and # expressions in bulk inserts too. can_bulk = (not self.returning_fields and self.connection.features.has_bulk_insert) placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows) ignore_conflicts_suffix_sql = self.connection.ops.ignore_conflicts_suffix_sql( ignore_conflicts=self.query.ignore_conflicts ) if self.returning_fields and self.connection.features.can_return_columns_from_insert: if self.connection.features.can_return_rows_from_bulk_insert: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) params = param_rows else: result.append("VALUES (%s)" % ", ".join(placeholder_rows[0])) params = [param_rows[0]] if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) # Skip empty r_sql to allow subclasses to customize behavior for # 3rd party backends. Refs #19096. 
r_sql, self.returning_params = self.connection.ops.return_insert_columns(self.returning_fields) if r_sql: result.append(r_sql) params += [self.returning_params] return [(" ".join(result), tuple(chain.from_iterable(params)))] if can_bulk: result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows)) if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [(" ".join(result), tuple(p for ps in param_rows for p in ps))] else: if ignore_conflicts_suffix_sql: result.append(ignore_conflicts_suffix_sql) return [ (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals) for p, vals in zip(placeholder_rows, param_rows) ] def execute_sql(self, returning_fields=None): assert not ( returning_fields and len(self.query.objs) != 1 and not self.connection.features.can_return_rows_from_bulk_insert ) self.returning_fields = returning_fields with self.connection.cursor() as cursor: for sql, params in self.as_sql(): cursor.execute(sql, params) if not self.returning_fields: return [] if self.connection.features.can_return_rows_from_bulk_insert and len(self.query.objs) > 1: return self.connection.ops.fetch_returned_insert_rows(cursor) if self.connection.features.can_return_columns_from_insert: assert len(self.query.objs) == 1 return self.connection.ops.fetch_returned_insert_columns(cursor, self.returning_params) return [self.connection.ops.last_insert_id( cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column )] class SQLDeleteCompiler(SQLCompiler): @cached_property def single_alias(self): return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1 def _as_sql(self, query): result = [ 'DELETE FROM %s' % self.quote_name_unless_alias(query.base_table) ] where, params = self.compile(query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(params) def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ if self.single_alias: return self._as_sql(self.query) innerq = self.query.clone() innerq.__class__ = Query innerq.clear_select_clause() pk = self.query.model._meta.pk innerq.select = [ pk.get_col(self.query.get_initial_alias()) ] outerq = Query(self.query.model) outerq.where = self.query.where_class() outerq.add_q(Q(pk__in=innerq)) return self._as_sql(outerq) class SQLUpdateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. """ self.pre_sql_setup() if not self.query.values: return '', () qn = self.quote_name_unless_alias values, update_params = [], [] for field, model, val in self.query.values: if hasattr(val, 'resolve_expression'): val = val.resolve_expression(self.query, allow_joins=False, for_save=True) if val.contains_aggregate: raise FieldError( 'Aggregate functions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) if val.contains_over_clause: raise FieldError( 'Window expressions are not allowed in this query ' '(%s=%r).' % (field.name, val) ) elif hasattr(val, 'prepare_database_save'): if field.remote_field: val = field.get_db_prep_save( val.prepare_database_save(field), connection=self.connection, ) else: raise TypeError( "Tried to update field %s with a model instance, %r. " "Use a value compatible with %s." % (field, val, field.__class__.__name__) ) else: val = field.get_db_prep_save(val, connection=self.connection) # Getting the placeholder for the field. 
if hasattr(field, 'get_placeholder'): placeholder = field.get_placeholder(val, self, self.connection) else: placeholder = '%s' name = field.column if hasattr(val, 'as_sql'): sql, params = self.compile(val) values.append('%s = %s' % (qn(name), placeholder % sql)) update_params.extend(params) elif val is not None: values.append('%s = %s' % (qn(name), placeholder)) update_params.append(val) else: values.append('%s = NULL' % qn(name)) table = self.query.base_table result = [ 'UPDATE %s SET' % qn(table), ', '.join(values), ] where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(update_params + params) def execute_sql(self, result_type): """ Execute the specified update. Return the number of rows affected by the primary update query. The "primary update query" is the first non-empty query that is executed. Row counts for any subsequent, related queries are not available. """ cursor = super().execute_sql(result_type) try: rows = cursor.rowcount if cursor else 0 is_empty = cursor is None finally: if cursor: cursor.close() for query in self.query.get_related_updates(): aux_rows = query.get_compiler(self.using).execute_sql(result_type) if is_empty and aux_rows: rows = aux_rows is_empty = False return rows def pre_sql_setup(self): """ If the update depends on results from other tables, munge the "where" conditions to match the format required for (portable) SQL updates. If multiple updates are required, pull out the id values to update at this point so that they don't change as a result of the progressive updates. """ refcounts_before = self.query.alias_refcount.copy() # Ensure base table is in the query self.query.get_initial_alias() count = self.query.count_active_tables() if not self.query.related_updates and count == 1: return query = self.query.chain(klass=Query) query.select_related = False query.clear_ordering(True) query.extra = {} query.select = [] query.add_fields([query.get_meta().pk.name]) super().pre_sql_setup() must_pre_select = count > 1 and not self.connection.features.update_can_self_select # Now we adjust the current query: reset the where clause and get rid # of all the tables we don't need (since they're in the sub-select). self.query.where = self.query.where_class() if self.query.related_updates or must_pre_select: # Either we're using the idents in multiple update queries (so # don't want them to change), or the db backend doesn't support # selecting from the updating table (e.g. MySQL). idents = [] for rows in query.get_compiler(self.using).execute_sql(MULTI): idents.extend(r[0] for r in rows) self.query.add_filter(('pk__in', idents)) self.query.related_ids = idents else: # The fast path. Filters and updates in one query. self.query.add_filter(('pk__in', query)) self.query.reset_refcounts(refcounts_before) class SQLAggregateCompiler(SQLCompiler): def as_sql(self): """ Create the SQL for this query. Return the SQL string and list of parameters. 
""" sql, params = [], [] for annotation in self.query.annotation_select.values(): ann_sql, ann_params = self.compile(annotation) ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params) sql.append(ann_sql) params.extend(ann_params) self.col_count = len(self.query.annotation_select) sql = ', '.join(sql) params = tuple(params) sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery) params = params + self.query.sub_params return sql, params def cursor_iter(cursor, sentinel, col_count, itersize): """ Yield blocks of rows from a cursor and ensure the cursor is closed when done. """ try: for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel): yield rows if col_count is None else [r[:col_count] for r in rows] finally: cursor.close()
e195545d083cabcb4550c1b3a62ddb3adb6137625f439e5af19cf7f214818c3d
""" Query subclasses which provide extra functionality beyond simple data retrieval. """ from django.core.exceptions import FieldError from django.db.models.query_utils import Q from django.db.models.sql.constants import ( CURSOR, GET_ITERATOR_CHUNK_SIZE, NO_RESULTS, ) from django.db.models.sql.query import Query __all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'AggregateQuery'] class DeleteQuery(Query): """A DELETE SQL query.""" compiler = 'SQLDeleteCompiler' def do_query(self, table, where, using): self.alias_map = {table: self.alias_map[table]} self.where = where cursor = self.get_compiler(using).execute_sql(CURSOR) return cursor.rowcount if cursor else 0 def delete_batch(self, pk_list, using): """ Set up and execute delete queries for all the objects in pk_list. More than one physical query may be executed if there are a lot of values in pk_list. """ # number of objects deleted num_deleted = 0 field = self.get_meta().pk for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE): self.where = self.where_class() self.add_q(Q( **{field.attname + '__in': pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]})) num_deleted += self.do_query(self.get_meta().db_table, self.where, using=using) return num_deleted class UpdateQuery(Query): """An UPDATE SQL query.""" compiler = 'SQLUpdateCompiler' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._setup_query() def _setup_query(self): """ Run on initialization and at the end of chaining. Any attributes that would normally be set in __init__() should go here instead. """ self.values = [] self.related_ids = None self.related_updates = {} def clone(self): obj = super().clone() obj.related_updates = self.related_updates.copy() return obj def update_batch(self, pk_list, values, using): self.add_update_values(values) for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE): self.where = self.where_class() self.add_q(Q(pk__in=pk_list[offset: offset + GET_ITERATOR_CHUNK_SIZE])) self.get_compiler(using).execute_sql(NO_RESULTS) def add_update_values(self, values): """ Convert a dictionary of field name to value mappings into an update query. This is the entry point for the public update() method on querysets. """ values_seq = [] for name, val in values.items(): field = self.get_meta().get_field(name) direct = not (field.auto_created and not field.concrete) or not field.concrete model = field.model._meta.concrete_model if not direct or (field.is_relation and field.many_to_many): raise FieldError( 'Cannot update model field %r (only non-relations and ' 'foreign keys permitted).' % field ) if model is not self.get_meta().concrete_model: self.add_related_update(model, field, val) continue values_seq.append((field, model, val)) return self.add_update_fields(values_seq) def add_update_fields(self, values_seq): """ Append a sequence of (field, model, value) triples to the internal list that will be used to generate the UPDATE query. Might be more usefully called add_update_targets() to hint at the extra information here. """ for field, model, val in values_seq: if hasattr(val, 'resolve_expression'): # Resolve expressions here so that annotations are no longer needed val = val.resolve_expression(self, allow_joins=False, for_save=True) self.values.append((field, model, val)) def add_related_update(self, model, field, value): """ Add (name, value) to an update query for an ancestor model. Update are coalesced so that only one update query per ancestor is run. 
""" self.related_updates.setdefault(model, []).append((field, None, value)) def get_related_updates(self): """ Return a list of query objects: one for each update required to an ancestor model. Each query will have the same filtering conditions as the current query but will only update a single table. """ if not self.related_updates: return [] result = [] for model, values in self.related_updates.items(): query = UpdateQuery(model) query.values = values if self.related_ids is not None: query.add_filter(('pk__in', self.related_ids)) result.append(query) return result class InsertQuery(Query): compiler = 'SQLInsertCompiler' def __init__(self, *args, ignore_conflicts=False, **kwargs): super().__init__(*args, **kwargs) self.fields = [] self.objs = [] self.ignore_conflicts = ignore_conflicts def insert_values(self, fields, objs, raw=False): self.fields = fields self.objs = objs self.raw = raw class AggregateQuery(Query): """ Take another query as a parameter to the FROM clause and only select the elements in the provided list. """ compiler = 'SQLAggregateCompiler' def add_subquery(self, query, using): query.subquery = True self.subquery, self.sub_params = query.get_compiler(using).as_sql(with_col_aliases=True)
6e6c9e43e3f6df83e89115f3d64df05f395604b29d36bee8bad7d0f07e203c09
import operator from django.db.backends.base.features import BaseDatabaseFeatures from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): empty_fetchmany_value = () allows_group_by_pk = True related_fields_match_type = True # MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME. allow_sliced_subqueries_with_in = False has_select_for_update = True supports_forward_references = False supports_regex_backreferencing = False supports_date_lookup_using_string = False can_introspect_autofield = True can_introspect_binary_field = False can_introspect_duration_field = False can_introspect_small_integer_field = True can_introspect_positive_integer_field = True introspected_boolean_field_type = 'IntegerField' supports_index_column_ordering = False supports_timezones = False requires_explicit_null_ordering_when_grouping = True allows_auto_pk_0 = False can_release_savepoints = True atomic_transactions = False can_clone_databases = True supports_temporal_subtraction = True supports_select_intersection = False supports_select_difference = False supports_slicing_ordering_in_compound = True supports_index_on_text_field = False has_case_insensitive_like = False create_test_procedure_without_params_sql = """ CREATE PROCEDURE test_procedure () BEGIN DECLARE V_I INTEGER; SET V_I = 1; END; """ create_test_procedure_with_int_param_sql = """ CREATE PROCEDURE test_procedure (P_I INTEGER) BEGIN DECLARE V_I INTEGER; SET V_I = P_I; END; """ db_functions_convert_bytes_to_str = True # Neither MySQL nor MariaDB support partial indexes. supports_partial_indexes = False @cached_property def _mysql_storage_engine(self): "Internal method used in Django tests. Don't rely on this from your code" with self.connection.cursor() as cursor: cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'") result = cursor.fetchone() return result[0] @cached_property def update_can_self_select(self): return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (10, 3, 2) @cached_property def can_introspect_foreign_keys(self): "Confirm support for introspected foreign keys" return self._mysql_storage_engine != 'MyISAM' @cached_property def has_zoneinfo_database(self): # Test if the time zone definitions are installed. CONVERT_TZ returns # NULL if 'UTC' timezone isn't loaded into the mysql.time_zone. 
with self.connection.cursor() as cursor: cursor.execute("SELECT CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC')") return cursor.fetchone()[0] is not None @cached_property def is_sql_auto_is_null_enabled(self): with self.connection.cursor() as cursor: cursor.execute('SELECT @@SQL_AUTO_IS_NULL') result = cursor.fetchone() return result and result[0] == 1 @cached_property def supports_over_clause(self): if self.connection.mysql_is_mariadb: return True return self.connection.mysql_version >= (8, 0, 2) supports_frame_range_fixed_distance = property(operator.attrgetter('supports_over_clause')) @cached_property def supports_column_check_constraints(self): if self.connection.mysql_is_mariadb: return self.connection.mysql_version >= (10, 2, 1) return self.connection.mysql_version >= (8, 0, 16) supports_table_check_constraints = property(operator.attrgetter('supports_column_check_constraints')) @cached_property def can_introspect_check_constraints(self): if self.connection.mysql_is_mariadb: version = self.connection.mysql_version return (version >= (10, 2, 22) and version < (10, 3)) or version >= (10, 3, 10) return self.connection.mysql_version >= (8, 0, 16) @cached_property def has_select_for_update_skip_locked(self): return not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 1) has_select_for_update_nowait = property(operator.attrgetter('has_select_for_update_skip_locked')) @cached_property def needs_explain_extended(self): # EXTENDED is deprecated (and not required) in MySQL 5.7. return not self.connection.mysql_is_mariadb and self.connection.mysql_version < (5, 7) @cached_property def supports_explain_analyze(self): return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (8, 0, 18) @cached_property def supported_explain_formats(self): # Alias MySQL's TRADITIONAL to TEXT for consistency with other # backends. formats = {'JSON', 'TEXT', 'TRADITIONAL'} if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (8, 0, 16): formats.add('TREE') return formats @cached_property def supports_transactions(self): """ All storage engines except MyISAM support transactions. """ return self._mysql_storage_engine != 'MyISAM' @cached_property def ignores_table_name_case(self): with self.connection.cursor() as cursor: cursor.execute('SELECT @@LOWER_CASE_TABLE_NAMES') result = cursor.fetchone() return result and result[0] != 0 @cached_property def supports_default_in_lead_lag(self): # To be added in https://jira.mariadb.org/browse/MDEV-12981. return not self.connection.mysql_is_mariadb
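# --- Editorial sketch (not part of Django) ----------------------------------
# Several flags above are computed lazily from the server version with
# cached_property and re-exposed under another name via
# property(operator.attrgetter(...)). The toy class below isolates that
# pattern with invented version numbers; it uses functools.cached_property
# where Django uses its own django.utils.functional.cached_property.
import operator
from functools import cached_property


class _ToyFeatures:
    def __init__(self, version, is_mariadb):
        self.version = version
        self.is_mariadb = is_mariadb

    @cached_property
    def supports_window_functions(self):
        # Computed once per instance, then cached on the instance dict.
        if self.is_mariadb:
            return True
        return self.version >= (8, 0, 2)

    # Alias one feature flag to another, as the real backend does for
    # supports_frame_range_fixed_distance and has_select_for_update_nowait.
    supports_frame_range = property(operator.attrgetter('supports_window_functions'))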
fe545d0f8efa9db449aaad62739a790f119800d46220d75c73e75f7da9fc1899
from django.db.models.sql import compiler


class SQLCompiler(compiler.SQLCompiler):
    def as_subquery_condition(self, alias, columns, compiler):
        qn = compiler.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        sql, params = self.as_sql()
        return '(%s) IN (%s)' % (', '.join('%s.%s' % (qn(alias), qn2(column)) for column in columns), sql), params


class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
    pass


class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
    def as_sql(self):
        if self.single_alias:
            return super().as_sql()
        # MySQL and MariaDB < 10.3.2 don't support deletion with a subquery,
        # which is what the default implementation of SQLDeleteCompiler uses
        # when multiple tables are involved. Use the MySQL/MariaDB-specific
        # DELETE table FROM table syntax instead to avoid performing the
        # operation in two queries.
        result = [
            'DELETE %s FROM' % self.quote_name_unless_alias(
                self.query.get_initial_alias()
            )
        ]
        from_sql, from_params = self.get_from_clause()
        result.extend(from_sql)
        where, params = self.compile(self.query.where)
        if where:
            result.append('WHERE %s' % where)
        return ' '.join(result), tuple(from_params) + tuple(params)


class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
    pass


class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
    pass
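# --- Editorial sketch (not part of Django) ----------------------------------
# SQLDeleteCompiler.as_sql() above emits MySQL's multi-table form,
# `DELETE <alias> FROM <tables> WHERE ...`, instead of wrapping the pks in a
# subquery. The helper below assembles the same string shape from plain
# inputs so the clause order is easy to see; the table and column names in
# the usage comment are invented for the example.
def _mysql_multi_table_delete(alias, from_sql, where_sql):
    result = ['DELETE %s FROM' % alias]
    result.extend(from_sql)
    if where_sql:
        result.append('WHERE %s' % where_sql)
    return ' '.join(result)

# _mysql_multi_table_delete(
#     '`app_book`',
#     ['`app_book`', 'INNER JOIN `app_author` ON (...)'],
#     '`app_author`.`name` = %s',
# )
# -> 'DELETE `app_book` FROM `app_book` INNER JOIN `app_author` ON (...)
#     WHERE `app_author`.`name` = %s'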
035b1f88987dd13942380064ecad1cd6db7efc5cf462722ffdbd2ee286984bd2
import uuid from django.conf import settings from django.db.backends.base.operations import BaseDatabaseOperations from django.utils import timezone from django.utils.duration import duration_microseconds from django.utils.encoding import force_str class DatabaseOperations(BaseDatabaseOperations): compiler_module = "django.db.backends.mysql.compiler" # MySQL stores positive fields as UNSIGNED ints. integer_field_ranges = { **BaseDatabaseOperations.integer_field_ranges, 'PositiveSmallIntegerField': (0, 65535), 'PositiveIntegerField': (0, 4294967295), } cast_data_types = { 'AutoField': 'signed integer', 'BigAutoField': 'signed integer', 'SmallAutoField': 'signed integer', 'CharField': 'char(%(max_length)s)', 'DecimalField': 'decimal(%(max_digits)s, %(decimal_places)s)', 'TextField': 'char', 'IntegerField': 'signed integer', 'BigIntegerField': 'signed integer', 'SmallIntegerField': 'signed integer', 'PositiveIntegerField': 'unsigned integer', 'PositiveSmallIntegerField': 'unsigned integer', } cast_char_field_without_max_length = 'char' explain_prefix = 'EXPLAIN' def date_extract_sql(self, lookup_type, field_name): # https://dev.mysql.com/doc/mysql/en/date-and-time-functions.html if lookup_type == 'week_day': # DAYOFWEEK() returns an integer, 1-7, Sunday=1. return "DAYOFWEEK(%s)" % field_name elif lookup_type == 'iso_week_day': # WEEKDAY() returns an integer, 0-6, Monday=0. return "WEEKDAY(%s) + 1" % field_name elif lookup_type == 'week': # Override the value of default_week_format for consistency with # other database backends. # Mode 3: Monday, 1-53, with 4 or more days this year. return "WEEK(%s, 3)" % field_name elif lookup_type == 'iso_year': # Get the year part from the YEARWEEK function, which returns a # number as year * 100 + week. return "TRUNCATE(YEARWEEK(%s, 3), -2) / 100" % field_name else: # EXTRACT returns 1-53 based on ISO-8601 for the week number. return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name): fields = { 'year': '%%Y-01-01', 'month': '%%Y-%%m-01', } # Use double percents to escape. 
if lookup_type in fields: format_str = fields[lookup_type] return "CAST(DATE_FORMAT(%s, '%s') AS DATE)" % (field_name, format_str) elif lookup_type == 'quarter': return "MAKEDATE(YEAR(%s), 1) + INTERVAL QUARTER(%s) QUARTER - INTERVAL 1 QUARTER" % ( field_name, field_name ) elif lookup_type == 'week': return "DATE_SUB(%s, INTERVAL WEEKDAY(%s) DAY)" % ( field_name, field_name ) else: return "DATE(%s)" % (field_name) def _prepare_tzname_delta(self, tzname): if '+' in tzname: return tzname[tzname.find('+'):] elif '-' in tzname: return tzname[tzname.find('-'):] return tzname def _convert_field_to_tz(self, field_name, tzname): if settings.USE_TZ and self.connection.timezone_name != tzname: field_name = "CONVERT_TZ(%s, '%s', '%s')" % ( field_name, self.connection.timezone_name, self._prepare_tzname_delta(tzname), ) return field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return "DATE(%s)" % field_name def datetime_cast_time_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return "TIME(%s)" % field_name def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return self.date_extract_sql(lookup_type, field_name) def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) fields = ['year', 'month', 'day', 'hour', 'minute', 'second'] format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape. format_def = ('0000-', '01', '-01', ' 00:', '00', ':00') if lookup_type == 'quarter': return ( "CAST(DATE_FORMAT(MAKEDATE(YEAR({field_name}), 1) + " "INTERVAL QUARTER({field_name}) QUARTER - " + "INTERVAL 1 QUARTER, '%%Y-%%m-01 00:00:00') AS DATETIME)" ).format(field_name=field_name) if lookup_type == 'week': return ( "CAST(DATE_FORMAT(DATE_SUB({field_name}, " "INTERVAL WEEKDAY({field_name}) DAY), " "'%%Y-%%m-%%d 00:00:00') AS DATETIME)" ).format(field_name=field_name) try: i = fields.index(lookup_type) + 1 except ValueError: sql = field_name else: format_str = ''.join(format[:i] + format_def[i:]) sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str) return sql def time_trunc_sql(self, lookup_type, field_name): fields = { 'hour': '%%H:00:00', 'minute': '%%H:%%i:00', 'second': '%%H:%%i:%%s', } # Use double percents to escape. if lookup_type in fields: format_str = fields[lookup_type] return "CAST(DATE_FORMAT(%s, '%s') AS TIME)" % (field_name, format_str) else: return "TIME(%s)" % (field_name) def date_interval_sql(self, timedelta): return 'INTERVAL %s MICROSECOND' % duration_microseconds(timedelta) def format_for_duration_arithmetic(self, sql): return 'INTERVAL %s MICROSECOND' % sql def force_no_ordering(self): """ "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped columns. If no ordering would otherwise be applied, we don't want any implicit sorting going on. """ return [(None, ("NULL", [], False))] def last_executed_query(self, cursor, sql, params): # With MySQLdb, cursor objects have an (undocumented) "_executed" # attribute where the exact query sent to the database is saved. # See MySQLdb/cursors.py in the source distribution. # MySQLdb returns string, PyMySQL bytes. 
return force_str(getattr(cursor, '_executed', None), errors='replace') def no_limit_value(self): # 2**64 - 1, as recommended by the MySQL documentation return 18446744073709551615 def quote_name(self, name): if name.startswith("`") and name.endswith("`"): return name # Quoting once is enough. return "`%s`" % name def random_function_sql(self): return 'RAND()' def sql_flush(self, style, tables, sequences, allow_cascade=False): # NB: The generated SQL below is specific to MySQL # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements # to clear all tables of all data if tables: sql = ['SET FOREIGN_KEY_CHECKS = 0;'] for table in tables: sql.append('%s %s;' % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_FIELD(self.quote_name(table)), )) sql.append('SET FOREIGN_KEY_CHECKS = 1;') sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql else: return [] def validate_autopk_value(self, value): # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653. if value == 0: raise ValueError('The database backend does not accept 0 as a ' 'value for AutoField.') return value def adapt_datetimefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # MySQL doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.") return str(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # MySQL doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("MySQL backend does not support timezone-aware times.") return str(value) def max_name_length(self): return 64 def bulk_insert_sql(self, fields, placeholder_rows): placeholder_rows_sql = (", ".join(row) for row in placeholder_rows) values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql) return "VALUES " + values_sql def combine_expression(self, connector, sub_expressions): if connector == '^': return 'POW(%s)' % ','.join(sub_expressions) # Convert the result to a signed integer since MySQL's binary operators # return an unsigned integer. 
elif connector in ('&', '|', '<<'): return 'CONVERT(%s, SIGNED)' % connector.join(sub_expressions) elif connector == '>>': lhs, rhs = sub_expressions return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} return super().combine_expression(connector, sub_expressions) def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type in ['BooleanField', 'NullBooleanField']: converters.append(self.convert_booleanfield_value) elif internal_type == 'DateTimeField': if settings.USE_TZ: converters.append(self.convert_datetimefield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) return converters def convert_booleanfield_value(self, value, expression, connection): if value in (0, 1): value = bool(value) return value def convert_datetimefield_value(self, value, expression, connection): if value is not None: value = timezone.make_aware(value, self.connection.timezone) return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value def binary_placeholder_sql(self, value): return '_binary %s' if value is not None and not hasattr(value, 'as_sql') else '%s' def subtract_temporals(self, internal_type, lhs, rhs): lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs if internal_type == 'TimeField': if self.connection.mysql_is_mariadb: # MariaDB includes the microsecond component in TIME_TO_SEC as # a decimal. MySQL returns an integer without microseconds. return 'CAST((TIME_TO_SEC(%(lhs)s) - TIME_TO_SEC(%(rhs)s)) * 1000000 AS SIGNED)' % { 'lhs': lhs_sql, 'rhs': rhs_sql }, lhs_params + rhs_params return ( "((TIME_TO_SEC(%(lhs)s) * 1000000 + MICROSECOND(%(lhs)s)) -" " (TIME_TO_SEC(%(rhs)s) * 1000000 + MICROSECOND(%(rhs)s)))" ) % {'lhs': lhs_sql, 'rhs': rhs_sql}, lhs_params * 2 + rhs_params * 2 else: return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), rhs_params + lhs_params def explain_query_prefix(self, format=None, **options): # Alias MySQL's TRADITIONAL to TEXT for consistency with other backends. if format and format.upper() == 'TEXT': format = 'TRADITIONAL' elif not format and 'TREE' in self.connection.features.supported_explain_formats: # Use TREE by default (if supported) as it's more informative. format = 'TREE' analyze = options.pop('analyze', False) prefix = super().explain_query_prefix(format, **options) if analyze and self.connection.features.supports_explain_analyze: # MariaDB uses ANALYZE instead of EXPLAIN ANALYZE. prefix = 'ANALYZE' if self.connection.mysql_is_mariadb else prefix + ' ANALYZE' if format and not (analyze and not self.connection.mysql_is_mariadb): # Only MariaDB supports the analyze option with formats. prefix += ' FORMAT=%s' % format if self.connection.features.needs_explain_extended and not analyze and format is None: # ANALYZE, EXTENDED, and FORMAT are mutually exclusive options. prefix += ' EXTENDED' return prefix def regex_lookup(self, lookup_type): # REGEXP BINARY doesn't work correctly in MySQL 8+ and REGEXP_LIKE # doesn't exist in MySQL 5.6 or in MariaDB. 
if self.connection.mysql_version < (8, 0, 0) or self.connection.mysql_is_mariadb: if lookup_type == 'regex': return '%s REGEXP BINARY %s' return '%s REGEXP %s' match_option = 'c' if lookup_type == 'regex' else 'i' return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option def insert_statement(self, ignore_conflicts=False): return 'INSERT IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts)
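# --- Editorial sketch (not part of Django) ----------------------------------
# regex_lookup() above picks between REGEXP/REGEXP BINARY (MySQL < 8 and
# MariaDB) and REGEXP_LIKE() with a match option (MySQL 8+). The function
# below reproduces that decision table standalone so all four branches are
# visible at a glance; it takes the version facts as arguments instead of
# reading them from a connection object.
def _regex_template(lookup_type, mysql_version, is_mariadb):
    if mysql_version < (8, 0, 0) or is_mariadb:
        if lookup_type == 'regex':
            return '%s REGEXP BINARY %s'     # case-sensitive
        return '%s REGEXP %s'                # case-insensitive ('iregex')
    match_option = 'c' if lookup_type == 'regex' else 'i'
    return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option

# _regex_template('iregex', (8, 0, 18), False) -> "REGEXP_LIKE(%s, %s, 'i')"
# _regex_template('regex', (5, 7, 0), False)   -> '%s REGEXP BINARY %s'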
b520f2162de26bbb2de4c61bf8fe3c8b6efa3b352dad3d97f74aa623f8fbdc1d
import operator from django.db.backends.base.features import BaseDatabaseFeatures from django.db.utils import InterfaceError from django.utils.functional import cached_property class DatabaseFeatures(BaseDatabaseFeatures): allows_group_by_selected_pks = True can_return_columns_from_insert = True can_return_rows_from_bulk_insert = True has_real_datatype = True has_native_uuid_field = True has_native_duration_field = True can_defer_constraint_checks = True has_select_for_update = True has_select_for_update_nowait = True has_select_for_update_of = True has_select_for_update_skip_locked = True can_release_savepoints = True supports_tablespaces = True supports_transactions = True can_introspect_autofield = True can_introspect_ip_address_field = True can_introspect_materialized_views = True can_introspect_small_integer_field = True can_distinct_on_fields = True can_rollback_ddl = True supports_combined_alters = True nulls_order_largest = True closed_cursor_error_class = InterfaceError has_case_insensitive_like = False greatest_least_ignores_nulls = True can_clone_databases = True supports_temporal_subtraction = True supports_slicing_ordering_in_compound = True create_test_procedure_without_params_sql = """ CREATE FUNCTION test_procedure () RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := 1; END; $$ LANGUAGE plpgsql;""" create_test_procedure_with_int_param_sql = """ CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$ DECLARE V_I INTEGER; BEGIN V_I := P_I; END; $$ LANGUAGE plpgsql;""" requires_casted_case_in_updates = True supports_over_clause = True supports_aggregate_filter_clause = True supported_explain_formats = {'JSON', 'TEXT', 'XML', 'YAML'} validates_explain_options = False # A query will error on invalid options. @cached_property def is_postgresql_9_6(self): return self.connection.pg_version >= 90600 @cached_property def is_postgresql_10(self): return self.connection.pg_version >= 100000 @cached_property def is_postgresql_12(self): return self.connection.pg_version >= 120000 has_brin_autosummarize = property(operator.attrgetter('is_postgresql_10')) has_phraseto_tsquery = property(operator.attrgetter('is_postgresql_9_6')) supports_table_partitions = property(operator.attrgetter('is_postgresql_10'))
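# --- Editorial sketch (not part of Django) ----------------------------------
# The is_postgresql_* properties above compare connection.pg_version against
# integers such as 100000 because libpq encodes the server version as
# major * 10000 + minor * 100 + patch before PostgreSQL 10, and as
# major * 10000 + minor from 10 onwards (9.6.5 -> 90605, 12.1 -> 120001).
# The helper below decodes that integer back into a tuple for illustration.
def _decode_pg_version(pg_version):
    if pg_version >= 100000:
        # Post-10 numbering: only major and minor components.
        return (pg_version // 10000, pg_version % 10000)
    return (pg_version // 10000, pg_version % 10000 // 100, pg_version % 100)

# _decode_pg_version(90605)  -> (9, 6, 5)
# _decode_pg_version(120001) -> (12, 1)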
1bdcc0fe63a20403c4fbec8604f4ec5dc79af835b2ce54580e2b0e6ea6175bc6
from django.conf import settings
from django.http import HttpResponse
from django.utils.deprecation import MiddlewareMixin

from .utils import get_view_name


class XViewMiddleware(MiddlewareMixin):
    """
    Add an X-View header to internal HEAD requests.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        """
        If the request method is HEAD and either the IP is internal or the
        user is a logged-in staff member, return a response with an x-view
        header indicating the view function. This is used to look up the view
        function for an arbitrary page.
        """
        assert hasattr(request, 'user'), (
            "The XView middleware requires authentication middleware to be "
            "installed. Edit your MIDDLEWARE setting to insert "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'."
        )
        if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
                                         (request.user.is_active and request.user.is_staff)):
            response = HttpResponse()
            response['X-View'] = get_view_name(view_func)
            return response
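# --- Editorial sketch (not part of Django) ----------------------------------
# The gate in process_view() above adds the X-View header only for HEAD
# requests from an internal IP or an active staff user. The pure function
# below mirrors that condition so it can be checked without a request object;
# the argument values in the usage comment are invented for the example.
def _should_add_x_view(method, remote_addr, internal_ips, user_is_active, user_is_staff):
    return method == 'HEAD' and (
        remote_addr in internal_ips or (user_is_active and user_is_staff)
    )

# _should_add_x_view('HEAD', '127.0.0.1', ['127.0.0.1'], False, False) -> True
# _should_add_x_view('GET', '127.0.0.1', ['127.0.0.1'], True, True)    -> False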
b8cfccde95e2c9f8f0590149458753d6db169b847cc2bab7e0e92cd4529c59c9
from django.contrib import auth from django.contrib.auth import load_backend from django.contrib.auth.backends import RemoteUserBackend from django.core.exceptions import ImproperlyConfigured from django.utils.deprecation import MiddlewareMixin from django.utils.functional import SimpleLazyObject def get_user(request): if not hasattr(request, '_cached_user'): request._cached_user = auth.get_user(request) return request._cached_user class AuthenticationMiddleware(MiddlewareMixin): def process_request(self, request): assert hasattr(request, 'session'), ( "The Django authentication middleware requires session middleware " "to be installed. Edit your MIDDLEWARE setting to insert " "'django.contrib.sessions.middleware.SessionMiddleware' before " "'django.contrib.auth.middleware.AuthenticationMiddleware'." ) request.user = SimpleLazyObject(lambda: get_user(request)) class RemoteUserMiddleware(MiddlewareMixin): """ Middleware for utilizing Web-server-provided authentication. If request.user is not authenticated, then this middleware attempts to authenticate the username passed in the ``REMOTE_USER`` request header. If authentication is successful, the user is automatically logged in to persist the user in the session. The header used is configurable and defaults to ``REMOTE_USER``. Subclass this class and change the ``header`` attribute if you need to use a different header. """ # Name of request header to grab username from. This will be the key as # used in the request.META dictionary, i.e. the normalization of headers to # all uppercase and the addition of "HTTP_" prefix apply. header = "REMOTE_USER" force_logout_if_no_header = True def process_request(self, request): # AuthenticationMiddleware is required so that request.user exists. if not hasattr(request, 'user'): raise ImproperlyConfigured( "The Django remote user auth middleware requires the" " authentication middleware to be installed. Edit your" " MIDDLEWARE setting to insert" " 'django.contrib.auth.middleware.AuthenticationMiddleware'" " before the RemoteUserMiddleware class.") try: username = request.META[self.header] except KeyError: # If specified header doesn't exist then remove any existing # authenticated remote-user, or return (leaving request.user set to # AnonymousUser by the AuthenticationMiddleware). if self.force_logout_if_no_header and request.user.is_authenticated: self._remove_invalid_user(request) return # If the user is already authenticated and that user is the user we are # getting passed in the headers, then the correct user is already # persisted in the session and we don't need to continue. if request.user.is_authenticated: if request.user.get_username() == self.clean_username(username, request): return else: # An authenticated user is associated with the request, but # it does not match the authorized user in the header. self._remove_invalid_user(request) # We are seeing this user for the first time in this session, attempt # to authenticate the user. user = auth.authenticate(request, remote_user=username) if user: # User is valid. Set request.user and persist user in the session # by logging the user in. request.user = user auth.login(request, user) def clean_username(self, username, request): """ Allow the backend to clean the username, if the backend defines a clean_username method. """ backend_str = request.session[auth.BACKEND_SESSION_KEY] backend = auth.load_backend(backend_str) try: username = backend.clean_username(username) except AttributeError: # Backend has no clean_username method. 
            pass
        return username

    def _remove_invalid_user(self, request):
        """
        Remove the currently authenticated user from the request, but only if
        the user was authenticated via the RemoteUserBackend.
        """
        try:
            stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, ''))
        except ImportError:
            # Backend failed to load.
            auth.logout(request)
        else:
            if isinstance(stored_backend, RemoteUserBackend):
                auth.logout(request)


class PersistentRemoteUserMiddleware(RemoteUserMiddleware):
    """
    Middleware for web-server-provided authentication on logon pages.

    Like RemoteUserMiddleware but keeps the user authenticated even if
    the header (``REMOTE_USER``) is not found in the request. Useful for
    setups where external authentication via ``REMOTE_USER`` is only expected
    to happen on some "logon" URL and the rest of the application wants to
    use Django's authentication mechanism.
    """
    force_logout_if_no_header = False
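# --- Editorial sketch (not part of Django) ----------------------------------
# As the RemoteUserMiddleware docstring above says, the header is made
# configurable by subclassing. A deployment whose SSO proxy forwards the
# login in an X-Authenticated-User header could subclass as below; the header
# name is an invented example, not a Django default. Note that request.META
# normalizes HTTP headers to uppercase with an HTTP_ prefix.
class ProxyRemoteUserMiddleware(RemoteUserMiddleware):
    header = 'HTTP_X_AUTHENTICATED_USER'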
195423afb5ea91ee8094516b981e57565612cc1e66cc89e56f2674e177485109
import json

from django.contrib.messages.storage.base import BaseStorage
from django.contrib.messages.storage.cookie import (
    MessageDecoder, MessageEncoder,
)


class SessionStorage(BaseStorage):
    """
    Store messages in the session (that is, django.contrib.sessions).
    """
    session_key = '_messages'

    def __init__(self, request, *args, **kwargs):
        assert hasattr(request, 'session'), "The session-based temporary "\
            "message storage requires session middleware to be installed, "\
            "and to come before the message middleware in the "\
            "MIDDLEWARE list."
        super().__init__(request, *args, **kwargs)

    def _get(self, *args, **kwargs):
        """
        Retrieve a list of messages from the request's session. This storage
        always stores everything it is given, so return True for the
        all_retrieved flag.
        """
        return self.deserialize_messages(self.request.session.get(self.session_key)), True

    def _store(self, messages, response, *args, **kwargs):
        """
        Store a list of messages to the request's session.
        """
        if messages:
            self.request.session[self.session_key] = self.serialize_messages(messages)
        else:
            self.request.session.pop(self.session_key, None)
        return []

    def serialize_messages(self, messages):
        encoder = MessageEncoder(separators=(',', ':'))
        return encoder.encode(messages)

    def deserialize_messages(self, data):
        if data and isinstance(data, str):
            return json.loads(data, cls=MessageDecoder)
        return data
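# --- Editorial sketch (not part of Django) ----------------------------------
# serialize_messages()/deserialize_messages() above round-trip the message
# list through compact JSON using the cookie storage's encoder and decoder.
# The sketch below shows an equivalent round trip with plain strings standing
# in for Message objects, to make the (',', ':') separator choice visible;
# real messages are handled by MessageEncoder/MessageDecoder.
import json


def _round_trip(messages):
    # separators=(',', ':') drops the spaces json.dumps() inserts by
    # default, keeping the session payload smaller.
    data = json.dumps(messages, separators=(',', ':'))
    return json.loads(data)

# _round_trip(['hello', 'world']) == ['hello', 'world']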
30c9b230a3eeae4c938bf3683986ef118ca8e796138a47c7a7e3e922ac11561b
""" This module houses the ctypes initialization procedures, as well as the notice and error handler function callbacks (get called when an error occurs in GEOS). This module also houses GEOS Pointer utilities, including get_pointer_arr(), and GEOM_PTR. """ import logging import os from ctypes import CDLL, CFUNCTYPE, POINTER, Structure, c_char_p from ctypes.util import find_library from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject, cached_property from django.utils.version import get_version_tuple logger = logging.getLogger('django.contrib.gis') def load_geos(): # Custom library path set? try: from django.conf import settings lib_path = settings.GEOS_LIBRARY_PATH except (AttributeError, ImportError, ImproperlyConfigured, OSError): lib_path = None # Setting the appropriate names for the GEOS-C library. if lib_path: lib_names = None elif os.name == 'nt': # Windows NT libraries lib_names = ['geos_c', 'libgeos_c-1'] elif os.name == 'posix': # *NIX libraries lib_names = ['geos_c', 'GEOS'] else: raise ImportError('Unsupported OS "%s"' % os.name) # Using the ctypes `find_library` utility to find the path to the GEOS # shared library. This is better than manually specifying each library name # and extension (e.g., libgeos_c.[so|so.1|dylib].). if lib_names: for lib_name in lib_names: lib_path = find_library(lib_name) if lib_path is not None: break # No GEOS library could be found. if lib_path is None: raise ImportError( 'Could not find the GEOS library (tried "%s"). ' 'Try setting GEOS_LIBRARY_PATH in your settings.' % '", "'.join(lib_names) ) # Getting the GEOS C library. The C interface (CDLL) is used for # both *NIX and Windows. # See the GEOS C API source code for more details on the library function calls: # https://geos.osgeo.org/doxygen/geos__c_8h_source.html _lgeos = CDLL(lib_path) # Here we set up the prototypes for the initGEOS_r and finishGEOS_r # routines. These functions aren't actually called until they are # attached to a GEOS context handle -- this actually occurs in # geos/prototypes/threadsafe.py. _lgeos.initGEOS_r.restype = CONTEXT_PTR _lgeos.finishGEOS_r.argtypes = [CONTEXT_PTR] # Set restype for compatibility across 32 and 64-bit platforms. _lgeos.GEOSversion.restype = c_char_p return _lgeos # The notice and error handler C function callback definitions. # Supposed to mimic the GEOS message handler (C below): # typedef void (*GEOSMessageHandler)(const char *fmt, ...); NOTICEFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def notice_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: warn_msg = fmt % lst except TypeError: warn_msg = fmt logger.warning('GEOS_NOTICE: %s\n', warn_msg) notice_h = NOTICEFUNC(notice_h) ERRORFUNC = CFUNCTYPE(None, c_char_p, c_char_p) def error_h(fmt, lst): fmt, lst = fmt.decode(), lst.decode() try: err_msg = fmt % lst except TypeError: err_msg = fmt logger.error('GEOS_ERROR: %s\n', err_msg) error_h = ERRORFUNC(error_h) # #### GEOS Geometry C data structures, and utility functions. #### # Opaque GEOS geometry structures, used for GEOM_PTR and CS_PTR class GEOSGeom_t(Structure): pass class GEOSPrepGeom_t(Structure): pass class GEOSCoordSeq_t(Structure): pass class GEOSContextHandle_t(Structure): pass # Pointers to opaque GEOS geometry structures. GEOM_PTR = POINTER(GEOSGeom_t) PREPGEOM_PTR = POINTER(GEOSPrepGeom_t) CS_PTR = POINTER(GEOSCoordSeq_t) CONTEXT_PTR = POINTER(GEOSContextHandle_t) lgeos = SimpleLazyObject(load_geos) class GEOSFuncFactory: """ Lazy loading of GEOS functions. 
""" argtypes = None restype = None errcheck = None def __init__(self, func_name, *args, restype=None, errcheck=None, argtypes=None, **kwargs): self.func_name = func_name if restype is not None: self.restype = restype if errcheck is not None: self.errcheck = errcheck if argtypes is not None: self.argtypes = argtypes self.args = args self.kwargs = kwargs def __call__(self, *args, **kwargs): return self.func(*args, **kwargs) @cached_property def func(self): from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc func = GEOSFunc(self.func_name) func.argtypes = self.argtypes or [] func.restype = self.restype if self.errcheck: func.errcheck = self.errcheck return func def geos_version(): """Return the string version of the GEOS library.""" return lgeos.GEOSversion() def geos_version_tuple(): """Return the GEOS version as a tuple (major, minor, subminor).""" return get_version_tuple(geos_version().decode())
568854582f4c2b76231ba88ec7af82b6135de2548a8ebc3e5b94b33501a6fa24
from django.contrib.auth.middleware import AuthenticationMiddleware
from django.contrib.auth.models import User
from django.http import HttpRequest
from django.test import TestCase


class TestAuthenticationMiddleware(TestCase):
    def setUp(self):
        self.user = User.objects.create_user('test_user', 'test@example.com', 'test_password')
        self.middleware = AuthenticationMiddleware()
        self.client.force_login(self.user)
        self.request = HttpRequest()
        self.request.session = self.client.session

    def test_no_password_change_doesnt_invalidate_session(self):
        self.request.session = self.client.session
        self.middleware.process_request(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertFalse(self.request.user.is_anonymous)

    def test_changed_password_invalidates_session(self):
        # After a password change, the user should be anonymous.
        self.user.set_password('new_password')
        self.user.save()
        self.middleware.process_request(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertTrue(self.request.user.is_anonymous)
        # The session should be flushed.
        self.assertIsNone(self.request.session.session_key)

    def test_no_session(self):
        msg = (
            "The Django authentication middleware requires session middleware "
            "to be installed. Edit your MIDDLEWARE setting to insert "
            "'django.contrib.sessions.middleware.SessionMiddleware' before "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'."
        )
        with self.assertRaisesMessage(AssertionError, msg):
            self.middleware(HttpRequest())
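
# Editor's illustration (not part of the test module above): the invalidation
# exercised by test_changed_password_invalidates_session() is driven by the
# session auth hash, which is derived from the password. A view that changes
# the logged-in user's own password can keep the session alive by rotating
# that hash with django.contrib.auth.update_session_auth_hash(). A minimal
# sketch; the view name is hypothetical:

def change_own_password(request):
    from django.contrib.auth import update_session_auth_hash
    from django.http import HttpResponse

    user = request.user
    user.set_password(request.POST['new_password'])
    user.save()
    # Without this call, AuthenticationMiddleware would see a stale hash on
    # the next request and the user would be logged out.
    update_session_auth_hash(request, user)
    return HttpResponse('password changed')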
8125b63a248a22eca5a06a0dd25fe34bddf313315e9dd613953c8a885f6cb8d9
import datetime import pickle from django.db import models from django.test import TestCase from django.utils.version import get_version from .models import Container, Event, Group, Happening, M2MModel class PickleabilityTestCase(TestCase): @classmethod def setUpTestData(cls): Happening.objects.create() # make sure the defaults are working (#20158) def assert_pickles(self, qs): self.assertEqual(list(pickle.loads(pickle.dumps(qs))), list(qs)) def test_related_field(self): g = Group.objects.create(name="Ponies Who Own Maybachs") self.assert_pickles(Event.objects.filter(group=g.id)) def test_datetime_callable_default_all(self): self.assert_pickles(Happening.objects.all()) def test_datetime_callable_default_filter(self): self.assert_pickles(Happening.objects.filter(when=datetime.datetime.now())) def test_string_as_default(self): self.assert_pickles(Happening.objects.filter(name="test")) def test_standalone_method_as_default(self): self.assert_pickles(Happening.objects.filter(number1=1)) def test_staticmethod_as_default(self): self.assert_pickles(Happening.objects.filter(number2=1)) def test_filter_reverse_fk(self): self.assert_pickles(Group.objects.filter(event=1)) def test_doesnotexist_exception(self): # Ticket #17776 original = Event.DoesNotExist("Doesn't exist") unpickled = pickle.loads(pickle.dumps(original)) # Exceptions are not equal to equivalent instances of themselves, so # can't just use assertEqual(original, unpickled) self.assertEqual(original.__class__, unpickled.__class__) self.assertEqual(original.args, unpickled.args) def test_doesnotexist_class(self): klass = Event.DoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_multipleobjectsreturned_class(self): klass = Event.MultipleObjectsReturned self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_forward_relatedobjectdoesnotexist_class(self): # ForwardManyToOneDescriptor klass = Event.group.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) # ForwardOneToOneDescriptor klass = Happening.event.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_reverse_one_to_one_relatedobjectdoesnotexist_class(self): klass = Event.happening.RelatedObjectDoesNotExist self.assertIs(pickle.loads(pickle.dumps(klass)), klass) def test_manager_pickle(self): pickle.loads(pickle.dumps(Happening.objects)) def test_model_pickle(self): """ A model not defined on module level is picklable. """ original = Container.SomeModel(pk=1) dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) # Also, deferred dynamic model works Container.SomeModel.objects.create(somefield=1) original = Container.SomeModel.objects.defer('somefield')[0] dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) self.assertEqual(original.somefield, reloaded.somefield) def test_model_pickle_m2m(self): """ Test intentionally the automatically created through model. 
""" m1 = M2MModel.objects.create() g1 = Group.objects.create(name='foof') m1.groups.add(g1) m2m_through = M2MModel._meta.get_field('groups').remote_field.through original = m2m_through.objects.get() dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) def test_model_pickle_dynamic(self): class Meta: proxy = True dynclass = type("DynamicEventSubclass", (Event,), {'Meta': Meta, '__module__': Event.__module__}) original = dynclass(pk=1) dumped = pickle.dumps(original) reloaded = pickle.loads(dumped) self.assertEqual(original, reloaded) self.assertIs(reloaded.__class__, dynclass) def test_specialized_queryset(self): self.assert_pickles(Happening.objects.values('name')) self.assert_pickles(Happening.objects.values('name').dates('when', 'year')) # With related field (#14515) self.assert_pickles( Event.objects.select_related('group').order_by('title').values_list('title', 'group__name') ) def test_pickle_prefetch_related_idempotence(self): g = Group.objects.create(name='foo') groups = Group.objects.prefetch_related('event_set') # First pickling groups = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups, [g]) # Second pickling groups = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups, [g]) def test_pickle_prefetch_queryset_usable_outside_of_prefetch(self): # Prefetch shouldn't affect the fetch-on-pickle behavior of the # queryset passed to it. Group.objects.create(name='foo') events = Event.objects.order_by('id') Group.objects.prefetch_related(models.Prefetch('event_set', queryset=events)) with self.assertNumQueries(1): events2 = pickle.loads(pickle.dumps(events)) with self.assertNumQueries(0): list(events2) def test_pickle_prefetch_queryset_still_usable(self): g = Group.objects.create(name='foo') groups = Group.objects.prefetch_related( models.Prefetch('event_set', queryset=Event.objects.order_by('id')) ) groups2 = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups2.filter(id__gte=0), [g]) def test_pickle_prefetch_queryset_not_evaluated(self): Group.objects.create(name='foo') groups = Group.objects.prefetch_related( models.Prefetch('event_set', queryset=Event.objects.order_by('id')) ) list(groups) # evaluate QuerySet with self.assertNumQueries(0): pickle.loads(pickle.dumps(groups)) def test_pickle_prefetch_related_with_m2m_and_objects_deletion(self): """ #24831 -- Cached properties on ManyToOneRel created in QuerySet.delete() caused subsequent QuerySet pickling to fail. """ g = Group.objects.create(name='foo') m2m = M2MModel.objects.create() m2m.groups.add(g) Group.objects.all().delete() m2ms = M2MModel.objects.prefetch_related('groups') m2ms = pickle.loads(pickle.dumps(m2ms)) self.assertSequenceEqual(m2ms, [m2m]) def test_pickle_exists_queryset_still_usable(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( has_event=models.Exists( Event.objects.filter(group_id=models.OuterRef('id')), ), ) groups2 = pickle.loads(pickle.dumps(groups)) self.assertSequenceEqual(groups2.filter(has_event=True), [group]) def test_pickle_exists_queryset_not_evaluated(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( has_event=models.Exists( Event.objects.filter(group_id=models.OuterRef('id')), ), ) list(groups) # evaluate QuerySet. 
with self.assertNumQueries(0): self.assert_pickles(groups) def test_pickle_subquery_queryset_not_evaluated(self): group = Group.objects.create(name='group') Event.objects.create(title='event', group=group) groups = Group.objects.annotate( event_title=models.Subquery( Event.objects.filter(group_id=models.OuterRef('id')).values('title'), ), ) list(groups) # evaluate QuerySet. with self.assertNumQueries(0): self.assert_pickles(groups) def test_annotation_with_callable_default(self): # Happening.when has a callable default of datetime.datetime.now. qs = Happening.objects.annotate(latest_time=models.Max('when')) self.assert_pickles(qs) def test_filter_deferred(self): qs = Happening.objects.all() qs._defer_next_filter = True qs = qs.filter(id=0) self.assert_pickles(qs) def test_missing_django_version_unpickling(self): """ #21430 -- Verifies a warning is raised for querysets that are unpickled without a Django version """ qs = Group.missing_django_version_objects.all() msg = "Pickled queryset instance's Django version is not specified." with self.assertRaisesMessage(RuntimeWarning, msg): pickle.loads(pickle.dumps(qs)) def test_unsupported_unpickle(self): """ #21430 -- Verifies a warning is raised for querysets that are unpickled with a different Django version than the current """ qs = Group.previous_django_version_objects.all() msg = "Pickled queryset instance's Django version 1.0 does not match the current version %s." % get_version() with self.assertRaisesMessage(RuntimeWarning, msg): pickle.loads(pickle.dumps(qs)) class InLookupTests(TestCase): @classmethod def setUpTestData(cls): for i in range(1, 3): group = Group.objects.create(name='Group {}'.format(i)) cls.e1 = Event.objects.create(title='Event 1', group=group) def test_in_lookup_queryset_evaluation(self): """ Neither pickling nor unpickling a QuerySet.query with an __in=inner_qs lookup should evaluate inner_qs. """ events = Event.objects.filter(group__in=Group.objects.all()) with self.assertNumQueries(0): dumped = pickle.dumps(events.query) with self.assertNumQueries(0): reloaded = pickle.loads(dumped) reloaded_events = Event.objects.none() reloaded_events.query = reloaded self.assertSequenceEqual(reloaded_events, [self.e1]) def test_in_lookup_query_evaluation(self): events = Event.objects.filter(group__in=Group.objects.values('id').query) with self.assertNumQueries(0): dumped = pickle.dumps(events.query) with self.assertNumQueries(0): reloaded = pickle.loads(dumped) reloaded_events = Event.objects.none() reloaded_events.query = reloaded self.assertSequenceEqual(reloaded_events, [self.e1])
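
# Editor's illustration (not part of the test module above): several tests
# here pickle ``queryset.query`` rather than the queryset itself, which is
# the documented way to carry a query around without evaluating or caching
# its results. A minimal helper sketch using the module's ``pickle`` import
# (the helper name is hypothetical):

def _rebuild_queryset(queryset):
    """Pickle a QuerySet's query and rebuild an equivalent, unevaluated QuerySet."""
    restored = queryset.model.objects.none()
    restored.query = pickle.loads(pickle.dumps(queryset.query))
    return restored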
7d70b03b14c07d3e7518f00a631a794f0a9901ed02b86d69a13e873f478ff4da
import datetime import sys import unittest from django.contrib.admin import ( AllValuesFieldListFilter, BooleanFieldListFilter, ModelAdmin, RelatedOnlyFieldListFilter, SimpleListFilter, site, ) from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.core.exceptions import ImproperlyConfigured from django.test import RequestFactory, TestCase, override_settings from .models import Book, Bookmark, Department, Employee, TaggedItem def select_by(dictlist, key, value): return [x for x in dictlist if x[key] == value][0] class DecadeListFilter(SimpleListFilter): def lookups(self, request, model_admin): return ( ('the 80s', "the 1980's"), ('the 90s', "the 1990's"), ('the 00s', "the 2000's"), ('other', "other decades"), ) def queryset(self, request, queryset): decade = self.value() if decade == 'the 80s': return queryset.filter(year__gte=1980, year__lte=1989) if decade == 'the 90s': return queryset.filter(year__gte=1990, year__lte=1999) if decade == 'the 00s': return queryset.filter(year__gte=2000, year__lte=2009) class NotNinetiesListFilter(SimpleListFilter): title = "Not nineties books" parameter_name = "book_year" def lookups(self, request, model_admin): return ( ('the 90s', "the 1990's"), ) def queryset(self, request, queryset): if self.value() == 'the 90s': return queryset.filter(year__gte=1990, year__lte=1999) else: return queryset.exclude(year__gte=1990, year__lte=1999) class DecadeListFilterWithTitleAndParameter(DecadeListFilter): title = 'publication decade' parameter_name = 'publication-decade' class DecadeListFilterWithoutTitle(DecadeListFilter): parameter_name = 'publication-decade' class DecadeListFilterWithoutParameter(DecadeListFilter): title = 'publication decade' class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter): def lookups(self, request, model_admin): pass class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter): def queryset(self, request, queryset): raise 1 / 0 class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter): def lookups(self, request, model_admin): qs = model_admin.get_queryset(request) if qs.filter(year__gte=1980, year__lte=1989).exists(): yield ('the 80s', "the 1980's") if qs.filter(year__gte=1990, year__lte=1999).exists(): yield ('the 90s', "the 1990's") if qs.filter(year__gte=2000, year__lte=2009).exists(): yield ('the 00s', "the 2000's") class DecadeListFilterParameterEndsWith__In(DecadeListFilter): title = 'publication decade' parameter_name = 'decade__in' # Ends with '__in" class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter): title = 'publication decade' parameter_name = 'decade__isnull' # Ends with '__isnull" class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter): title = 'department' parameter_name = 'department' def lookups(self, request, model_admin): return sorted({ (employee.department.id, # Intentionally not a string (Refs #19318) employee.department.code) for employee in model_admin.get_queryset(request).all() }) def queryset(self, request, queryset): if self.value(): return queryset.filter(department__id=self.value()) class DepartmentListFilterLookupWithUnderscoredParameter(DepartmentListFilterLookupWithNonStringValue): parameter_name = 'department__whatever' class DepartmentListFilterLookupWithDynamicValue(DecadeListFilterWithTitleAndParameter): def lookups(self, request, model_admin): if self.value() == 'the 80s': return 
(('the 90s', "the 1990's"),)
        elif self.value() == 'the 90s':
            return (('the 80s', "the 1980's"),)
        else:
            return (('the 80s', "the 1980's"), ('the 90s', "the 1990's"),)


class CustomUserAdmin(UserAdmin):
    list_filter = ('books_authored', 'books_contributed')


class BookAdmin(ModelAdmin):
    list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no')
    ordering = ('-id',)


class BookAdmin2(ModelAdmin):
    list_filter = ('year', 'author', 'contributors', 'is_best_seller2', 'date_registered', 'no')


class BookAdminWithTupleBooleanFilter(BookAdmin):
    list_filter = (
        'year',
        'author',
        'contributors',
        ('is_best_seller', BooleanFieldListFilter),
        'date_registered',
        'no',
    )


class BookAdminWithUnderscoreLookupAndTuple(BookAdmin):
    list_filter = (
        'year',
        ('author__email', AllValuesFieldListFilter),
        'contributors',
        'is_best_seller',
        'date_registered',
        'no',
    )


class BookAdminWithCustomQueryset(ModelAdmin):
    def __init__(self, user, *args, **kwargs):
        self.user = user
        super().__init__(*args, **kwargs)

    list_filter = ('year',)

    def get_queryset(self, request):
        return super().get_queryset(request).filter(author=self.user)


class BookAdminRelatedOnlyFilter(ModelAdmin):
    list_filter = (
        'year', 'is_best_seller', 'date_registered', 'no',
        ('author', RelatedOnlyFieldListFilter),
        ('contributors', RelatedOnlyFieldListFilter),
        ('employee__department', RelatedOnlyFieldListFilter),
    )
    ordering = ('-id',)


class DecadeFilterBookAdmin(ModelAdmin):
    list_filter = ('author', DecadeListFilterWithTitleAndParameter)
    ordering = ('-id',)


class NotNinetiesListFilterAdmin(ModelAdmin):
    list_filter = (NotNinetiesListFilter,)


class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
    list_filter = (DecadeListFilterWithoutTitle,)


class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
    list_filter = (DecadeListFilterWithoutParameter,)


class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
    list_filter = (DecadeListFilterWithNoneReturningLookups,)


class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
    list_filter = (DecadeListFilterWithFailingQueryset,)


class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
    list_filter = (DecadeListFilterWithQuerysetBasedLookups,)


class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
    list_filter = (DecadeListFilterParameterEndsWith__In,)


class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
    list_filter = (DecadeListFilterParameterEndsWith__Isnull,)


class EmployeeAdmin(ModelAdmin):
    list_display = ['name', 'department']
    list_filter = ['department']


class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithNonStringValue]


class DepartmentFilterUnderscoredEmployeeAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithUnderscoredParameter]


class DepartmentFilterDynamicValueBookAdmin(EmployeeAdmin):
    list_filter = [DepartmentListFilterLookupWithDynamicValue]


class BookmarkAdminGenericRelation(ModelAdmin):
    list_filter = ['tags__tag']


class ListFiltersTests(TestCase):
    request_factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.today = datetime.date.today()
        cls.tomorrow = cls.today + datetime.timedelta(days=1)
        cls.one_week_ago = cls.today - datetime.timedelta(days=7)
        if cls.today.month == 12:
            cls.next_month = cls.today.replace(year=cls.today.year + 1, month=1, day=1)
        else:
            cls.next_month = cls.today.replace(month=cls.today.month + 1, day=1)
        cls.next_year = cls.today.replace(year=cls.today.year + 1, month=1, day=1)

        # Users
        cls.alfred = User.objects.create_superuser('alfred', 'alfred@example.com', 'password')
        cls.bob = User.objects.create_user('bob', 'bob@example.com')
        cls.lisa = User.objects.create_user('lisa', 'lisa@example.com')

        # Books
        cls.djangonaut_book = Book.objects.create(
            title='Djangonaut: an art of living', year=2009,
            author=cls.alfred, is_best_seller=True, date_registered=cls.today,
            is_best_seller2=True,
        )
        cls.bio_book = Book.objects.create(
            title='Django: a biography', year=1999, author=cls.alfred,
            is_best_seller=False, no=207,
            is_best_seller2=False,
        )
        cls.django_book = Book.objects.create(
            title='The Django Book', year=None, author=cls.bob,
            is_best_seller=None, date_registered=cls.today, no=103,
            is_best_seller2=None,
        )
        cls.guitar_book = Book.objects.create(
            title='Guitar for dummies', year=2002, is_best_seller=True,
            date_registered=cls.one_week_ago,
            is_best_seller2=True,
        )
        cls.guitar_book.contributors.set([cls.bob, cls.lisa])

        # Departments
        cls.dev = Department.objects.create(code='DEV', description='Development')
        cls.design = Department.objects.create(code='DSN', description='Design')

        # Employees
        cls.john = Employee.objects.create(name='John Blue', department=cls.dev)
        cls.jack = Employee.objects.create(name='Jack Red', department=cls.design)

    def test_choicesfieldlistfilter_has_none_choice(self):
        """
        The last choice is for the None value.
        """
        class BookmarkChoicesAdmin(ModelAdmin):
            list_display = ['none_or_null']
            list_filter = ['none_or_null']

        modeladmin = BookmarkChoicesAdmin(Bookmark, site)
        request = self.request_factory.get('/', {})
        request.user = self.alfred
        changelist = modeladmin.get_changelist_instance(request)
        filterspec = changelist.get_filters(request)[0][0]
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['display'], 'None')
        self.assertEqual(choices[-1]['query_string'], '?none_or_null__isnull=True')

    def test_datefieldlistfilter(self):
        modeladmin = BookAdmin(Book, site)

        request = self.request_factory.get('/')
        request.user = self.alfred
        changelist = modeladmin.get_changelist(request)

        request = self.request_factory.get('/', {
            'date_registered__gte': self.today,
            'date_registered__lt': self.tomorrow},
        )
        request.user = self.alfred
        changelist = modeladmin.get_changelist_instance(request)

        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(filterspec.title, 'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "Today")
        self.assertIs(choice['selected'], True)
        self.assertEqual(
            choice['query_string'],
            '?date_registered__gte=%s&date_registered__lt=%s' % (
                self.today,
                self.tomorrow,
            )
        )

        request = self.request_factory.get('/', {
            'date_registered__gte': self.today.replace(day=1),
            'date_registered__lt': self.next_month},
        )
        request.user = self.alfred
        changelist = modeladmin.get_changelist_instance(request)

        # Make sure the correct queryset is returned
        queryset = changelist.get_queryset(request)
        if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month):
            # In case one week ago is in the same month.
self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) else: self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "This month") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( self.today.replace(day=1), self.next_month, ) ) request = self.request_factory.get('/', { 'date_registered__gte': self.today.replace(month=1, day=1), 'date_registered__lt': self.next_year}, ) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) if self.today.year == self.one_week_ago.year: # In case one week ago is in the same year. self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) else: self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "This year") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( self.today.replace(month=1, day=1), self.next_year, ) ) request = self.request_factory.get('/', { 'date_registered__gte': str(self.one_week_ago), 'date_registered__lt': str(self.tomorrow), }) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), "display", "Past 7 days") self.assertIs(choice['selected'], True) self.assertEqual( choice['query_string'], '?date_registered__gte=%s&date_registered__lt=%s' % ( str(self.one_week_ago), str(self.tomorrow), ) ) # Null/not null queries request = self.request_factory.get('/', {'date_registered__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 1) self.assertEqual(queryset[0], self.bio_book) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), 'display', 'No date') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?date_registered__isnull=True') request = self.request_factory.get('/', {'date_registered__isnull': 'False'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(queryset.count(), 3) self.assertEqual(list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = 
changelist.get_filters(request)[0][4] self.assertEqual(filterspec.title, 'date registered') choice = select_by(filterspec.choices(changelist), 'display', 'Has date') self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?date_registered__isnull=False') @unittest.skipIf( sys.platform.startswith('win'), "Windows doesn't support setting a timezone that differs from the " "system timezone." ) @override_settings(USE_TZ=True) def test_datefieldlistfilter_with_time_zone_support(self): # Regression for #17830 self.test_datefieldlistfilter() def test_allvaluesfieldlistfilter(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'year__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'year') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?year__isnull=True') request = self.request_factory.get('/', {'year': '2002'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'year') choices = list(filterspec.choices(changelist)) self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?year=2002') def test_allvaluesfieldlistfilter_custom_qs(self): # Make sure that correct filters are returned with custom querysets modeladmin = BookAdminWithCustomQueryset(self.alfred, Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] choices = list(filterspec.choices(changelist)) # Should have 'All', 1999 and 2009 options i.e. 
the subset of years of # books written by alfred (which is the filtering criteria set by # BookAdminWithCustomQueryset.get_queryset()) self.assertEqual(3, len(choices)) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['query_string'], '?year=1999') self.assertEqual(choices[2]['query_string'], '?year=2009') def test_relatedfieldlistfilter_foreignkey(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that all users are present in the author's list filter filterspec = changelist.get_filters(request)[0][1] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) request = self.request_factory.get('/', {'author__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'Verbose Author') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?author__isnull=True') request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'Verbose Author') # order of choices depends on User model, which has no order choice = select_by(filterspec.choices(changelist), "display", "alfred") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk) def test_relatedfieldlistfilter_foreignkey_ordering(self): """RelatedFieldListFilter ordering respects ModelAdmin.ordering.""" class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('name',) class BookAdmin(ModelAdmin): list_filter = ('employee',) site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.jack.pk, 'Jack Red'), (self.john.pk, 'John Blue')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_foreignkey_ordering_reverse(self): class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('-name',) class BookAdmin(ModelAdmin): list_filter = ('employee',) site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.john.pk, 'John Blue'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_foreignkey_default_ordering(self): """RelatedFieldListFilter ordering respects Model.ordering.""" class BookAdmin(ModelAdmin): list_filter = ('employee',) self.addCleanup(setattr, Employee._meta, 'ordering', 
Employee._meta.ordering) Employee._meta.ordering = ('name',) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(self.jack.pk, 'Jack Red'), (self.john.pk, 'John Blue')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedfieldlistfilter_manytomany(self): modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that all users are present in the contrib's list filter filterspec = changelist.get_filters(request)[0][2] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) request = self.request_factory.get('/', {'contributors__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][2] self.assertEqual(filterspec.title, 'Verbose Contributors') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True') request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][2] self.assertEqual(filterspec.title, 'Verbose Contributors') choice = select_by(filterspec.choices(changelist), "display", "bob") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk) def test_relatedfieldlistfilter_reverse_relationships(self): modeladmin = CustomUserAdmin(User, site) # FK relationship ----- request = self.request_factory.get('/', {'books_authored__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.lisa]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'book') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True') request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'book') choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title) self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk) # M2M relationship ----- request = self.request_factory.get('/', {'books_contributed__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned 
queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.alfred]) # Make sure the last choice is None and is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'book') choices = list(filterspec.choices(changelist)) self.assertIs(choices[-1]['selected'], True) self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True') request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'book') choice = select_by(filterspec.choices(changelist), "display", self.django_book.title) self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk) # With one book, the list filter should appear because there is also a # (None) option. Book.objects.exclude(pk=self.djangonaut_book.pk).delete() filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 2) # With no books remaining, no list filters should appear. Book.objects.all().delete() filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 0) def test_relatedfieldlistfilter_reverse_relationships_default_ordering(self): self.addCleanup(setattr, Book._meta, 'ordering', Book._meta.ordering) Book._meta.ordering = ('title',) modeladmin = CustomUserAdmin(User, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [ (self.bio_book.pk, 'Django: a biography'), (self.djangonaut_book.pk, 'Djangonaut: an art of living'), (self.guitar_book.pk, 'Guitar for dummies'), (self.django_book.pk, 'The Django Book') ] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_foreignkey(self): modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that only actual authors are present in author's list filter filterspec = changelist.get_filters(request)[0][4] expected = [(self.alfred.pk, 'alfred'), (self.bob.pk, 'bob')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_relatedonlyfieldlistfilter_foreignkey_reverse_relationships(self): class EmployeeAdminReverseRelationship(ModelAdmin): list_filter = ( ('book', RelatedOnlyFieldListFilter), ) self.djangonaut_book.employee = self.john self.djangonaut_book.save() self.django_book.employee = self.jack self.django_book.save() modeladmin = EmployeeAdminReverseRelationship(Employee, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.lookup_choices, [ (self.djangonaut_book.pk, 'Djangonaut: an art of living'), (self.django_book.pk, 'The Django Book'), ]) def test_relatedonlyfieldlistfilter_manytomany_reverse_relationships(self): class UserAdminReverseRelationship(ModelAdmin): list_filter = ( ('books_contributed', RelatedOnlyFieldListFilter), ) modeladmin = UserAdminReverseRelationship(User, site) request = self.request_factory.get('/') request.user = self.alfred changelist = 
modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual( filterspec.lookup_choices, [(self.guitar_book.pk, 'Guitar for dummies')], ) def test_relatedonlyfieldlistfilter_foreignkey_ordering(self): """RelatedOnlyFieldListFilter ordering respects ModelAdmin.ordering.""" class EmployeeAdminWithOrdering(ModelAdmin): ordering = ('name',) class BookAdmin(ModelAdmin): list_filter = ( ('employee', RelatedOnlyFieldListFilter), ) albert = Employee.objects.create(name='Albert Green', department=self.dev) self.djangonaut_book.employee = albert self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() site.register(Employee, EmployeeAdminWithOrdering) self.addCleanup(lambda: site.unregister(Employee)) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(albert.pk, 'Albert Green'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_foreignkey_default_ordering(self): """RelatedOnlyFieldListFilter ordering respects Meta.ordering.""" class BookAdmin(ModelAdmin): list_filter = ( ('employee', RelatedOnlyFieldListFilter), ) albert = Employee.objects.create(name='Albert Green', department=self.dev) self.djangonaut_book.employee = albert self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() self.addCleanup(setattr, Employee._meta, 'ordering', Employee._meta.ordering) Employee._meta.ordering = ('name',) modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] expected = [(albert.pk, 'Albert Green'), (self.jack.pk, 'Jack Red')] self.assertEqual(filterspec.lookup_choices, expected) def test_relatedonlyfieldlistfilter_underscorelookup_foreignkey(self): Department.objects.create(code='TEST', description='Testing') self.djangonaut_book.employee = self.john self.djangonaut_book.save() self.bio_book.employee = self.jack self.bio_book.save() modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Only actual departments should be present in employee__department's # list filter. 
filterspec = changelist.get_filters(request)[0][6] expected = [ (self.dev.code, str(self.dev)), (self.design.code, str(self.design)), ] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_relatedonlyfieldlistfilter_manytomany(self): modeladmin = BookAdminRelatedOnlyFilter(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure that only actual contributors are present in contrib's list filter filterspec = changelist.get_filters(request)[0][5] expected = [(self.bob.pk, 'bob'), (self.lisa.pk, 'lisa')] self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected)) def test_listfilter_genericrelation(self): django_bookmark = Bookmark.objects.create(url='https://www.djangoproject.com/') python_bookmark = Bookmark.objects.create(url='https://www.python.org/') kernel_bookmark = Bookmark.objects.create(url='https://www.kernel.org/') TaggedItem.objects.create(content_object=django_bookmark, tag='python') TaggedItem.objects.create(content_object=python_bookmark, tag='python') TaggedItem.objects.create(content_object=kernel_bookmark, tag='linux') modeladmin = BookmarkAdminGenericRelation(Bookmark, site) request = self.request_factory.get('/', {'tags__tag': 'python'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) expected = [python_bookmark, django_bookmark] self.assertEqual(list(queryset), expected) def test_booleanfieldlistfilter(self): modeladmin = BookAdmin(Book, site) self.verify_booleanfieldlistfilter(modeladmin) def test_booleanfieldlistfilter_tuple(self): modeladmin = BookAdminWithTupleBooleanFilter(Book, site) self.verify_booleanfieldlistfilter(modeladmin) def verify_booleanfieldlistfilter(self, modeladmin): request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) request = self.request_factory.get('/', {'is_best_seller__exact': 0}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "No") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__exact=0') request = self.request_factory.get('/', {'is_best_seller__exact': 1}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "Yes") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__exact=1') request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book]) # Make sure the correct 
choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller') choice = select_by(filterspec.choices(changelist), "display", "Unknown") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True') def test_booleanfieldlistfilter_nullbooleanfield(self): modeladmin = BookAdmin2(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) request = self.request_factory.get('/', {'is_best_seller2__exact': 0}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller2') choice = select_by(filterspec.choices(changelist), "display", "No") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller2__exact=0') request = self.request_factory.get('/', {'is_best_seller2__exact': 1}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller2') choice = select_by(filterspec.choices(changelist), "display", "Yes") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller2__exact=1') request = self.request_factory.get('/', {'is_best_seller2__isnull': 'True'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.django_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][3] self.assertEqual(filterspec.title, 'is best seller2') choice = select_by(filterspec.choices(changelist), "display", "Unknown") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?is_best_seller2__isnull=True') def test_fieldlistfilter_underscorelookup_tuple(self): """ Ensure ('fieldpath', ClassName ) lookups pass lookup_allowed checks when fieldpath contains double underscore in value (#19182). 
""" modeladmin = BookAdminWithUnderscoreLookupAndTuple(Book, site) request = self.request_factory.get('/') request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) request = self.request_factory.get('/', {'author__email': '[email protected]'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book, self.djangonaut_book]) def test_fieldlistfilter_invalid_lookup_parameters(self): """Filtering by an invalid value.""" modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'author__id__exact': 'StringNotInteger!'}) request.user = self.alfred with self.assertRaises(IncorrectLookupParameters): modeladmin.get_changelist_instance(request) def test_simplelistfilter(self): modeladmin = DecadeFilterBookAdmin(Book, site) # Make sure that the first option is 'All' --------------------------- request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id'))) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') # Look for books in the 1980s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 80s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), []) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'the 1980\'s') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s') # Look for books in the 1990s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s') # Look for books in the 2000s ---------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 00s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book]) # Make sure the correct choice is selected filterspec = 
changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[3]['display'], 'the 2000\'s') self.assertIs(choices[3]['selected'], True) self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s') # Combine multiple filters ------------------------------------------- request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.djangonaut_book]) # Make sure the correct choices are selected filterspec = changelist.get_filters(request)[0][1] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[3]['display'], 'the 2000\'s') self.assertIs(choices[3]['selected'], True) self.assertEqual( choices[3]['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk ) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'Verbose Author') choice = select_by(filterspec.choices(changelist), "display", "alfred") self.assertIs(choice['selected'], True) self.assertEqual(choice['query_string'], '?author__id__exact=%s&publication-decade=the+00s' % self.alfred.pk) def test_listfilter_without_title(self): """ Any filter must define a title. """ modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred msg = "The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'." with self.assertRaisesMessage(ImproperlyConfigured, msg): modeladmin.get_changelist_instance(request) def test_simplelistfilter_without_parameter(self): """ Any SimpleListFilter must define a parameter_name. """ modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred msg = "The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'." with self.assertRaisesMessage(ImproperlyConfigured, msg): modeladmin.get_changelist_instance(request) def test_simplelistfilter_with_none_returning_lookups(self): """ A SimpleListFilter lookups method can return None but disables the filter completely. """ modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0] self.assertEqual(len(filterspec), 0) def test_filter_with_failing_queryset(self): """ When a filter's queryset method fails, it fails loudly and the corresponding exception doesn't get swallowed (#17828). 
""" modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred with self.assertRaises(ZeroDivisionError): modeladmin.get_changelist_instance(request) def test_simplelistfilter_with_queryset_based_lookups(self): modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(len(choices), 3) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'the 1990\'s') self.assertIs(choices[1]['selected'], False) self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s') self.assertEqual(choices[2]['display'], 'the 2000\'s') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s') def test_two_characters_long_field(self): """ list_filter works with two-characters long field names (#16080). """ modeladmin = BookAdmin(Book, site) request = self.request_factory.get('/', {'no': '207'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'number') choices = list(filterspec.choices(changelist)) self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?no=207') def test_parameter_ends_with__in__or__isnull(self): """ A SimpleListFilter's parameter name is not mistaken for a model field if it ends with '__isnull' or '__in' (#17091). 
""" # When it ends with '__in' ----------------------------------------- modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site) request = self.request_factory.get('/', {'decade__in': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s') # When it ends with '__isnull' --------------------------------------- modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site) request = self.request_factory.get('/', {'decade__isnull': 'the 90s'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.bio_book]) # Make sure the correct choice is selected filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[2]['display'], 'the 1990\'s') self.assertIs(choices[2]['selected'], True) self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s') def test_lookup_with_non_string_value(self): """ Ensure choices are set the selected class when using non-string values for lookups in SimpleListFilters (#19318). """ modeladmin = DepartmentFilterEmployeeAdmin(Employee, site) request = self.request_factory.get('/', {'department': self.john.department.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'DEV') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department=%s' % self.john.department.pk) def test_lookup_with_non_string_value_underscored(self): """ Ensure SimpleListFilter lookups pass lookup_allowed checks when parameter_name attribute contains double-underscore value (#19182). """ modeladmin = DepartmentFilterUnderscoredEmployeeAdmin(Employee, site) request = self.request_factory.get('/', {'department__whatever': self.john.department.pk}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[1]['display'], 'DEV') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department__whatever=%s' % self.john.department.pk) def test_fk_with_to_field(self): """ A filter on a FK respects the FK's to_field attribute (#17972). 
""" modeladmin = EmployeeAdmin(Employee, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.jack, self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], True) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'Development') self.assertIs(choices[1]['selected'], False) self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV') self.assertEqual(choices[2]['display'], 'Design') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN') # Filter by Department=='Development' -------------------------------- request = self.request_factory.get('/', {'department__code__exact': 'DEV'}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) # Make sure the correct queryset is returned queryset = changelist.get_queryset(request) self.assertEqual(list(queryset), [self.john]) filterspec = changelist.get_filters(request)[0][-1] self.assertEqual(filterspec.title, 'department') choices = list(filterspec.choices(changelist)) self.assertEqual(choices[0]['display'], 'All') self.assertIs(choices[0]['selected'], False) self.assertEqual(choices[0]['query_string'], '?') self.assertEqual(choices[1]['display'], 'Development') self.assertIs(choices[1]['selected'], True) self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV') self.assertEqual(choices[2]['display'], 'Design') self.assertIs(choices[2]['selected'], False) self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN') def test_lookup_with_dynamic_value(self): """ Ensure SimpleListFilter can access self.value() inside the lookup. """ modeladmin = DepartmentFilterDynamicValueBookAdmin(Book, site) def _test_choices(request, expected_displays): request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) filterspec = changelist.get_filters(request)[0][0] self.assertEqual(filterspec.title, 'publication decade') choices = tuple(c['display'] for c in filterspec.choices(changelist)) self.assertEqual(choices, expected_displays) _test_choices(self.request_factory.get('/', {}), ("All", "the 1980's", "the 1990's")) _test_choices(self.request_factory.get('/', {'publication-decade': 'the 80s'}), ("All", "the 1990's")) _test_choices(self.request_factory.get('/', {'publication-decade': 'the 90s'}), ("All", "the 1980's")) def test_list_filter_queryset_filtered_by_default(self): """ A list filter that filters the queryset by default gives the correct full_result_count. """ modeladmin = NotNinetiesListFilterAdmin(Book, site) request = self.request_factory.get('/', {}) request.user = self.alfred changelist = modeladmin.get_changelist_instance(request) changelist.get_results(request) self.assertEqual(changelist.full_result_count, 4)
7636934a87f92378c4ca76eac2a1c14e89d2adbd1ab78a7cffd2dfc082a6ff07
from django.contrib.auth.models import User from django.test import override_settings from .tests import AdminDocsTestCase, TestDataMixin class XViewMiddlewareTest(TestDataMixin, AdminDocsTestCase): def test_xview_func(self): user = User.objects.get(username='super') response = self.client.head('/xview/func/') self.assertNotIn('X-View', response) self.client.force_login(self.superuser) response = self.client.head('/xview/func/') self.assertIn('X-View', response) self.assertEqual(response['X-View'], 'admin_docs.views.xview') user.is_staff = False user.save() response = self.client.head('/xview/func/') self.assertNotIn('X-View', response) user.is_staff = True user.is_active = False user.save() response = self.client.head('/xview/func/') self.assertNotIn('X-View', response) def test_xview_class(self): user = User.objects.get(username='super') response = self.client.head('/xview/class/') self.assertNotIn('X-View', response) self.client.force_login(self.superuser) response = self.client.head('/xview/class/') self.assertIn('X-View', response) self.assertEqual(response['X-View'], 'admin_docs.views.XViewClass') user.is_staff = False user.save() response = self.client.head('/xview/class/') self.assertNotIn('X-View', response) user.is_staff = True user.is_active = False user.save() response = self.client.head('/xview/class/') self.assertNotIn('X-View', response) def test_callable_object_view(self): self.client.force_login(self.superuser) response = self.client.head('/xview/callable_object/') self.assertEqual(response['X-View'], 'admin_docs.views.XViewCallableObject') @override_settings(MIDDLEWARE=[]) def test_no_auth_middleware(self): msg = ( "The XView middleware requires authentication middleware to be " "installed. Edit your MIDDLEWARE setting to insert " "'django.contrib.auth.middleware.AuthenticationMiddleware'." ) with self.assertRaisesMessage(AssertionError, msg): self.client.head('/xview/func/')
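# ---------------------------------------------------------------------------
# Illustrative settings sketch, not part of the test suite: the X-View header
# checked above is only added when both middlewares below are installed (the
# ordering shown is an assumption consistent with test_no_auth_middleware).
#
# MIDDLEWARE = [
#     'django.contrib.sessions.middleware.SessionMiddleware',
#     'django.contrib.auth.middleware.AuthenticationMiddleware',  # required
#     'django.contrib.admindocs.middleware.XViewMiddleware',
# ]
#
# With that in place, a HEAD request from an active staff user reports the
# dotted path of the view that handled it:
#
#     response = client.head('/xview/func/')
#     response['X-View']  # 'admin_docs.views.xview'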
45493a3dea2f7ba739519e1aaf4bc45f8ad50264d4e50961d808d598ec7ff957
import asyncio import sys from unittest import skipIf from asgiref.sync import async_to_sync from asgiref.testing import ApplicationCommunicator from django.core.asgi import get_asgi_application from django.core.signals import request_started from django.db import close_old_connections from django.test import SimpleTestCase, override_settings from .urls import test_filename @skipIf(sys.platform == 'win32' and (3, 8, 0) < sys.version_info < (3, 8, 1), 'https://bugs.python.org/issue38563') @override_settings(ROOT_URLCONF='asgi.urls') class ASGITest(SimpleTestCase): def setUp(self): request_started.disconnect(close_old_connections) def _get_scope(self, **kwargs): return { 'type': 'http', 'asgi': {'version': '3.0', 'spec_version': '2.1'}, 'http_version': '1.1', 'method': 'GET', 'query_string': b'', 'server': ('testserver', 80), **kwargs, } def tearDown(self): request_started.connect(close_old_connections) @async_to_sync async def test_get_asgi_application(self): """ get_asgi_application() returns a functioning ASGI callable. """ application = get_asgi_application() # Construct HTTP request. communicator = ApplicationCommunicator(application, self._get_scope(path='/')) await communicator.send_input({'type': 'http.request'}) # Read the response. response_start = await communicator.receive_output() self.assertEqual(response_start['type'], 'http.response.start') self.assertEqual(response_start['status'], 200) self.assertEqual( set(response_start['headers']), { (b'Content-Length', b'12'), (b'Content-Type', b'text/html; charset=utf-8'), }, ) response_body = await communicator.receive_output() self.assertEqual(response_body['type'], 'http.response.body') self.assertEqual(response_body['body'], b'Hello World!') @async_to_sync async def test_file_response(self): """ Makes sure that FileResponse works over ASGI. """ application = get_asgi_application() # Construct HTTP request. communicator = ApplicationCommunicator(application, self._get_scope(path='/file/')) await communicator.send_input({'type': 'http.request'}) # Get the file content. with open(test_filename, 'rb') as test_file: test_file_contents = test_file.read() # Read the response. 
response_start = await communicator.receive_output() self.assertEqual(response_start['type'], 'http.response.start') self.assertEqual(response_start['status'], 200) self.assertEqual( set(response_start['headers']), { (b'Content-Length', str(len(test_file_contents)).encode('ascii')), (b'Content-Type', b'text/plain' if sys.platform.startswith('win') else b'text/x-python'), (b'Content-Disposition', b'inline; filename="urls.py"'), }, ) response_body = await communicator.receive_output() self.assertEqual(response_body['type'], 'http.response.body') self.assertEqual(response_body['body'], test_file_contents) @async_to_sync async def test_headers(self): application = get_asgi_application() communicator = ApplicationCommunicator( application, self._get_scope( path='/meta/', headers=[ [b'content-type', b'text/plain; charset=utf-8'], [b'content-length', b'77'], [b'referer', b'Scotland'], [b'referer', b'Wales'], ], ), ) await communicator.send_input({'type': 'http.request'}) response_start = await communicator.receive_output() self.assertEqual(response_start['type'], 'http.response.start') self.assertEqual(response_start['status'], 200) self.assertEqual( set(response_start['headers']), { (b'Content-Length', b'19'), (b'Content-Type', b'text/plain; charset=utf-8'), }, ) response_body = await communicator.receive_output() self.assertEqual(response_body['type'], 'http.response.body') self.assertEqual(response_body['body'], b'From Scotland,Wales') @async_to_sync async def test_get_query_string(self): application = get_asgi_application() for query_string in (b'name=Andrew', 'name=Andrew'): with self.subTest(query_string=query_string): communicator = ApplicationCommunicator( application, self._get_scope(path='/', query_string=query_string), ) await communicator.send_input({'type': 'http.request'}) response_start = await communicator.receive_output() self.assertEqual(response_start['type'], 'http.response.start') self.assertEqual(response_start['status'], 200) response_body = await communicator.receive_output() self.assertEqual(response_body['type'], 'http.response.body') self.assertEqual(response_body['body'], b'Hello Andrew!') @async_to_sync async def test_disconnect(self): application = get_asgi_application() communicator = ApplicationCommunicator(application, self._get_scope(path='/')) await communicator.send_input({'type': 'http.disconnect'}) with self.assertRaises(asyncio.TimeoutError): await communicator.receive_output() @async_to_sync async def test_wrong_connection_type(self): application = get_asgi_application() communicator = ApplicationCommunicator( application, self._get_scope(path='/', type='other'), ) await communicator.send_input({'type': 'http.request'}) msg = 'Django can only handle ASGI/HTTP connections, not other.' with self.assertRaisesMessage(ValueError, msg): await communicator.receive_output() @async_to_sync async def test_non_unicode_query_string(self): application = get_asgi_application() communicator = ApplicationCommunicator( application, self._get_scope(path='/', query_string=b'\xff'), ) await communicator.send_input({'type': 'http.request'}) response_start = await communicator.receive_output() self.assertEqual(response_start['type'], 'http.response.start') self.assertEqual(response_start['status'], 400) response_body = await communicator.receive_output() self.assertEqual(response_body['type'], 'http.response.body') self.assertEqual(response_body['body'], b'')
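# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the test suite: the same
# ApplicationCommunicator driving pattern used above works for any ASGI
# callable, not just Django's. The tiny echo app below is an assumption for
# demonstration only.

import asyncio

from asgiref.testing import ApplicationCommunicator


async def echo_app(scope, receive, send):
    # Minimal ASGI HTTP app: consume one request event, reply with 200/"ok".
    await receive()
    await send({'type': 'http.response.start', 'status': 200, 'headers': []})
    await send({'type': 'http.response.body', 'body': b'ok'})


async def demo():
    scope = {'type': 'http', 'method': 'GET', 'path': '/', 'query_string': b'', 'headers': []}
    communicator = ApplicationCommunicator(echo_app, scope)
    await communicator.send_input({'type': 'http.request'})
    assert (await communicator.receive_output())['status'] == 200
    assert (await communicator.receive_output())['body'] == b'ok'


if __name__ == '__main__':
    asyncio.run(demo())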
4032c204ea08b7ef964293584a9dc4c051ec722f63f91da196dc370b187045a4
from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.session import SessionStorage
from django.http import HttpRequest
from django.test import TestCase
from django.utils.safestring import SafeData, mark_safe

from .base import BaseTests


def set_session_data(storage, messages):
    """
    Set the messages into the backend request's session and remove the
    backend's loaded data cache.
    """
    storage.request.session[storage.session_key] = storage.serialize_messages(messages)
    if hasattr(storage, '_loaded_data'):
        del storage._loaded_data


def stored_session_messages_count(storage):
    data = storage.deserialize_messages(storage.request.session.get(storage.session_key, []))
    return len(data)


class SessionTests(BaseTests, TestCase):
    storage_class = SessionStorage

    def get_request(self):
        self.session = {}
        request = super().get_request()
        request.session = self.session
        return request

    def stored_messages_count(self, storage, response):
        return stored_session_messages_count(storage)

    def test_no_session(self):
        msg = (
            'The session-based temporary message storage requires session '
            'middleware to be installed, and come before the message '
            'middleware in the MIDDLEWARE list.'
        )
        with self.assertRaisesMessage(AssertionError, msg):
            self.storage_class(HttpRequest())

    def test_get(self):
        storage = self.storage_class(self.get_request())
        example_messages = ['test', 'me']
        set_session_data(storage, example_messages)
        self.assertEqual(list(storage), example_messages)

    def test_safedata(self):
        """
        A message containing SafeData keeps its safe status when retrieved
        from the message storage.
        """
        storage = self.get_storage()
        message = Message(constants.DEBUG, mark_safe("<b>Hello Django!</b>"))
        set_session_data(storage, [message])
        self.assertIsInstance(list(storage)[0].message, SafeData)
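# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the test suite: how messages
# round-trip through SessionStorage in a real view, assuming the session and
# message middlewares are installed.
#
#     from django.contrib import messages
#     messages.add_message(request, messages.INFO, 'Saved.')  # queued in request.session
#     for message in messages.get_messages(request):          # reading marks the storage as used
#         print(message.level, message.message)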
62a2b03b35cc71a0c8571a3186dedf8aaa145da0fb2c411c786e505609d1b3fb
import unittest from django.db import NotSupportedError, connection, transaction from django.db.models import Count from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import Tag @skipUnlessDBFeature('supports_explaining_query_execution') class ExplainTests(TestCase): def test_basic(self): querysets = [ Tag.objects.filter(name='test'), Tag.objects.filter(name='test').select_related('parent'), Tag.objects.filter(name='test').prefetch_related('children'), Tag.objects.filter(name='test').annotate(Count('children')), Tag.objects.filter(name='test').values_list('name'), Tag.objects.order_by().union(Tag.objects.order_by().filter(name='test')), Tag.objects.all().select_for_update().filter(name='test'), ] supported_formats = connection.features.supported_explain_formats all_formats = (None,) + tuple(supported_formats) + tuple(f.lower() for f in supported_formats) for idx, queryset in enumerate(querysets): for format in all_formats: with self.subTest(format=format, queryset=idx): if connection.vendor == 'mysql': # This does a query and caches the result. connection.features.needs_explain_extended with self.assertNumQueries(1), CaptureQueriesContext(connection) as captured_queries: result = queryset.explain(format=format) self.assertTrue(captured_queries[0]['sql'].startswith(connection.ops.explain_prefix)) self.assertIsInstance(result, str) self.assertTrue(result) @skipUnlessDBFeature('validates_explain_options') def test_unknown_options(self): with self.assertRaisesMessage(ValueError, 'Unknown options: test, test2'): Tag.objects.all().explain(test=1, test2=1) def test_unknown_format(self): msg = 'DOES NOT EXIST is not a recognized format.' if connection.features.supported_explain_formats: msg += ' Allowed formats: %s' % ', '.join(sorted(connection.features.supported_explain_formats)) with self.assertRaisesMessage(ValueError, msg): Tag.objects.all().explain(format='does not exist') @unittest.skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific') def test_postgres_options(self): qs = Tag.objects.filter(name='test') test_options = [ {'COSTS': False, 'BUFFERS': True, 'ANALYZE': True}, {'costs': False, 'buffers': True, 'analyze': True}, {'verbose': True, 'timing': True, 'analyze': True}, {'verbose': False, 'timing': False, 'analyze': True}, ] if connection.features.is_postgresql_10: test_options.append({'summary': True}) if connection.features.is_postgresql_12: test_options.append({'settings': True}) for options in test_options: with self.subTest(**options), transaction.atomic(): with CaptureQueriesContext(connection) as captured_queries: qs.explain(format='text', **options) self.assertEqual(len(captured_queries), 1) for name, value in options.items(): option = '{} {}'.format(name.upper(), 'true' if value else 'false') self.assertIn(option, captured_queries[0]['sql']) @unittest.skipUnless(connection.vendor == 'mysql', 'MySQL specific') def test_mysql_text_to_traditional(self): # Ensure these cached properties are initialized to prevent queries for # the MariaDB or MySQL version during the QuerySet evaluation. 
connection.features.needs_explain_extended connection.features.supported_explain_formats with CaptureQueriesContext(connection) as captured_queries: Tag.objects.filter(name='test').explain(format='text') self.assertEqual(len(captured_queries), 1) self.assertIn('FORMAT=TRADITIONAL', captured_queries[0]['sql']) @unittest.skipUnless(connection.vendor == 'mysql', 'MariaDB and MySQL >= 8.0.18 specific.') def test_mysql_analyze(self): # Inner skip to avoid module level query for MySQL version. if not connection.features.supports_explain_analyze: raise unittest.SkipTest('MariaDB and MySQL >= 8.0.18 specific.') qs = Tag.objects.filter(name='test') with CaptureQueriesContext(connection) as captured_queries: qs.explain(analyze=True) self.assertEqual(len(captured_queries), 1) prefix = 'ANALYZE ' if connection.mysql_is_mariadb else 'EXPLAIN ANALYZE ' self.assertTrue(captured_queries[0]['sql'].startswith(prefix)) with CaptureQueriesContext(connection) as captured_queries: qs.explain(analyze=True, format='JSON') self.assertEqual(len(captured_queries), 1) if connection.mysql_is_mariadb: self.assertIn('FORMAT=JSON', captured_queries[0]['sql']) else: self.assertNotIn('FORMAT=JSON', captured_queries[0]['sql']) @unittest.skipUnless(connection.vendor == 'mysql', 'MySQL < 5.7 specific') def test_mysql_extended(self): # Inner skip to avoid module level query for MySQL version. if not connection.features.needs_explain_extended: raise unittest.SkipTest('MySQL < 5.7 specific') qs = Tag.objects.filter(name='test') with CaptureQueriesContext(connection) as captured_queries: qs.explain(format='json') self.assertEqual(len(captured_queries), 1) self.assertNotIn('EXTENDED', captured_queries[0]['sql']) with CaptureQueriesContext(connection) as captured_queries: qs.explain(format='text') self.assertEqual(len(captured_queries), 1) self.assertNotIn('EXTENDED', captured_queries[0]['sql']) @skipIfDBFeature('supports_explaining_query_execution') class ExplainUnsupportedTests(TestCase): def test_message(self): msg = 'This backend does not support explaining query execution.' with self.assertRaisesMessage(NotSupportedError, msg): Tag.objects.filter(name='test').explain()
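# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the test suite: QuerySet.explain()
# returns the database's plan for the query as a string; which formats and
# options are accepted depends on the backend, as the tests above verify.
#
#     Tag.objects.filter(name='test').explain()               # backend's default format
#     Tag.objects.filter(name='test').explain(format='JSON')  # if the backend supports it
#     Tag.objects.filter(name='test').explain(analyze=True)   # e.g. PostgreSQL EXPLAIN ANALYZE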
9946be52aee6b8b0fba730060489527ebb9d9a5da3da10a017b3f421b02aac13
import datetime from unittest import skipIf, skipUnless from django.db import connection from django.db.models import Index from django.db.models.deletion import CASCADE from django.db.models.fields.related import ForeignKey from django.db.models.query_utils import Q from django.test import ( TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import override_settings from django.utils import timezone from .models import ( Article, ArticleTranslation, IndexedArticle2, IndexTogetherSingleList, ) class SchemaIndexesTests(TestCase): """ Test index handling by the db.backends.schema infrastructure. """ def test_index_name_hash(self): """ Index names should be deterministic. """ editor = connection.schema_editor() index_name = editor._create_index_name( table_name=Article._meta.db_table, column_names=("c1",), suffix="123", ) self.assertEqual(index_name, "indexes_article_c1_a52bd80b123") def test_index_name(self): """ Index names on the built-in database backends:: * Are truncated as needed. * Include all the column names. * Include a deterministic hash. """ long_name = 'l%sng' % ('o' * 100) editor = connection.schema_editor() index_name = editor._create_index_name( table_name=Article._meta.db_table, column_names=('c1', 'c2', long_name), suffix='ix', ) expected = { 'mysql': 'indexes_article_c1_c2_looooooooooooooooooo_255179b2ix', 'oracle': 'indexes_a_c1_c2_loo_255179b2ix', 'postgresql': 'indexes_article_c1_c2_loooooooooooooooooo_255179b2ix', 'sqlite': 'indexes_article_c1_c2_l%sng_255179b2ix' % ('o' * 100), } if connection.vendor not in expected: self.skipTest('This test is only supported on the built-in database backends.') self.assertEqual(index_name, expected[connection.vendor]) def test_index_together(self): editor = connection.schema_editor() index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)] self.assertEqual(len(index_sql), 1) # Ensure the index name is properly quoted self.assertIn( connection.ops.quote_name( editor._create_index_name(Article._meta.db_table, ['headline', 'pub_date'], suffix='_idx') ), index_sql[0] ) def test_index_together_single_list(self): # Test for using index_together with a single list (#22172) index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList) self.assertEqual(len(index_sql), 1) @skipIf(connection.vendor == 'postgresql', 'opclasses are PostgreSQL only') class SchemaIndexesNotPostgreSQLTests(TransactionTestCase): available_apps = ['indexes'] def test_create_index_ignores_opclasses(self): index = Index( name='test_ops_class', fields=['headline'], opclasses=['varchar_pattern_ops'], ) with connection.schema_editor() as editor: # This would error if opclasses weren't ignored. editor.add_index(IndexedArticle2, index) # The `condition` parameter is ignored by databases that don't support partial # indexes. @skipIfDBFeature('supports_partial_indexes') class PartialIndexConditionIgnoredTests(TransactionTestCase): available_apps = ['indexes'] def test_condition_ignored(self): index = Index( name='test_condition_ignored', fields=['published'], condition=Q(published=True), ) with connection.schema_editor() as editor: # This would error if condition weren't ignored. 
editor.add_index(Article, index) self.assertNotIn( 'WHERE %s' % editor.quote_name('published'), str(index.create_sql(Article, editor)) ) @skipUnless(connection.vendor == 'postgresql', 'PostgreSQL tests') class SchemaIndexesPostgreSQLTests(TransactionTestCase): available_apps = ['indexes'] get_opclass_query = ''' SELECT opcname, c.relname FROM pg_opclass AS oc JOIN pg_index as i on oc.oid = ANY(i.indclass) JOIN pg_class as c on c.oid = i.indexrelid WHERE c.relname = '%s' ''' def test_text_indexes(self): """Test creation of PostgreSQL-specific text indexes (#12234)""" from .models import IndexedArticle index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(IndexedArticle)] self.assertEqual(len(index_sql), 5) self.assertIn('("headline" varchar_pattern_ops)', index_sql[1]) self.assertIn('("body" text_pattern_ops)', index_sql[3]) # unique=True and db_index=True should only create the varchar-specific # index (#19441). self.assertIn('("slug" varchar_pattern_ops)', index_sql[4]) def test_virtual_relation_indexes(self): """Test indexes are not created for related objects""" index_sql = connection.schema_editor()._model_indexes_sql(Article) self.assertEqual(len(index_sql), 1) def test_ops_class(self): index = Index( name='test_ops_class', fields=['headline'], opclasses=['varchar_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class') self.assertEqual(cursor.fetchall(), [('varchar_pattern_ops', 'test_ops_class')]) def test_ops_class_multiple_columns(self): index = Index( name='test_ops_class_multiple', fields=['headline', 'body'], opclasses=['varchar_pattern_ops', 'text_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class_multiple') expected_ops_classes = ( ('varchar_pattern_ops', 'test_ops_class_multiple'), ('text_pattern_ops', 'test_ops_class_multiple'), ) self.assertCountEqual(cursor.fetchall(), expected_ops_classes) def test_ops_class_partial(self): index = Index( name='test_ops_class_partial', fields=['body'], opclasses=['text_pattern_ops'], condition=Q(headline__contains='China'), ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % 'test_ops_class_partial') self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', 'test_ops_class_partial')]) def test_ops_class_partial_tablespace(self): indexname = 'test_ops_class_tblspace' index = Index( name=indexname, fields=['body'], opclasses=['text_pattern_ops'], condition=Q(headline__contains='China'), db_tablespace='pg_default', ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(IndexedArticle2, editor))) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)]) def test_ops_class_descending(self): indexname = 'test_ops_class_ordered' index = Index( name=indexname, fields=['-body'], opclasses=['text_pattern_ops'], ) with connection.schema_editor() as editor: editor.add_index(IndexedArticle2, index) with editor.connection.cursor() as cursor: cursor.execute(self.get_opclass_query % indexname) 
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])

    def test_ops_class_descending_partial(self):
        indexname = 'test_ops_class_ordered_partial'
        index = Index(
            name=indexname,
            fields=['-body'],
            opclasses=['text_pattern_ops'],
            condition=Q(headline__contains='China'),
        )
        with connection.schema_editor() as editor:
            editor.add_index(IndexedArticle2, index)
        with editor.connection.cursor() as cursor:
            cursor.execute(self.get_opclass_query % indexname)
            self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])


@skipUnless(connection.vendor == 'mysql', 'MySQL tests')
class SchemaIndexesMySQLTests(TransactionTestCase):
    available_apps = ['indexes']

    def test_no_index_for_foreignkey(self):
        """
        MySQL on InnoDB already creates indexes automatically for foreign
        keys (#14180). An index should be created if db_constraint=False
        (#26171).
        """
        storage = connection.introspection.get_storage_engine(
            connection.cursor(), ArticleTranslation._meta.db_table
        )
        if storage != "InnoDB":
            self.skipTest("This test only applies to the InnoDB storage engine")
        index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(ArticleTranslation)]
        self.assertEqual(index_sql, [
            'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
            'ON `indexes_articletranslation` (`article_no_constraint_id`)'
        ])

        # The index also shouldn't be created if the ForeignKey is added after
        # the model was created.
        field_created = False
        try:
            with connection.schema_editor() as editor:
                new_field = ForeignKey(Article, CASCADE)
                new_field.set_attributes_from_name('new_foreign_key')
                editor.add_field(ArticleTranslation, new_field)
                field_created = True
                # No deferred SQL. The FK constraint is included in the
                # statement to add the field.
                self.assertFalse(editor.deferred_sql)
        finally:
            if field_created:
                with connection.schema_editor() as editor:
                    editor.remove_field(ArticleTranslation, new_field)


@skipUnlessDBFeature('supports_partial_indexes')
# SQLite doesn't support timezone-aware datetimes when USE_TZ is False.
@override_settings(USE_TZ=True)
class PartialIndexTests(TransactionTestCase):
    # Schema editor is used to create the index to test that it works.
    available_apps = ['indexes']

    def test_partial_index(self):
        with connection.schema_editor() as editor:
            index = Index(
                name='recent_article_idx',
                fields=['pub_date'],
                condition=Q(
                    pub_date__gt=datetime.datetime(
                        year=2015, month=1, day=1,
                        # PostgreSQL would otherwise complain about the lookup
                        # being converted to a mutable function (by removing
                        # the timezone in the cast) which is forbidden.
tzinfo=timezone.get_current_timezone(), ), ) ) self.assertIn( 'WHERE %s' % editor.quote_name('pub_date'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) self.assertIn(index.name, connection.introspection.get_constraints( cursor=connection.cursor(), table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) def test_integer_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['id'], condition=Q(pk__gt=1), ) self.assertIn( 'WHERE %s' % editor.quote_name('id'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) self.assertIn(index.name, connection.introspection.get_constraints( cursor=connection.cursor(), table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) def test_boolean_restriction_partial(self): with connection.schema_editor() as editor: index = Index( name='published_index', fields=['published'], condition=Q(published=True), ) self.assertIn( 'WHERE %s' % editor.quote_name('published'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) self.assertIn(index.name, connection.introspection.get_constraints( cursor=connection.cursor(), table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) @skipUnlessDBFeature('supports_functions_in_partial_indexes') def test_multiple_conditions(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['pub_date', 'headline'], condition=( Q(pub_date__gt=datetime.datetime( year=2015, month=1, day=1, tzinfo=timezone.get_current_timezone(), )) & Q(headline__contains='China') ), ) sql = str(index.create_sql(Article, schema_editor=editor)) where = sql.find('WHERE') self.assertIn( 'WHERE (%s' % editor.quote_name('pub_date'), sql ) # Because each backend has different syntax for the operators, # check ONLY the occurrence of headline in the SQL. self.assertGreater(sql.rfind('headline'), where) editor.add_index(index=index, model=Article) self.assertIn(index.name, connection.introspection.get_constraints( cursor=connection.cursor(), table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article) def test_is_null_condition(self): with connection.schema_editor() as editor: index = Index( name='recent_article_idx', fields=['pub_date'], condition=Q(pub_date__isnull=False), ) self.assertIn( 'WHERE %s IS NOT NULL' % editor.quote_name('pub_date'), str(index.create_sql(Article, schema_editor=editor)) ) editor.add_index(index=index, model=Article) self.assertIn(index.name, connection.introspection.get_constraints( cursor=connection.cursor(), table_name=Article._meta.db_table, )) editor.remove_index(index=index, model=Article)
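# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the test suite: the partial indexes built
# by hand with the schema editor above are usually declared in Meta.indexes
# instead. Model, field, and app label names here are assumptions.

from django.db import models
from django.db.models import Index, Q


class ArticleSketch(models.Model):
    pub_date = models.DateTimeField()
    published = models.BooleanField(default=False)

    class Meta:
        app_label = 'indexes'  # assumption: any installed app label works
        indexes = [
            # Only rows matching the condition are indexed; the condition is
            # ignored on backends without supports_partial_indexes.
            Index(name='published_pub_date_idx', fields=['pub_date'], condition=Q(published=True)),
        ]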
42aae285fa02ed7287b6e351d93edb1948bc7c417d876872a3d02ff73a926e86
from math import ceil from operator import attrgetter from django.db import IntegrityError, NotSupportedError, connection from django.db.models import FileField, Value from django.db.models.functions import Lower from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from .models import ( Country, NoFields, NullableFields, Pizzeria, ProxyCountry, ProxyMultiCountry, ProxyMultiProxyCountry, ProxyProxyCountry, Restaurant, State, TwoFields, ) class BulkCreateTests(TestCase): def setUp(self): self.data = [ Country(name="United States of America", iso_two_letter="US"), Country(name="The Netherlands", iso_two_letter="NL"), Country(name="Germany", iso_two_letter="DE"), Country(name="Czech Republic", iso_two_letter="CZ") ] def test_simple(self): created = Country.objects.bulk_create(self.data) self.assertEqual(len(created), 4) self.assertQuerysetEqual(Country.objects.order_by("-name"), [ "United States of America", "The Netherlands", "Germany", "Czech Republic" ], attrgetter("name")) created = Country.objects.bulk_create([]) self.assertEqual(created, []) self.assertEqual(Country.objects.count(), 4) @skipUnlessDBFeature('has_bulk_insert') def test_efficiency(self): with self.assertNumQueries(1): Country.objects.bulk_create(self.data) @skipUnlessDBFeature('has_bulk_insert') def test_long_non_ascii_text(self): """ Inserting non-ASCII values with a length in the range 2001 to 4000 characters, i.e. 4002 to 8000 bytes, must be set as a CLOB on Oracle (#22144). """ Country.objects.bulk_create([Country(description='Ж' * 3000)]) self.assertEqual(Country.objects.count(), 1) @skipUnlessDBFeature('has_bulk_insert') def test_long_and_short_text(self): Country.objects.bulk_create([ Country(description='a' * 4001), Country(description='a'), Country(description='Ж' * 2001), Country(description='Ж'), ]) self.assertEqual(Country.objects.count(), 4) def test_multi_table_inheritance_unsupported(self): expected_message = "Can't bulk create a multi-table inherited model" with self.assertRaisesMessage(ValueError, expected_message): Pizzeria.objects.bulk_create([ Pizzeria(name="The Art of Pizza"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiCountry.objects.bulk_create([ ProxyMultiCountry(name="Fillory", iso_two_letter="FL"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiProxyCountry.objects.bulk_create([ ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"), ]) def test_proxy_inheritance_supported(self): ProxyCountry.objects.bulk_create([ ProxyCountry(name="Qwghlm", iso_two_letter="QW"), Country(name="Tortall", iso_two_letter="TA"), ]) self.assertQuerysetEqual(ProxyCountry.objects.all(), { "Qwghlm", "Tortall" }, attrgetter("name"), ordered=False) ProxyProxyCountry.objects.bulk_create([ ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"), ]) self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), { "Qwghlm", "Tortall", "Netherlands", }, attrgetter("name"), ordered=False) def test_non_auto_increment_pk(self): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", "ME", "NY", ], attrgetter("two_letter_code")) @skipUnlessDBFeature('has_bulk_insert') def test_non_auto_increment_pk_efficiency(self): with self.assertNumQueries(1): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", 
"ME", "NY", ], attrgetter("two_letter_code")) @skipIfDBFeature('allows_auto_pk_0') def test_zero_as_autoval(self): """ Zero as id for AutoField should raise exception in MySQL, because MySQL does not allow zero for automatic primary key. """ valid_country = Country(name='Germany', iso_two_letter='DE') invalid_country = Country(id=0, name='Poland', iso_two_letter='PL') msg = 'The database backend does not accept 0 as a value for AutoField.' with self.assertRaisesMessage(ValueError, msg): Country.objects.bulk_create([valid_country, invalid_country]) def test_batch_same_vals(self): # SQLite had a problem where all the same-valued models were # collapsed to one insert. Restaurant.objects.bulk_create([ Restaurant(name='foo') for i in range(0, 2) ]) self.assertEqual(Restaurant.objects.count(), 2) def test_large_batch(self): TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertEqual(TwoFields.objects.count(), 1001) self.assertEqual( TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(), 101) self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101) @skipUnlessDBFeature('has_bulk_insert') def test_large_single_field_batch(self): # SQLite had a problem with more than 500 UNIONed selects in single # query. Restaurant.objects.bulk_create([ Restaurant() for i in range(0, 501) ]) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_efficiency(self): with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertLess(len(connection.queries), 10) def test_large_batch_mixed(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. """ TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000) ]) self.assertEqual(TwoFields.objects.count(), 1000) # We can't assume much about the ID's created, except that the above # created IDs must exist. id_range = range(100000, 101000, 2) self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500) self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_mixed_efficiency(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. 
""" with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000)]) self.assertLess(len(connection.queries), 10) def test_explicit_batch_size(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)] num_objs = len(objs) TwoFields.objects.bulk_create(objs, batch_size=1) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=2) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=3) self.assertEqual(TwoFields.objects.count(), num_objs) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, batch_size=num_objs) self.assertEqual(TwoFields.objects.count(), num_objs) def test_empty_model(self): NoFields.objects.bulk_create([NoFields() for i in range(2)]) self.assertEqual(NoFields.objects.count(), 2) @skipUnlessDBFeature('has_bulk_insert') def test_explicit_batch_size_efficiency(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)] with self.assertNumQueries(2): TwoFields.objects.bulk_create(objs, 50) TwoFields.objects.all().delete() with self.assertNumQueries(1): TwoFields.objects.bulk_create(objs, len(objs)) @skipUnlessDBFeature('has_bulk_insert') def test_explicit_batch_size_respects_max_batch_size(self): objs = [Country() for i in range(1000)] fields = ['name', 'iso_two_letter', 'description'] max_batch_size = max(connection.ops.bulk_batch_size(fields, objs), 1) with self.assertNumQueries(ceil(len(objs) / max_batch_size)): Country.objects.bulk_create(objs, batch_size=max_batch_size + 1) @skipUnlessDBFeature('has_bulk_insert') def test_bulk_insert_expressions(self): Restaurant.objects.bulk_create([ Restaurant(name="Sam's Shake Shack"), Restaurant(name=Lower(Value("Betty's Beetroot Bar"))) ]) bbb = Restaurant.objects.filter(name="betty's beetroot bar") self.assertEqual(bbb.count(), 1) @skipUnlessDBFeature('has_bulk_insert') def test_bulk_insert_nullable_fields(self): # NULL can be mixed with other values in nullable fields nullable_fields = [field for field in NullableFields._meta.get_fields() if field.name != 'id'] NullableFields.objects.bulk_create([ NullableFields(**{field.name: None}) for field in nullable_fields ]) self.assertEqual(NullableFields.objects.count(), len(nullable_fields)) for field in nullable_fields: with self.subTest(field=field): field_value = '' if isinstance(field, FileField) else None self.assertEqual(NullableFields.objects.filter(**{field.name: field_value}).count(), 1) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_pk_and_insert_single_item(self): with self.assertNumQueries(1): countries = Country.objects.bulk_create([self.data[0]]) self.assertEqual(len(countries), 1) self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0]) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_pk_and_query_efficiency(self): with self.assertNumQueries(1): countries = Country.objects.bulk_create(self.data) self.assertEqual(len(countries), 4) self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0]) self.assertEqual(Country.objects.get(pk=countries[1].pk), countries[1]) self.assertEqual(Country.objects.get(pk=countries[2].pk), countries[2]) self.assertEqual(Country.objects.get(pk=countries[3].pk), countries[3]) @skipUnlessDBFeature('can_return_rows_from_bulk_insert') def test_set_state(self): country_nl = 
Country(name='Netherlands', iso_two_letter='NL')
        country_be = Country(name='Belgium', iso_two_letter='BE')
        Country.objects.bulk_create([country_nl])
        country_be.save()
        # Objects saved via bulk_create() and save() should have equal state.
        self.assertEqual(country_nl._state.adding, country_be._state.adding)
        self.assertEqual(country_nl._state.db, country_be._state.db)

    def test_set_state_with_pk_specified(self):
        state_ca = State(two_letter_code='CA')
        state_ny = State(two_letter_code='NY')
        State.objects.bulk_create([state_ca])
        state_ny.save()
        # Objects saved via bulk_create() and save() should have equal state.
        self.assertEqual(state_ca._state.adding, state_ny._state.adding)
        self.assertEqual(state_ca._state.db, state_ny._state.db)

    @skipIfDBFeature('supports_ignore_conflicts')
    def test_ignore_conflicts_value_error(self):
        message = 'This database backend does not support ignoring conflicts.'
        with self.assertRaisesMessage(NotSupportedError, message):
            TwoFields.objects.bulk_create(self.data, ignore_conflicts=True)

    @skipUnlessDBFeature('supports_ignore_conflicts')
    def test_ignore_conflicts_ignore(self):
        data = [
            TwoFields(f1=1, f2=1),
            TwoFields(f1=2, f2=2),
            TwoFields(f1=3, f2=3),
        ]
        TwoFields.objects.bulk_create(data)
        self.assertEqual(TwoFields.objects.count(), 3)
        # With ignore_conflicts=True, conflicts are ignored.
        conflicting_objects = [
            TwoFields(f1=2, f2=2),
            TwoFields(f1=3, f2=3),
        ]
        TwoFields.objects.bulk_create([conflicting_objects[0]], ignore_conflicts=True)
        TwoFields.objects.bulk_create(conflicting_objects, ignore_conflicts=True)
        self.assertEqual(TwoFields.objects.count(), 3)
        self.assertIsNone(conflicting_objects[0].pk)
        self.assertIsNone(conflicting_objects[1].pk)
        # New objects are created and conflicts are ignored.
        new_object = TwoFields(f1=4, f2=4)
        TwoFields.objects.bulk_create(conflicting_objects + [new_object], ignore_conflicts=True)
        self.assertEqual(TwoFields.objects.count(), 4)
        self.assertIsNone(new_object.pk)
        # Without ignore_conflicts=True, re-inserting the conflicting rows
        # raises IntegrityError.
        with self.assertRaises(IntegrityError):
            TwoFields.objects.bulk_create(conflicting_objects)
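# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the test suite, summarizing the
# bulk_create() behavior the tests above pin down:
#
#     objs = [Country(name='X%d' % i, iso_two_letter='X%d' % i) for i in range(3)]
#     Country.objects.bulk_create(objs)                         # one INSERT where has_bulk_insert
#     Country.objects.bulk_create(objs, batch_size=100)         # split into batches of 100
#     Country.objects.bulk_create(objs, ignore_conflicts=True)  # skip duplicates; PKs stay unset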
8963b78621bc8bf7d9653fc3952bd2371f8686fc0461db8a660656113b7f0897
from math import ceil from django.db import IntegrityError, connection, models from django.db.models.deletion import Collector from django.db.models.sql.constants import GET_ITERATOR_CHUNK_SIZE from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature from .models import ( MR, A, Avatar, Base, Child, HiddenUser, HiddenUserProfile, M, M2MFrom, M2MTo, MRNull, Origin, Parent, R, RChild, RChildChild, Referrer, S, T, User, create_a, get_default_r, ) class OnDeleteTests(TestCase): def setUp(self): self.DEFAULT = get_default_r() def test_auto(self): a = create_a('auto') a.auto.delete() self.assertFalse(A.objects.filter(name='auto').exists()) def test_non_callable(self): msg = 'on_delete must be callable.' with self.assertRaisesMessage(TypeError, msg): models.ForeignKey('self', on_delete=None) with self.assertRaisesMessage(TypeError, msg): models.OneToOneField('self', on_delete=None) def test_auto_nullable(self): a = create_a('auto_nullable') a.auto_nullable.delete() self.assertFalse(A.objects.filter(name='auto_nullable').exists()) def test_setvalue(self): a = create_a('setvalue') a.setvalue.delete() a = A.objects.get(pk=a.pk) self.assertEqual(self.DEFAULT, a.setvalue.pk) def test_setnull(self): a = create_a('setnull') a.setnull.delete() a = A.objects.get(pk=a.pk) self.assertIsNone(a.setnull) def test_setdefault(self): a = create_a('setdefault') a.setdefault.delete() a = A.objects.get(pk=a.pk) self.assertEqual(self.DEFAULT, a.setdefault.pk) def test_setdefault_none(self): a = create_a('setdefault_none') a.setdefault_none.delete() a = A.objects.get(pk=a.pk) self.assertIsNone(a.setdefault_none) def test_cascade(self): a = create_a('cascade') a.cascade.delete() self.assertFalse(A.objects.filter(name='cascade').exists()) def test_cascade_nullable(self): a = create_a('cascade_nullable') a.cascade_nullable.delete() self.assertFalse(A.objects.filter(name='cascade_nullable').exists()) def test_protect(self): a = create_a('protect') msg = ( "Cannot delete some instances of model 'R' because they are " "referenced through a protected foreign key: 'A.protect'" ) with self.assertRaisesMessage(IntegrityError, msg): a.protect.delete() def test_do_nothing(self): # Testing DO_NOTHING is a bit harder: It would raise IntegrityError for a normal model, # so we connect to pre_delete and set the fk to a known value. replacement_r = R.objects.create() def check_do_nothing(sender, **kwargs): obj = kwargs['instance'] obj.donothing_set.update(donothing=replacement_r) models.signals.pre_delete.connect(check_do_nothing) a = create_a('do_nothing') a.donothing.delete() a = A.objects.get(pk=a.pk) self.assertEqual(replacement_r, a.donothing) models.signals.pre_delete.disconnect(check_do_nothing) def test_do_nothing_qscount(self): """ A models.DO_NOTHING relation doesn't trigger a query. """ b = Base.objects.create() with self.assertNumQueries(1): # RelToBase should not be queried. 
b.delete()
        self.assertEqual(Base.objects.count(), 0)

    def test_inheritance_cascade_up(self):
        child = RChild.objects.create()
        child.delete()
        self.assertFalse(R.objects.filter(pk=child.pk).exists())

    def test_inheritance_cascade_down(self):
        child = RChild.objects.create()
        parent = child.r_ptr
        parent.delete()
        self.assertFalse(RChild.objects.filter(pk=child.pk).exists())

    def test_cascade_from_child(self):
        a = create_a('child')
        a.child.delete()
        self.assertFalse(A.objects.filter(name='child').exists())
        self.assertFalse(R.objects.filter(pk=a.child_id).exists())

    def test_cascade_from_parent(self):
        a = create_a('child')
        R.objects.get(pk=a.child_id).delete()
        self.assertFalse(A.objects.filter(name='child').exists())
        self.assertFalse(RChild.objects.filter(pk=a.child_id).exists())

    def test_setnull_from_child(self):
        a = create_a('child_setnull')
        a.child_setnull.delete()
        self.assertFalse(R.objects.filter(pk=a.child_setnull_id).exists())
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.child_setnull)

    def test_setnull_from_parent(self):
        a = create_a('child_setnull')
        R.objects.get(pk=a.child_setnull_id).delete()
        self.assertFalse(RChild.objects.filter(pk=a.child_setnull_id).exists())
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.child_setnull)

    def test_o2o_setnull(self):
        a = create_a('o2o_setnull')
        a.o2o_setnull.delete()
        a = A.objects.get(pk=a.pk)
        self.assertIsNone(a.o2o_setnull)


class DeletionTests(TestCase):

    def test_m2m(self):
        m = M.objects.create()
        r = R.objects.create()
        MR.objects.create(m=m, r=r)
        r.delete()
        self.assertFalse(MR.objects.exists())

        r = R.objects.create()
        MR.objects.create(m=m, r=r)
        m.delete()
        self.assertFalse(MR.objects.exists())

        m = M.objects.create()
        r = R.objects.create()
        m.m2m.add(r)
        r.delete()
        through = M._meta.get_field('m2m').remote_field.through
        self.assertFalse(through.objects.exists())

        r = R.objects.create()
        m.m2m.add(r)
        m.delete()
        self.assertFalse(through.objects.exists())

        m = M.objects.create()
        r = R.objects.create()
        MRNull.objects.create(m=m, r=r)
        r.delete()
        # The nullable through row survives with r set to NULL, so it no
        # longer links m to any R instance.
        self.assertTrue(MRNull.objects.exists())
        self.assertFalse(m.m2m_through_null.exists())

    def test_bulk(self):
        s = S.objects.create(r=R.objects.create())
        for i in range(2 * GET_ITERATOR_CHUNK_SIZE):
            T.objects.create(s=s)
        # 1 (select related `T` instances)
        # + 1 (select related `U` instances)
        # + 2 (delete `T` instances in batches)
        # + 1 (delete `s`)
        self.assertNumQueries(5, s.delete)
        self.assertFalse(S.objects.exists())

    def test_instance_update(self):
        deleted = []
        related_setnull_sets = []

        def pre_delete(sender, **kwargs):
            obj = kwargs['instance']
            deleted.append(obj)
            if isinstance(obj, R):
                related_setnull_sets.append([a.pk for a in obj.setnull_set.all()])

        models.signals.pre_delete.connect(pre_delete)
        a = create_a('update_setnull')
        a.setnull.delete()

        a = create_a('update_cascade')
        a.cascade.delete()

        for obj in deleted:
            self.assertIsNone(obj.pk)

        for pk_list in related_setnull_sets:
            for a in A.objects.filter(id__in=pk_list):
                self.assertIsNone(a.setnull)

        models.signals.pre_delete.disconnect(pre_delete)

    def test_deletion_order(self):
        pre_delete_order = []
        post_delete_order = []

        def log_post_delete(sender, **kwargs):
            post_delete_order.append((sender, kwargs['instance'].pk))

        def log_pre_delete(sender, **kwargs):
            pre_delete_order.append((sender, kwargs['instance'].pk))

        models.signals.post_delete.connect(log_post_delete)
        models.signals.pre_delete.connect(log_pre_delete)

        r = R.objects.create(pk=1)
        s1 = S.objects.create(pk=1, r=r)
        s2 = S.objects.create(pk=2, r=r)
        T.objects.create(pk=1, s=s1)
        T.objects.create(pk=2, s=s2)
RChild.objects.create(r_ptr=r)
        r.delete()

        self.assertEqual(
            pre_delete_order, [(T, 1), (T, 2), (RChild, 1), (S, 1), (S, 2), (R, 1)]
        )
        self.assertEqual(
            post_delete_order, [(T, 2), (T, 1), (RChild, 1), (S, 2), (S, 1), (R, 1)]
        )

        models.signals.post_delete.disconnect(log_post_delete)
        models.signals.pre_delete.disconnect(log_pre_delete)

    def test_relational_post_delete_signals_happen_before_parent_object(self):
        deletions = []

        def log_post_delete(instance, **kwargs):
            self.assertTrue(R.objects.filter(pk=instance.r_id))
            self.assertIs(type(instance), S)
            deletions.append(instance.id)

        r = R.objects.create(pk=1)
        S.objects.create(pk=1, r=r)

        models.signals.post_delete.connect(log_post_delete, sender=S)

        try:
            r.delete()
        finally:
            models.signals.post_delete.disconnect(log_post_delete)

        self.assertEqual(len(deletions), 1)
        self.assertEqual(deletions[0], 1)

    @skipUnlessDBFeature("can_defer_constraint_checks")
    def test_can_defer_constraint_checks(self):
        u = User.objects.create(
            avatar=Avatar.objects.create()
        )
        a = Avatar.objects.get(pk=u.avatar_id)
        # 1 query to find the users for the avatar.
        # 1 query to delete the user
        # 1 query to delete the avatar
        # The important thing is that when we can defer constraint checks there
        # is no need to do an UPDATE on User.avatar to null it out.

        # Attach a signal to make sure we will not do fast_deletes.
        calls = []

        def noop(*args, **kwargs):
            calls.append('')
        models.signals.post_delete.connect(noop, sender=User)

        self.assertNumQueries(3, a.delete)
        self.assertFalse(User.objects.exists())
        self.assertFalse(Avatar.objects.exists())
        self.assertEqual(len(calls), 1)
        models.signals.post_delete.disconnect(noop, sender=User)

    @skipIfDBFeature("can_defer_constraint_checks")
    def test_cannot_defer_constraint_checks(self):
        u = User.objects.create(
            avatar=Avatar.objects.create()
        )
        # Attach a signal to make sure we will not do fast_deletes.
        calls = []

        def noop(*args, **kwargs):
            calls.append('')
        models.signals.post_delete.connect(noop, sender=User)

        a = Avatar.objects.get(pk=u.avatar_id)
        # The below doesn't make sense... Why do we need to null out
        # user.avatar if we are going to delete the user immediately after it,
        # and there are no more cascades.
        # 1 query to find the users for the avatar.
        # 1 query to delete the user
        # 1 query to null out user.avatar, because we can't defer the constraint
        # 1 query to delete the avatar
        self.assertNumQueries(4, a.delete)
        self.assertFalse(User.objects.exists())
        self.assertFalse(Avatar.objects.exists())
        self.assertEqual(len(calls), 1)
        models.signals.post_delete.disconnect(noop, sender=User)

    def test_hidden_related(self):
        r = R.objects.create()
        h = HiddenUser.objects.create(r=r)
        HiddenUserProfile.objects.create(user=h)

        r.delete()
        self.assertEqual(HiddenUserProfile.objects.count(), 0)

    def test_large_delete(self):
        TEST_SIZE = 2000
        objs = [Avatar() for i in range(0, TEST_SIZE)]
        Avatar.objects.bulk_create(objs)
        # Calculate the number of queries needed.
        batch_size = connection.ops.bulk_batch_size(['pk'], objs)
        # The related fetches are done in batches.
        batches = ceil(len(objs) / batch_size)
        # One query for Avatar.objects.all() and then one related fast delete for
        # each batch.
fetches_to_mem = 1 + batches # The Avatar objects are going to be deleted in batches of GET_ITERATOR_CHUNK_SIZE queries = fetches_to_mem + TEST_SIZE // GET_ITERATOR_CHUNK_SIZE self.assertNumQueries(queries, Avatar.objects.all().delete) self.assertFalse(Avatar.objects.exists()) def test_large_delete_related(self): TEST_SIZE = 2000 s = S.objects.create(r=R.objects.create()) for i in range(TEST_SIZE): T.objects.create(s=s) batch_size = max(connection.ops.bulk_batch_size(['pk'], range(TEST_SIZE)), 1) # TEST_SIZE / batch_size (select related `T` instances) # + 1 (select related `U` instances) # + TEST_SIZE / GET_ITERATOR_CHUNK_SIZE (delete `T` instances in batches) # + 1 (delete `s`) expected_num_queries = ceil(TEST_SIZE / batch_size) expected_num_queries += ceil(TEST_SIZE / GET_ITERATOR_CHUNK_SIZE) + 2 self.assertNumQueries(expected_num_queries, s.delete) self.assertFalse(S.objects.exists()) self.assertFalse(T.objects.exists()) def test_delete_with_keeping_parents(self): child = RChild.objects.create() parent_id = child.r_ptr_id child.delete(keep_parents=True) self.assertFalse(RChild.objects.filter(id=child.id).exists()) self.assertTrue(R.objects.filter(id=parent_id).exists()) def test_delete_with_keeping_parents_relationships(self): child = RChild.objects.create() parent_id = child.r_ptr_id parent_referent_id = S.objects.create(r=child.r_ptr).pk child.delete(keep_parents=True) self.assertFalse(RChild.objects.filter(id=child.id).exists()) self.assertTrue(R.objects.filter(id=parent_id).exists()) self.assertTrue(S.objects.filter(pk=parent_referent_id).exists()) childchild = RChildChild.objects.create() parent_id = childchild.rchild_ptr.r_ptr_id child_id = childchild.rchild_ptr_id parent_referent_id = S.objects.create(r=childchild.rchild_ptr.r_ptr).pk childchild.delete(keep_parents=True) self.assertFalse(RChildChild.objects.filter(id=childchild.id).exists()) self.assertTrue(RChild.objects.filter(id=child_id).exists()) self.assertTrue(R.objects.filter(id=parent_id).exists()) self.assertTrue(S.objects.filter(pk=parent_referent_id).exists()) def test_queryset_delete_returns_num_rows(self): """ QuerySet.delete() should return the number of deleted rows and a dictionary with the number of deletions for each object type. """ Avatar.objects.bulk_create([Avatar(desc='a'), Avatar(desc='b'), Avatar(desc='c')]) avatars_count = Avatar.objects.count() deleted, rows_count = Avatar.objects.all().delete() self.assertEqual(deleted, avatars_count) # more complex example with multiple object types r = R.objects.create() h1 = HiddenUser.objects.create(r=r) HiddenUser.objects.create(r=r) HiddenUserProfile.objects.create(user=h1) existed_objs = { R._meta.label: R.objects.count(), HiddenUser._meta.label: HiddenUser.objects.count(), A._meta.label: A.objects.count(), MR._meta.label: MR.objects.count(), HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(), } deleted, deleted_objs = R.objects.all().delete() for k, v in existed_objs.items(): self.assertEqual(deleted_objs[k], v) def test_model_delete_returns_num_rows(self): """ Model.delete() should return the number of deleted rows and a dictionary with the number of deletions for each object type. 
""" r = R.objects.create() h1 = HiddenUser.objects.create(r=r) h2 = HiddenUser.objects.create(r=r) HiddenUser.objects.create(r=r) HiddenUserProfile.objects.create(user=h1) HiddenUserProfile.objects.create(user=h2) m1 = M.objects.create() m2 = M.objects.create() MR.objects.create(r=r, m=m1) r.m_set.add(m1) r.m_set.add(m2) r.save() existed_objs = { R._meta.label: R.objects.count(), HiddenUser._meta.label: HiddenUser.objects.count(), A._meta.label: A.objects.count(), MR._meta.label: MR.objects.count(), HiddenUserProfile._meta.label: HiddenUserProfile.objects.count(), M.m2m.through._meta.label: M.m2m.through.objects.count(), } deleted, deleted_objs = r.delete() self.assertEqual(deleted, sum(existed_objs.values())) for k, v in existed_objs.items(): self.assertEqual(deleted_objs[k], v) def test_proxied_model_duplicate_queries(self): """ #25685 - Deleting instances of a model with existing proxy classes should not issue multiple queries during cascade deletion of referring models. """ avatar = Avatar.objects.create() # One query for the Avatar table and a second for the User one. with self.assertNumQueries(2): avatar.delete() def test_only_referenced_fields_selected(self): """ Only referenced fields are selected during cascade deletion SELECT unless deletion signals are connected. """ origin = Origin.objects.create() expected_sql = str( Referrer.objects.only( # Both fields are referenced by SecondReferrer. 'id', 'unique_field', ).filter(origin__in=[origin]).query ) with self.assertNumQueries(2) as ctx: origin.delete() self.assertEqual(ctx.captured_queries[0]['sql'], expected_sql) def receiver(instance, **kwargs): pass # All fields are selected if deletion signals are connected. for signal_name in ('pre_delete', 'post_delete'): with self.subTest(signal=signal_name): origin = Origin.objects.create() signal = getattr(models.signals, signal_name) signal.connect(receiver, sender=Referrer) with self.assertNumQueries(2) as ctx: origin.delete() self.assertIn( connection.ops.quote_name('large_field'), ctx.captured_queries[0]['sql'], ) signal.disconnect(receiver, sender=Referrer) class FastDeleteTests(TestCase): def test_fast_delete_fk(self): u = User.objects.create( avatar=Avatar.objects.create() ) a = Avatar.objects.get(pk=u.avatar_id) # 1 query to fast-delete the user # 1 query to delete the avatar self.assertNumQueries(2, a.delete) self.assertFalse(User.objects.exists()) self.assertFalse(Avatar.objects.exists()) def test_fast_delete_m2m(self): t = M2MTo.objects.create() f = M2MFrom.objects.create() f.m2m.add(t) # 1 to delete f, 1 to fast-delete m2m for f self.assertNumQueries(2, f.delete) def test_fast_delete_revm2m(self): t = M2MTo.objects.create() f = M2MFrom.objects.create() f.m2m.add(t) # 1 to delete t, 1 to fast-delete t's m_set self.assertNumQueries(2, f.delete) def test_fast_delete_qs(self): u1 = User.objects.create() u2 = User.objects.create() self.assertNumQueries(1, User.objects.filter(pk=u1.pk).delete) self.assertEqual(User.objects.count(), 1) self.assertTrue(User.objects.filter(pk=u2.pk).exists()) def test_fast_delete_instance_set_pk_none(self): u = User.objects.create() # User can be fast-deleted. 
        collector = Collector(using='default')
        self.assertTrue(collector.can_fast_delete(u))
        u.delete()
        self.assertIsNone(u.pk)

    def test_fast_delete_joined_qs(self):
        a = Avatar.objects.create(desc='a')
        User.objects.create(avatar=a)
        u2 = User.objects.create()
        self.assertNumQueries(1, User.objects.filter(avatar__desc='a').delete)
        self.assertEqual(User.objects.count(), 1)
        self.assertTrue(User.objects.filter(pk=u2.pk).exists())

    def test_fast_delete_inheritance(self):
        c = Child.objects.create()
        p = Parent.objects.create()
        # 1 for self, 1 for parent
        self.assertNumQueries(2, c.delete)
        self.assertFalse(Child.objects.exists())
        self.assertEqual(Parent.objects.count(), 1)
        self.assertEqual(Parent.objects.filter(pk=p.pk).count(), 1)
        # 1 for self delete, 1 for fast delete of empty "child" qs.
        self.assertNumQueries(2, p.delete)
        self.assertFalse(Parent.objects.exists())
        # 1 for self delete, 1 for fast delete of the child row.
        c = Child.objects.create()
        p = c.parent_ptr
        self.assertNumQueries(2, p.delete)
        self.assertFalse(Parent.objects.exists())
        self.assertFalse(Child.objects.exists())

    def test_fast_delete_large_batch(self):
        User.objects.bulk_create(User() for i in range(0, 2000))
        # No problems here - we aren't going to cascade, so we will fast
        # delete the objects in a single query.
        self.assertNumQueries(1, User.objects.all().delete)
        a = Avatar.objects.create(desc='a')
        User.objects.bulk_create(User(avatar=a) for i in range(0, 2000))
        # We don't hit parameter amount limits for a, so just one query for
        # that + fast delete of the related objs.
        self.assertNumQueries(2, a.delete)
        self.assertEqual(User.objects.count(), 0)

    def test_fast_delete_empty_no_update_can_self_select(self):
        """
        #25932 - Fast deleting on backends that don't have the
        `no_update_can_self_select` feature should work even if the specified
        filter doesn't match any row.
        """
        with self.assertNumQueries(1):
            self.assertEqual(
                User.objects.filter(avatar__desc='missing').delete(),
                (0, {'delete.User': 0})
            )

    def test_fast_delete_combined_relationships(self):
        # The cascading fast-delete of SecondReferrer should be combined
        # in a single DELETE WHERE referrer_id OR unique_field.
        origin = Origin.objects.create()
        referer = Referrer.objects.create(origin=origin, unique_field=42)
        with self.assertNumQueries(2):
            referer.delete()
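# ---------------------------------------------------------------------------
# Illustrative sketch (an editorial addition, not part of the test suite):
# the query-count arithmetic asserted in test_large_delete and
# test_large_delete_related above. GET_ITERATOR_CHUNK_SIZE (100 in the
# Django version these tests target) caps how many pks go into each DELETE,
# while connection.ops.bulk_batch_size() caps how many pks fit in a single
# SELECT ... WHERE pk IN (...) when fetching related rows. The function name
# and the 900-row batch size below are assumptions for the example only.
from math import ceil

GET_ITERATOR_CHUNK_SIZE = 100  # mirrors django.db.models.sql.constants


def expected_cascade_queries(n_related, bulk_batch_size, extra_selects=0):
    """Queries needed to cascade-delete n_related rows plus one parent row."""
    # SELECTs fetching the related rows, in backend-sized batches.
    fetches = ceil(n_related / bulk_batch_size)
    # DELETEs issued in chunks of GET_ITERATOR_CHUNK_SIZE pks each.
    deletes = ceil(n_related / GET_ITERATOR_CHUNK_SIZE)
    # +1 for deleting the parent row itself.
    return fetches + extra_selects + deletes + 1


# test_large_delete_related with TEST_SIZE=2000 and an assumed batch size of
# 900 would expect ceil(2000/900) + 1 + ceil(2000/100) + 1 = 25 queries.
assert expected_cascade_queries(2000, 900, extra_selects=1) == 25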
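# ---------------------------------------------------------------------------
# Illustrative sketch (editorial addition): the eligibility check behind the
# "fast delete" assertions above, summarized with hedging. Roughly,
# Collector.can_fast_delete() returns True only when no delete signals are
# connected for the model and the cascade collects nothing extra, so the
# row(s) can go in a single DELETE without being fetched first. The helper
# name below is ours, not Django's; only Collector and can_fast_delete()
# come from Django, as used in test_fast_delete_instance_set_pk_none.
from django.db.models.deletion import Collector


def is_fast_deletable(obj, using='default'):
    """True if Django could delete ``obj`` without fetching it first."""
    return Collector(using=using).can_fast_delete(obj)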
import pickle

from django import forms
from django.core.exceptions import ValidationError
from django.db import models
from django.test import SimpleTestCase, TestCase
from django.utils.functional import lazy

from .models import (
    Bar, Choiceful, Foo, RenamedField, VerboseNameField, Whiz, WhizDelayed,
    WhizIter, WhizIterEmpty,
)


class Nested:
    class Field(models.Field):
        pass


class BasicFieldTests(SimpleTestCase):

    def test_show_hidden_initial(self):
        """
        Fields with choices respect show_hidden_initial as a kwarg to
        formfield().
        """
        choices = [(0, 0), (1, 1)]
        model_field = models.Field(choices=choices)
        form_field = model_field.formfield(show_hidden_initial=True)
        self.assertTrue(form_field.show_hidden_initial)

        form_field = model_field.formfield(show_hidden_initial=False)
        self.assertFalse(form_field.show_hidden_initial)

    def test_field_repr(self):
        """
        __repr__() of a field displays its name.
        """
        f = Foo._meta.get_field('a')
        self.assertEqual(repr(f), '<django.db.models.fields.CharField: a>')
        f = models.fields.CharField()
        self.assertEqual(repr(f), '<django.db.models.fields.CharField>')

    def test_field_repr_nested(self):
        """__repr__() uses __qualname__ for nested class support."""
        self.assertEqual(repr(Nested.Field()), '<model_fields.tests.Nested.Field>')

    def test_field_name(self):
        """
        A defined field name (name="fieldname") is used instead of the model's
        attribute name (modelname).
        """
        instance = RenamedField()
        self.assertTrue(hasattr(instance, 'get_fieldname_display'))
        self.assertFalse(hasattr(instance, 'get_modelname_display'))

    def test_field_verbose_name(self):
        m = VerboseNameField
        for i in range(1, 23):
            self.assertEqual(m._meta.get_field('field%d' % i).verbose_name, 'verbose field%d' % i)

        self.assertEqual(m._meta.get_field('id').verbose_name, 'verbose pk')

    def test_choices_form_class(self):
        """Can supply a custom choices form class to Field.formfield()"""
        choices = [('a', 'a')]
        field = models.CharField(choices=choices)
        klass = forms.TypedMultipleChoiceField
        self.assertIsInstance(field.formfield(choices_form_class=klass), klass)

    def test_formfield_disabled(self):
        """Field.formfield() sets disabled for fields with choices."""
        field = models.CharField(choices=[('a', 'b')])
        form_field = field.formfield(disabled=True)
        self.assertIs(form_field.disabled, True)

    def test_field_str(self):
        f = models.Field()
        self.assertEqual(str(f), '<django.db.models.fields.Field>')
        f = Foo._meta.get_field('a')
        self.assertEqual(str(f), 'model_fields.Foo.a')

    def test_field_ordering(self):
        """Fields are ordered based on their creation."""
        f1 = models.Field()
        f2 = models.Field(auto_created=True)
        f3 = models.Field()
        self.assertLess(f2, f1)
        self.assertGreater(f3, f1)
        self.assertIsNotNone(f1)
        self.assertNotIn(f2, (None, 1, ''))

    def test_field_instance_is_picklable(self):
        """Field instances can be pickled."""
        field = models.Field(max_length=100, default='a string')
        # Must be picklable with this cached property populated (#28188).
field._get_default pickle.dumps(field) def test_deconstruct_nested_field(self): """deconstruct() uses __qualname__ for nested class support.""" name, path, args, kwargs = Nested.Field().deconstruct() self.assertEqual(path, 'model_fields.tests.Nested.Field') class ChoicesTests(SimpleTestCase): @classmethod def setUpClass(cls): super().setUpClass() cls.no_choices = Choiceful._meta.get_field('no_choices') cls.empty_choices = Choiceful._meta.get_field('empty_choices') cls.empty_choices_bool = Choiceful._meta.get_field('empty_choices_bool') cls.empty_choices_text = Choiceful._meta.get_field('empty_choices_text') cls.with_choices = Choiceful._meta.get_field('with_choices') def test_choices(self): self.assertIsNone(self.no_choices.choices) self.assertEqual(self.empty_choices.choices, ()) self.assertEqual(self.with_choices.choices, [(1, 'A')]) def test_flatchoices(self): self.assertEqual(self.no_choices.flatchoices, []) self.assertEqual(self.empty_choices.flatchoices, []) self.assertEqual(self.with_choices.flatchoices, [(1, 'A')]) def test_check(self): self.assertEqual(Choiceful.check(), []) def test_invalid_choice(self): model_instance = None # Actual model instance not needed. self.no_choices.validate(0, model_instance) msg = "['Value 99 is not a valid choice.']" with self.assertRaisesMessage(ValidationError, msg): self.empty_choices.validate(99, model_instance) with self.assertRaisesMessage(ValidationError, msg): self.with_choices.validate(99, model_instance) def test_formfield(self): no_choices_formfield = self.no_choices.formfield() self.assertIsInstance(no_choices_formfield, forms.IntegerField) fields = ( self.empty_choices, self.with_choices, self.empty_choices_bool, self.empty_choices_text, ) for field in fields: with self.subTest(field=field): self.assertIsInstance(field.formfield(), forms.ChoiceField) class GetFieldDisplayTests(SimpleTestCase): def test_choices_and_field_display(self): """ get_choices() interacts with get_FIELD_display() to return the expected values. """ self.assertEqual(Whiz(c=1).get_c_display(), 'First') # A nested value self.assertEqual(Whiz(c=0).get_c_display(), 'Other') # A top level value self.assertEqual(Whiz(c=9).get_c_display(), 9) # Invalid value self.assertIsNone(Whiz(c=None).get_c_display()) # Blank value self.assertEqual(Whiz(c='').get_c_display(), '') # Empty value self.assertEqual(WhizDelayed(c=0).get_c_display(), 'Other') # Delayed choices def test_get_FIELD_display_translated(self): """A translated display value is coerced to str.""" val = Whiz(c=5).get_c_display() self.assertIsInstance(val, str) self.assertEqual(val, 'translated') def test_iterator_choices(self): """ get_choices() works with Iterators. """ self.assertEqual(WhizIter(c=1).c, 1) # A nested value self.assertEqual(WhizIter(c=9).c, 9) # Invalid value self.assertIsNone(WhizIter(c=None).c) # Blank value self.assertEqual(WhizIter(c='').c, '') # Empty value def test_empty_iterator_choices(self): """ get_choices() works with empty iterators. 
""" self.assertEqual(WhizIterEmpty(c="a").c, "a") # A nested value self.assertEqual(WhizIterEmpty(c="b").c, "b") # Invalid value self.assertIsNone(WhizIterEmpty(c=None).c) # Blank value self.assertEqual(WhizIterEmpty(c='').c, '') # Empty value class GetChoicesTests(SimpleTestCase): def test_empty_choices(self): choices = [] f = models.CharField(choices=choices) self.assertEqual(f.get_choices(include_blank=False), choices) def test_blank_in_choices(self): choices = [('', '<><>'), ('a', 'A')] f = models.CharField(choices=choices) self.assertEqual(f.get_choices(include_blank=True), choices) def test_blank_in_grouped_choices(self): choices = [ ('f', 'Foo'), ('b', 'Bar'), ('Group', ( ('', 'No Preference'), ('fg', 'Foo'), ('bg', 'Bar'), )), ] f = models.CharField(choices=choices) self.assertEqual(f.get_choices(include_blank=True), choices) def test_lazy_strings_not_evaluated(self): lazy_func = lazy(lambda x: 0 / 0, int) # raises ZeroDivisionError if evaluated. f = models.CharField(choices=[(lazy_func('group'), (('a', 'A'), ('b', 'B')))]) self.assertEqual(f.get_choices(include_blank=True)[0], ('', '---------')) class GetChoicesOrderingTests(TestCase): @classmethod def setUpTestData(cls): cls.foo1 = Foo.objects.create(a='a', d='12.35') cls.foo2 = Foo.objects.create(a='b', d='12.34') cls.bar1 = Bar.objects.create(a=cls.foo1, b='b') cls.bar2 = Bar.objects.create(a=cls.foo2, b='a') cls.field = Bar._meta.get_field('a') def assertChoicesEqual(self, choices, objs): self.assertEqual(choices, [(obj.pk, str(obj)) for obj in objs]) def test_get_choices(self): self.assertChoicesEqual( self.field.get_choices(include_blank=False, ordering=('a',)), [self.foo1, self.foo2] ) self.assertChoicesEqual( self.field.get_choices(include_blank=False, ordering=('-a',)), [self.foo2, self.foo1] ) def test_get_choices_default_ordering(self): self.addCleanup(setattr, Foo._meta, 'ordering', Foo._meta.ordering) Foo._meta.ordering = ('d',) self.assertChoicesEqual( self.field.get_choices(include_blank=False), [self.foo2, self.foo1] ) def test_get_choices_reverse_related_field(self): self.assertChoicesEqual( self.field.remote_field.get_choices(include_blank=False, ordering=('a',)), [self.bar1, self.bar2] ) self.assertChoicesEqual( self.field.remote_field.get_choices(include_blank=False, ordering=('-a',)), [self.bar2, self.bar1] ) def test_get_choices_reverse_related_field_default_ordering(self): self.addCleanup(setattr, Bar._meta, 'ordering', Bar._meta.ordering) Bar._meta.ordering = ('b',) self.assertChoicesEqual( self.field.remote_field.get_choices(include_blank=False), [self.bar2, self.bar1] ) class GetChoicesLimitChoicesToTests(TestCase): @classmethod def setUpTestData(cls): cls.foo1 = Foo.objects.create(a='a', d='12.34') cls.foo2 = Foo.objects.create(a='b', d='12.34') cls.bar1 = Bar.objects.create(a=cls.foo1, b='b') cls.bar2 = Bar.objects.create(a=cls.foo2, b='a') cls.field = Bar._meta.get_field('a') def assertChoicesEqual(self, choices, objs): self.assertEqual(choices, [(obj.pk, str(obj)) for obj in objs]) def test_get_choices(self): self.assertChoicesEqual( self.field.get_choices(include_blank=False, limit_choices_to={'a': 'a'}), [self.foo1], ) self.assertChoicesEqual( self.field.get_choices(include_blank=False, limit_choices_to={}), [self.foo1, self.foo2], ) def test_get_choices_reverse_related_field(self): field = self.field.remote_field self.assertChoicesEqual( field.get_choices(include_blank=False, limit_choices_to={'b': 'b'}), [self.bar1], ) self.assertChoicesEqual( field.get_choices(include_blank=False, 
limit_choices_to={}), [self.bar1, self.bar2], )
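# ---------------------------------------------------------------------------
# Illustrative sketch (an editorial addition, not part of the test suite):
# the blank-choice behaviour exercised by GetChoicesTests above.
# get_choices(include_blank=True) prepends BLANK_CHOICE_DASH unless a blank
# value is already defined somewhere in the choices, including inside named
# groups -- hence test_blank_in_grouped_choices. This runs with only Django
# installed; no settings or database are needed for fields with choices.
from django.db.models import CharField
from django.db.models.fields import BLANK_CHOICE_DASH

f = CharField(choices=[('a', 'A'), ('b', 'B')])
# The blank dash choice ('', '---------') is prepended only when asked for.
assert f.get_choices(include_blank=True) == BLANK_CHOICE_DASH + [('a', 'A'), ('b', 'B')]
assert f.get_choices(include_blank=False) == [('a', 'A'), ('b', 'B')]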
from unittest import mock from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist from django.db import connection from django.db.models import Prefetch, QuerySet from django.db.models.query import get_prefetcher, prefetch_related_objects from django.db.models.sql import Query from django.test import TestCase, override_settings from django.test.utils import CaptureQueriesContext from .models import ( Article, Author, Author2, AuthorAddress, AuthorWithAge, Bio, Book, Bookmark, BookReview, BookWithYear, Comment, Department, Employee, FavoriteAuthors, House, LessonEntry, ModelIterableSubclass, Person, Qualification, Reader, Room, TaggedItem, Teacher, WordEntry, ) class TestDataMixin: @classmethod def setUpTestData(cls): cls.book1 = Book.objects.create(title='Poems') cls.book2 = Book.objects.create(title='Jane Eyre') cls.book3 = Book.objects.create(title='Wuthering Heights') cls.book4 = Book.objects.create(title='Sense and Sensibility') cls.author1 = Author.objects.create(name='Charlotte', first_book=cls.book1) cls.author2 = Author.objects.create(name='Anne', first_book=cls.book1) cls.author3 = Author.objects.create(name='Emily', first_book=cls.book1) cls.author4 = Author.objects.create(name='Jane', first_book=cls.book4) cls.book1.authors.add(cls.author1, cls.author2, cls.author3) cls.book2.authors.add(cls.author1) cls.book3.authors.add(cls.author3) cls.book4.authors.add(cls.author4) cls.reader1 = Reader.objects.create(name='Amy') cls.reader2 = Reader.objects.create(name='Belinda') cls.reader1.books_read.add(cls.book1, cls.book4) cls.reader2.books_read.add(cls.book2, cls.book4) class PrefetchRelatedTests(TestDataMixin, TestCase): def assertWhereContains(self, sql, needle): where_idx = sql.index('WHERE') self.assertEqual( sql.count(str(needle), where_idx), 1, msg="WHERE clause doesn't contain %s, actual SQL: %s" % (needle, sql[where_idx:]) ) def test_m2m_forward(self): with self.assertNumQueries(2): lists = [list(b.authors.all()) for b in Book.objects.prefetch_related('authors')] normal_lists = [list(b.authors.all()) for b in Book.objects.all()] self.assertEqual(lists, normal_lists) def test_m2m_reverse(self): with self.assertNumQueries(2): lists = [list(a.books.all()) for a in Author.objects.prefetch_related('books')] normal_lists = [list(a.books.all()) for a in Author.objects.all()] self.assertEqual(lists, normal_lists) def test_foreignkey_forward(self): with self.assertNumQueries(2): books = [a.first_book for a in Author.objects.prefetch_related('first_book')] normal_books = [a.first_book for a in Author.objects.all()] self.assertEqual(books, normal_books) def test_foreignkey_reverse(self): with self.assertNumQueries(2): [list(b.first_time_authors.all()) for b in Book.objects.prefetch_related('first_time_authors')] self.assertQuerysetEqual(self.book2.authors.all(), ["<Author: Charlotte>"]) def test_onetoone_reverse_no_match(self): # Regression for #17439 with self.assertNumQueries(2): book = Book.objects.prefetch_related('bookwithyear').all()[0] with self.assertNumQueries(0): with self.assertRaises(BookWithYear.DoesNotExist): book.bookwithyear def test_onetoone_reverse_with_to_field_pk(self): """ A model (Bio) with a OneToOneField primary key (author) that references a non-pk field (name) on the related model (Author) is prefetchable. 
""" Bio.objects.bulk_create([ Bio(author=self.author1), Bio(author=self.author2), Bio(author=self.author3), ]) authors = Author.objects.filter( name__in=[self.author1, self.author2, self.author3], ).prefetch_related('bio') with self.assertNumQueries(2): for author in authors: self.assertEqual(author.name, author.bio.author.name) def test_survives_clone(self): with self.assertNumQueries(2): [list(b.first_time_authors.all()) for b in Book.objects.prefetch_related('first_time_authors').exclude(id=1000)] def test_len(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related('first_time_authors') len(qs) [list(b.first_time_authors.all()) for b in qs] def test_bool(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related('first_time_authors') bool(qs) [list(b.first_time_authors.all()) for b in qs] def test_count(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related('first_time_authors') [b.first_time_authors.count() for b in qs] def test_exists(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related('first_time_authors') [b.first_time_authors.exists() for b in qs] def test_in_and_prefetch_related(self): """ Regression test for #20242 - QuerySet "in" didn't work the first time when using prefetch_related. This was fixed by the removal of chunked reads from QuerySet iteration in 70679243d1786e03557c28929f9762a119e3ac14. """ qs = Book.objects.prefetch_related('first_time_authors') self.assertIn(qs[0], qs) def test_clear(self): with self.assertNumQueries(5): with_prefetch = Author.objects.prefetch_related('books') without_prefetch = with_prefetch.prefetch_related(None) [list(a.books.all()) for a in without_prefetch] def test_m2m_then_m2m(self): """A m2m can be followed through another m2m.""" with self.assertNumQueries(3): qs = Author.objects.prefetch_related('books__read_by') lists = [[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs] self.assertEqual(lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ]) def test_overriding_prefetch(self): with self.assertNumQueries(3): qs = Author.objects.prefetch_related('books', 'books__read_by') lists = [[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs] self.assertEqual(lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ]) with self.assertNumQueries(3): qs = Author.objects.prefetch_related('books__read_by', 'books') lists = [[[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs] self.assertEqual(lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ]) def test_get(self): """ Objects retrieved with .get() get the prefetch behavior. """ # Need a double with self.assertNumQueries(3): author = Author.objects.prefetch_related('books__read_by').get(name="Charlotte") lists = [[str(r) for r in b.read_by.all()] for b in author.books.all()] self.assertEqual(lists, [["Amy"], ["Belinda"]]) # Poems, Jane Eyre def test_foreign_key_then_m2m(self): """ A m2m relation can be followed after a relation like ForeignKey that doesn't have many objects. 
""" with self.assertNumQueries(2): qs = Author.objects.select_related('first_book').prefetch_related('first_book__read_by') lists = [[str(r) for r in a.first_book.read_by.all()] for a in qs] self.assertEqual(lists, [["Amy"], ["Amy"], ["Amy"], ["Amy", "Belinda"]]) def test_reverse_one_to_one_then_m2m(self): """ A m2m relation can be followed after going through the select_related reverse of an o2o. """ qs = Author.objects.prefetch_related('bio__books').select_related('bio') with self.assertNumQueries(1): list(qs.all()) Bio.objects.create(author=self.author1) with self.assertNumQueries(2): list(qs.all()) def test_attribute_error(self): qs = Reader.objects.all().prefetch_related('books_read__xyz') msg = ( "Cannot find 'xyz' on Book object, 'books_read__xyz' " "is an invalid parameter to prefetch_related()" ) with self.assertRaisesMessage(AttributeError, msg) as cm: list(qs) self.assertIn('prefetch_related', str(cm.exception)) def test_invalid_final_lookup(self): qs = Book.objects.prefetch_related('authors__name') msg = ( "'authors__name' does not resolve to an item that supports " "prefetching - this is an invalid parameter to prefetch_related()." ) with self.assertRaisesMessage(ValueError, msg) as cm: list(qs) self.assertIn('prefetch_related', str(cm.exception)) self.assertIn("name", str(cm.exception)) def test_prefetch_eq(self): prefetch_1 = Prefetch('authors', queryset=Author.objects.all()) prefetch_2 = Prefetch('books', queryset=Book.objects.all()) self.assertEqual(prefetch_1, prefetch_1) self.assertEqual(prefetch_1, mock.ANY) self.assertNotEqual(prefetch_1, prefetch_2) def test_forward_m2m_to_attr_conflict(self): msg = 'to_attr=authors conflicts with a field on the Book model.' authors = Author.objects.all() with self.assertRaisesMessage(ValueError, msg): list(Book.objects.prefetch_related( Prefetch('authors', queryset=authors, to_attr='authors'), )) # Without the ValueError, an author was deleted due to the implicit # save of the relation assignment. self.assertEqual(self.book1.authors.count(), 3) def test_reverse_m2m_to_attr_conflict(self): msg = 'to_attr=books conflicts with a field on the Author model.' poems = Book.objects.filter(title='Poems') with self.assertRaisesMessage(ValueError, msg): list(Author.objects.prefetch_related( Prefetch('books', queryset=poems, to_attr='books'), )) # Without the ValueError, a book was deleted due to the implicit # save of reverse relation assignment. self.assertEqual(self.author1.books.count(), 2) def test_m2m_then_reverse_fk_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related('authors__addresses')) sql = queries[-1]['sql'] self.assertWhereContains(sql, self.author1.name) def test_m2m_then_m2m_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related('authors__favorite_authors')) sql = queries[-1]['sql'] self.assertWhereContains(sql, self.author1.name) def test_m2m_then_reverse_one_to_one_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related('authors__authorwithage')) sql = queries[-1]['sql'] self.assertWhereContains(sql, self.author1.id) def test_filter_deferred(self): """ Related filtering of prefetched querysets is deferred until necessary. 
""" add_q = Query.add_q with mock.patch.object( Query, 'add_q', autospec=True, side_effect=lambda self, q: add_q(self, q), ) as add_q_mock: list(Book.objects.prefetch_related('authors')) self.assertEqual(add_q_mock.call_count, 1) class RawQuerySetTests(TestDataMixin, TestCase): def test_basic(self): with self.assertNumQueries(2): books = Book.objects.raw( "SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,) ).prefetch_related('authors') book1 = list(books)[0] with self.assertNumQueries(0): self.assertCountEqual(book1.authors.all(), [self.author1, self.author2, self.author3]) def test_prefetch_before_raw(self): with self.assertNumQueries(2): books = Book.objects.prefetch_related('authors').raw( "SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,) ) book1 = list(books)[0] with self.assertNumQueries(0): self.assertCountEqual(book1.authors.all(), [self.author1, self.author2, self.author3]) def test_clear(self): with self.assertNumQueries(5): with_prefetch = Author.objects.raw( "SELECT * FROM prefetch_related_author" ).prefetch_related('books') without_prefetch = with_prefetch.prefetch_related(None) [list(a.books.all()) for a in without_prefetch] class CustomPrefetchTests(TestCase): @classmethod def traverse_qs(cls, obj_iter, path): """ Helper method that returns a list containing a list of the objects in the obj_iter. Then for each object in the obj_iter, the path will be recursively travelled and the found objects are added to the return value. """ ret_val = [] if hasattr(obj_iter, 'all'): obj_iter = obj_iter.all() try: iter(obj_iter) except TypeError: obj_iter = [obj_iter] for obj in obj_iter: rel_objs = [] for part in path: if not part: continue try: related = getattr(obj, part[0]) except ObjectDoesNotExist: continue if related is not None: rel_objs.extend(cls.traverse_qs(related, [part[1:]])) ret_val.append((obj, rel_objs)) return ret_val @classmethod def setUpTestData(cls): cls.person1 = Person.objects.create(name='Joe') cls.person2 = Person.objects.create(name='Mary') # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. 
cls.house1 = House.objects.create(name='House 1', address='123 Main St', owner=cls.person1) cls.room1_1 = Room.objects.create(name='Dining room', house=cls.house1) cls.room1_2 = Room.objects.create(name='Lounge', house=cls.house1) cls.room1_3 = Room.objects.create(name='Kitchen', house=cls.house1) cls.house1.main_room = cls.room1_1 cls.house1.save() cls.person1.houses.add(cls.house1) cls.house2 = House.objects.create(name='House 2', address='45 Side St', owner=cls.person1) cls.room2_1 = Room.objects.create(name='Dining room', house=cls.house2) cls.room2_2 = Room.objects.create(name='Lounge', house=cls.house2) cls.room2_3 = Room.objects.create(name='Kitchen', house=cls.house2) cls.house2.main_room = cls.room2_1 cls.house2.save() cls.person1.houses.add(cls.house2) cls.house3 = House.objects.create(name='House 3', address='6 Downing St', owner=cls.person2) cls.room3_1 = Room.objects.create(name='Dining room', house=cls.house3) cls.room3_2 = Room.objects.create(name='Lounge', house=cls.house3) cls.room3_3 = Room.objects.create(name='Kitchen', house=cls.house3) cls.house3.main_room = cls.room3_1 cls.house3.save() cls.person2.houses.add(cls.house3) cls.house4 = House.objects.create(name='house 4', address="7 Regents St", owner=cls.person2) cls.room4_1 = Room.objects.create(name='Dining room', house=cls.house4) cls.room4_2 = Room.objects.create(name='Lounge', house=cls.house4) cls.room4_3 = Room.objects.create(name='Kitchen', house=cls.house4) cls.house4.main_room = cls.room4_1 cls.house4.save() cls.person2.houses.add(cls.house4) def test_traverse_qs(self): qs = Person.objects.prefetch_related('houses') related_objs_normal = [list(p.houses.all()) for p in qs], related_objs_from_traverse = [[inner[0] for inner in o[1]] for o in self.traverse_qs(qs, [['houses']])] self.assertEqual(related_objs_normal, (related_objs_from_traverse,)) def test_ambiguous(self): # Ambiguous: Lookup was already seen with a different queryset. msg = ( "'houses' lookup was already seen with a different queryset. You " "may need to adjust the ordering of your lookups." ) # lookup.queryset shouldn't be evaluated. with self.assertNumQueries(3): with self.assertRaisesMessage(ValueError, msg): self.traverse_qs( Person.objects.prefetch_related( 'houses__rooms', Prefetch('houses', queryset=House.objects.all()), ), [['houses', 'rooms']], ) # Ambiguous: Lookup houses_lst doesn't yet exist when performing houses_lst__rooms. msg = ( "Cannot find 'houses_lst' on Person object, 'houses_lst__rooms' is " "an invalid parameter to prefetch_related()" ) with self.assertRaisesMessage(AttributeError, msg): self.traverse_qs( Person.objects.prefetch_related( 'houses_lst__rooms', Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst') ), [['houses', 'rooms']] ) # Not ambiguous. self.traverse_qs( Person.objects.prefetch_related('houses__rooms', 'houses'), [['houses', 'rooms']] ) self.traverse_qs( Person.objects.prefetch_related( 'houses__rooms', Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst') ), [['houses', 'rooms']] ) def test_m2m(self): # Control lookups. with self.assertNumQueries(2): lst1 = self.traverse_qs( Person.objects.prefetch_related('houses'), [['houses']] ) # Test lookups. 
with self.assertNumQueries(2): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch('houses')), [['houses']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch('houses', to_attr='houses_lst')), [['houses_lst']] ) self.assertEqual(lst1, lst2) def test_reverse_m2m(self): # Control lookups. with self.assertNumQueries(2): lst1 = self.traverse_qs( House.objects.prefetch_related('occupants'), [['occupants']] ) # Test lookups. with self.assertNumQueries(2): lst2 = self.traverse_qs( House.objects.prefetch_related(Prefetch('occupants')), [['occupants']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): lst2 = self.traverse_qs( House.objects.prefetch_related(Prefetch('occupants', to_attr='occupants_lst')), [['occupants_lst']] ) self.assertEqual(lst1, lst2) def test_m2m_through_fk(self): # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( Room.objects.prefetch_related('house__occupants'), [['house', 'occupants']] ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( Room.objects.prefetch_related(Prefetch('house__occupants')), [['house', 'occupants']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Room.objects.prefetch_related(Prefetch('house__occupants', to_attr='occupants_lst')), [['house', 'occupants_lst']] ) self.assertEqual(lst1, lst2) def test_m2m_through_gfk(self): TaggedItem.objects.create(tag="houses", content_object=self.house1) TaggedItem.objects.create(tag="houses", content_object=self.house2) # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( TaggedItem.objects.filter(tag='houses').prefetch_related('content_object__rooms'), [['content_object', 'rooms']] ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( TaggedItem.objects.prefetch_related( Prefetch('content_object'), Prefetch('content_object__rooms', to_attr='rooms_lst') ), [['content_object', 'rooms_lst']] ) self.assertEqual(lst1, lst2) def test_o2m_through_m2m(self): # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( Person.objects.prefetch_related('houses', 'houses__rooms'), [['houses', 'rooms']] ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch('houses'), 'houses__rooms'), [['houses', 'rooms']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch('houses'), Prefetch('houses__rooms')), [['houses', 'rooms']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch('houses', to_attr='houses_lst'), 'houses_lst__rooms'), [['houses_lst', 'rooms']] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related( Prefetch('houses', to_attr='houses_lst'), Prefetch('houses_lst__rooms', to_attr='rooms_lst') ), [['houses_lst', 'rooms_lst']] ) self.assertEqual(lst1, lst2) def test_generic_rel(self): bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/') TaggedItem.objects.create(content_object=bookmark, tag='django') TaggedItem.objects.create(content_object=bookmark, favorite=bookmark, tag='python') # Control lookups. 
with self.assertNumQueries(4): lst1 = self.traverse_qs( Bookmark.objects.prefetch_related('tags', 'tags__content_object', 'favorite_tags'), [['tags', 'content_object'], ['favorite_tags']] ) # Test lookups. with self.assertNumQueries(4): lst2 = self.traverse_qs( Bookmark.objects.prefetch_related( Prefetch('tags', to_attr='tags_lst'), Prefetch('tags_lst__content_object'), Prefetch('favorite_tags'), ), [['tags_lst', 'content_object'], ['favorite_tags']] ) self.assertEqual(lst1, lst2) def test_traverse_single_item_property(self): # Control lookups. with self.assertNumQueries(5): lst1 = self.traverse_qs( Person.objects.prefetch_related( 'houses__rooms', 'primary_house__occupants__houses', ), [['primary_house', 'occupants', 'houses']] ) # Test lookups. with self.assertNumQueries(5): lst2 = self.traverse_qs( Person.objects.prefetch_related( 'houses__rooms', Prefetch('primary_house__occupants', to_attr='occupants_lst'), 'primary_house__occupants_lst__houses', ), [['primary_house', 'occupants_lst', 'houses']] ) self.assertEqual(lst1, lst2) def test_traverse_multiple_items_property(self): # Control lookups. with self.assertNumQueries(4): lst1 = self.traverse_qs( Person.objects.prefetch_related( 'houses', 'all_houses__occupants__houses', ), [['all_houses', 'occupants', 'houses']] ) # Test lookups. with self.assertNumQueries(4): lst2 = self.traverse_qs( Person.objects.prefetch_related( 'houses', Prefetch('all_houses__occupants', to_attr='occupants_lst'), 'all_houses__occupants_lst__houses', ), [['all_houses', 'occupants_lst', 'houses']] ) self.assertEqual(lst1, lst2) def test_custom_qs(self): # Test basic. with self.assertNumQueries(2): lst1 = list(Person.objects.prefetch_related('houses')) with self.assertNumQueries(2): lst2 = list(Person.objects.prefetch_related( Prefetch('houses', queryset=House.objects.all(), to_attr='houses_lst'))) self.assertEqual( self.traverse_qs(lst1, [['houses']]), self.traverse_qs(lst2, [['houses_lst']]) ) # Test queryset filtering. with self.assertNumQueries(2): lst2 = list( Person.objects.prefetch_related( Prefetch( 'houses', queryset=House.objects.filter(pk__in=[self.house1.pk, self.house3.pk]), to_attr='houses_lst', ) ) ) self.assertEqual(len(lst2[0].houses_lst), 1) self.assertEqual(lst2[0].houses_lst[0], self.house1) self.assertEqual(len(lst2[1].houses_lst), 1) self.assertEqual(lst2[1].houses_lst[0], self.house3) # Test flattened. with self.assertNumQueries(3): lst1 = list(Person.objects.prefetch_related('houses__rooms')) with self.assertNumQueries(3): lst2 = list(Person.objects.prefetch_related( Prefetch('houses__rooms', queryset=Room.objects.all(), to_attr='rooms_lst'))) self.assertEqual( self.traverse_qs(lst1, [['houses', 'rooms']]), self.traverse_qs(lst2, [['houses', 'rooms_lst']]) ) # Test inner select_related. with self.assertNumQueries(3): lst1 = list(Person.objects.prefetch_related('houses__owner')) with self.assertNumQueries(2): lst2 = list(Person.objects.prefetch_related( Prefetch('houses', queryset=House.objects.select_related('owner')))) self.assertEqual( self.traverse_qs(lst1, [['houses', 'owner']]), self.traverse_qs(lst2, [['houses', 'owner']]) ) # Test inner prefetch. 
inner_rooms_qs = Room.objects.filter(pk__in=[self.room1_1.pk, self.room1_2.pk]) houses_qs_prf = House.objects.prefetch_related( Prefetch('rooms', queryset=inner_rooms_qs, to_attr='rooms_lst')) with self.assertNumQueries(4): lst2 = list(Person.objects.prefetch_related( Prefetch('houses', queryset=houses_qs_prf.filter(pk=self.house1.pk), to_attr='houses_lst'), Prefetch('houses_lst__rooms_lst__main_room_of') )) self.assertEqual(len(lst2[0].houses_lst[0].rooms_lst), 2) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0], self.room1_1) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[1], self.room1_2) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0].main_room_of, self.house1) self.assertEqual(len(lst2[1].houses_lst), 0) # Test ForwardManyToOneDescriptor. houses = House.objects.select_related('owner') with self.assertNumQueries(6): rooms = Room.objects.all().prefetch_related('house') lst1 = self.traverse_qs(rooms, [['house', 'owner']]) with self.assertNumQueries(2): rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=houses.all())) lst2 = self.traverse_qs(rooms, [['house', 'owner']]) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): houses = House.objects.select_related('owner') rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=houses.all(), to_attr='house_attr')) lst2 = self.traverse_qs(rooms, [['house_attr', 'owner']]) self.assertEqual(lst1, lst2) room = Room.objects.all().prefetch_related( Prefetch('house', queryset=houses.filter(address='DoesNotExist')) ).first() with self.assertRaises(ObjectDoesNotExist): getattr(room, 'house') room = Room.objects.all().prefetch_related( Prefetch('house', queryset=houses.filter(address='DoesNotExist'), to_attr='house_attr') ).first() self.assertIsNone(room.house_attr) rooms = Room.objects.all().prefetch_related(Prefetch('house', queryset=House.objects.only('name'))) with self.assertNumQueries(2): getattr(rooms.first().house, 'name') with self.assertNumQueries(3): getattr(rooms.first().house, 'address') # Test ReverseOneToOneDescriptor. houses = House.objects.select_related('owner') with self.assertNumQueries(6): rooms = Room.objects.all().prefetch_related('main_room_of') lst1 = self.traverse_qs(rooms, [['main_room_of', 'owner']]) with self.assertNumQueries(2): rooms = Room.objects.all().prefetch_related(Prefetch('main_room_of', queryset=houses.all())) lst2 = self.traverse_qs(rooms, [['main_room_of', 'owner']]) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): rooms = list( Room.objects.all().prefetch_related( Prefetch('main_room_of', queryset=houses.all(), to_attr='main_room_of_attr') ) ) lst2 = self.traverse_qs(rooms, [['main_room_of_attr', 'owner']]) self.assertEqual(lst1, lst2) room = Room.objects.filter(main_room_of__isnull=False).prefetch_related( Prefetch('main_room_of', queryset=houses.filter(address='DoesNotExist')) ).first() with self.assertRaises(ObjectDoesNotExist): getattr(room, 'main_room_of') room = Room.objects.filter(main_room_of__isnull=False).prefetch_related( Prefetch('main_room_of', queryset=houses.filter(address='DoesNotExist'), to_attr='main_room_of_attr') ).first() self.assertIsNone(room.main_room_of_attr) # The custom queryset filters should be applied to the queryset # instance returned by the manager. 
person = Person.objects.prefetch_related( Prefetch('houses', queryset=House.objects.filter(name='House 1')), ).get(pk=self.person1.pk) self.assertEqual( list(person.houses.all()), list(person.houses.all().all()), ) def test_nested_prefetch_related_are_not_overwritten(self): # Regression test for #24873 houses_2 = House.objects.prefetch_related(Prefetch('rooms')) persons = Person.objects.prefetch_related(Prefetch('houses', queryset=houses_2)) houses = House.objects.prefetch_related(Prefetch('occupants', queryset=persons)) list(houses) # queryset must be evaluated once to reproduce the bug. self.assertEqual( houses.all()[0].occupants.all()[0].houses.all()[1].rooms.all()[0], self.room2_1 ) def test_nested_prefetch_related_with_duplicate_prefetcher(self): """ Nested prefetches whose name clashes with descriptor names (Person.houses here) are allowed. """ occupants = Person.objects.prefetch_related( Prefetch('houses', to_attr='some_attr_name'), Prefetch('houses', queryset=House.objects.prefetch_related('main_room')), ) houses = House.objects.prefetch_related(Prefetch('occupants', queryset=occupants)) with self.assertNumQueries(5): self.traverse_qs(list(houses), [['occupants', 'houses', 'main_room']]) def test_values_queryset(self): with self.assertRaisesMessage(ValueError, 'Prefetch querysets cannot use values().'): Prefetch('houses', House.objects.values('pk')) # That error doesn't affect managers with custom ModelIterable subclasses self.assertIs(Teacher.objects_custom.all()._iterable_class, ModelIterableSubclass) Prefetch('teachers', Teacher.objects_custom.all()) def test_to_attr_doesnt_cache_through_attr_as_list(self): house = House.objects.prefetch_related( Prefetch('rooms', queryset=Room.objects.all(), to_attr='to_rooms'), ).get(pk=self.house3.pk) self.assertIsInstance(house.rooms.all(), QuerySet) def test_to_attr_cached_property(self): persons = Person.objects.prefetch_related( Prefetch('houses', House.objects.all(), to_attr='cached_all_houses'), ) for person in persons: # To bypass caching at the related descriptor level, don't use # person.houses.all() here. all_houses = list(House.objects.filter(occupants=person)) with self.assertNumQueries(0): self.assertEqual(person.cached_all_houses, all_houses) def test_filter_deferred(self): """ Related filtering of prefetched querysets is deferred until necessary. 
""" add_q = Query.add_q with mock.patch.object( Query, 'add_q', autospec=True, side_effect=lambda self, q: add_q(self, q), ) as add_q_mock: list(House.objects.prefetch_related( Prefetch('occupants', queryset=Person.objects.all()) )) self.assertEqual(add_q_mock.call_count, 1) class DefaultManagerTests(TestCase): def setUp(self): self.qual1 = Qualification.objects.create(name="BA") self.qual2 = Qualification.objects.create(name="BSci") self.qual3 = Qualification.objects.create(name="MA") self.qual4 = Qualification.objects.create(name="PhD") self.teacher1 = Teacher.objects.create(name="Mr Cleese") self.teacher2 = Teacher.objects.create(name="Mr Idle") self.teacher3 = Teacher.objects.create(name="Mr Chapman") self.teacher1.qualifications.add(self.qual1, self.qual2, self.qual3, self.qual4) self.teacher2.qualifications.add(self.qual1) self.teacher3.qualifications.add(self.qual2) self.dept1 = Department.objects.create(name="English") self.dept2 = Department.objects.create(name="Physics") self.dept1.teachers.add(self.teacher1, self.teacher2) self.dept2.teachers.add(self.teacher1, self.teacher3) def test_m2m_then_m2m(self): with self.assertNumQueries(3): # When we prefetch the teachers, and force the query, we don't want # the default manager on teachers to immediately get all the related # qualifications, since this will do one query per teacher. qs = Department.objects.prefetch_related('teachers') depts = "".join("%s department: %s\n" % (dept.name, ", ".join(str(t) for t in dept.teachers.all())) for dept in qs) self.assertEqual(depts, "English department: Mr Cleese (BA, BSci, MA, PhD), Mr Idle (BA)\n" "Physics department: Mr Cleese (BA, BSci, MA, PhD), Mr Chapman (BSci)\n") class GenericRelationTests(TestCase): @classmethod def setUpTestData(cls): book1 = Book.objects.create(title="Winnie the Pooh") book2 = Book.objects.create(title="Do you like green eggs and spam?") book3 = Book.objects.create(title="Three Men In A Boat") reader1 = Reader.objects.create(name="me") reader2 = Reader.objects.create(name="you") reader3 = Reader.objects.create(name="someone") book1.read_by.add(reader1, reader2) book2.read_by.add(reader2) book3.read_by.add(reader3) cls.book1, cls.book2, cls.book3 = book1, book2, book3 cls.reader1, cls.reader2, cls.reader3 = reader1, reader2, reader3 def test_prefetch_GFK(self): TaggedItem.objects.create(tag="awesome", content_object=self.book1) TaggedItem.objects.create(tag="great", content_object=self.reader1) TaggedItem.objects.create(tag="outstanding", content_object=self.book2) TaggedItem.objects.create(tag="amazing", content_object=self.reader3) # 1 for TaggedItem table, 1 for Book table, 1 for Reader table with self.assertNumQueries(3): qs = TaggedItem.objects.prefetch_related('content_object') list(qs) def test_prefetch_GFK_nonint_pk(self): Comment.objects.create(comment="awesome", content_object=self.book1) # 1 for Comment table, 1 for Book table with self.assertNumQueries(2): qs = Comment.objects.prefetch_related('content_object') [c.content_object for c in qs] def test_prefetch_GFK_uuid_pk(self): article = Article.objects.create(name='Django') Comment.objects.create(comment='awesome', content_object_uuid=article) qs = Comment.objects.prefetch_related('content_object_uuid') self.assertEqual([c.content_object_uuid for c in qs], [article]) def test_prefetch_GFK_fk_pk(self): book = Book.objects.create(title='Poems') book_with_year = BookWithYear.objects.create(book=book, published_year=2019) Comment.objects.create(comment='awesome', content_object=book_with_year) qs = 
Comment.objects.prefetch_related('content_object') self.assertEqual([c.content_object for c in qs], [book_with_year]) def test_traverse_GFK(self): """ A 'content_object' can be traversed with prefetch_related() and get to related objects on the other side (assuming it is suitably filtered) """ TaggedItem.objects.create(tag="awesome", content_object=self.book1) TaggedItem.objects.create(tag="awesome", content_object=self.book2) TaggedItem.objects.create(tag="awesome", content_object=self.book3) TaggedItem.objects.create(tag="awesome", content_object=self.reader1) TaggedItem.objects.create(tag="awesome", content_object=self.reader2) ct = ContentType.objects.get_for_model(Book) # We get 3 queries - 1 for main query, 1 for content_objects since they # all use the same table, and 1 for the 'read_by' relation. with self.assertNumQueries(3): # If we limit to books, we know that they will have 'read_by' # attributes, so the following makes sense: qs = TaggedItem.objects.filter(content_type=ct, tag='awesome').prefetch_related('content_object__read_by') readers_of_awesome_books = {r.name for tag in qs for r in tag.content_object.read_by.all()} self.assertEqual(readers_of_awesome_books, {"me", "you", "someone"}) def test_nullable_GFK(self): TaggedItem.objects.create(tag="awesome", content_object=self.book1, created_by=self.reader1) TaggedItem.objects.create(tag="great", content_object=self.book2) TaggedItem.objects.create(tag="rubbish", content_object=self.book3) with self.assertNumQueries(2): result = [t.created_by for t in TaggedItem.objects.prefetch_related('created_by')] self.assertEqual(result, [t.created_by for t in TaggedItem.objects.all()]) def test_generic_relation(self): bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/') TaggedItem.objects.create(content_object=bookmark, tag='django') TaggedItem.objects.create(content_object=bookmark, tag='python') with self.assertNumQueries(2): tags = [t.tag for b in Bookmark.objects.prefetch_related('tags') for t in b.tags.all()] self.assertEqual(sorted(tags), ["django", "python"]) def test_charfield_GFK(self): b = Bookmark.objects.create(url='http://www.djangoproject.com/') TaggedItem.objects.create(content_object=b, tag='django') TaggedItem.objects.create(content_object=b, favorite=b, tag='python') with self.assertNumQueries(3): bookmark = Bookmark.objects.filter(pk=b.pk).prefetch_related('tags', 'favorite_tags')[0] self.assertEqual(sorted(i.tag for i in bookmark.tags.all()), ["django", "python"]) self.assertEqual([i.tag for i in bookmark.favorite_tags.all()], ["python"]) def test_custom_queryset(self): bookmark = Bookmark.objects.create(url='http://www.djangoproject.com/') django_tag = TaggedItem.objects.create(content_object=bookmark, tag='django') TaggedItem.objects.create(content_object=bookmark, tag='python') with self.assertNumQueries(2): bookmark = Bookmark.objects.prefetch_related( Prefetch('tags', TaggedItem.objects.filter(tag='django')), ).get() with self.assertNumQueries(0): self.assertEqual(list(bookmark.tags.all()), [django_tag]) # The custom queryset filters should be applied to the queryset # instance returned by the manager. 
self.assertEqual(list(bookmark.tags.all()), list(bookmark.tags.all().all())) class MultiTableInheritanceTest(TestCase): @classmethod def setUpTestData(cls): cls.book1 = BookWithYear.objects.create(title='Poems', published_year=2010) cls.book2 = BookWithYear.objects.create(title='More poems', published_year=2011) cls.author1 = AuthorWithAge.objects.create(name='Jane', first_book=cls.book1, age=50) cls.author2 = AuthorWithAge.objects.create(name='Tom', first_book=cls.book1, age=49) cls.author3 = AuthorWithAge.objects.create(name='Robert', first_book=cls.book2, age=48) cls.author_address = AuthorAddress.objects.create(author=cls.author1, address='SomeStreet 1') cls.book2.aged_authors.add(cls.author2, cls.author3) cls.br1 = BookReview.objects.create(book=cls.book1, notes='review book1') cls.br2 = BookReview.objects.create(book=cls.book2, notes='review book2') def test_foreignkey(self): with self.assertNumQueries(2): qs = AuthorWithAge.objects.prefetch_related('addresses') addresses = [[str(address) for address in obj.addresses.all()] for obj in qs] self.assertEqual(addresses, [[str(self.author_address)], [], []]) def test_foreignkey_to_inherited(self): with self.assertNumQueries(2): qs = BookReview.objects.prefetch_related('book') titles = [obj.book.title for obj in qs] self.assertEqual(titles, ["Poems", "More poems"]) def test_m2m_to_inheriting_model(self): qs = AuthorWithAge.objects.prefetch_related('books_with_year') with self.assertNumQueries(2): lst = [[str(book) for book in author.books_with_year.all()] for author in qs] qs = AuthorWithAge.objects.all() lst2 = [[str(book) for book in author.books_with_year.all()] for author in qs] self.assertEqual(lst, lst2) qs = BookWithYear.objects.prefetch_related('aged_authors') with self.assertNumQueries(2): lst = [[str(author) for author in book.aged_authors.all()] for book in qs] qs = BookWithYear.objects.all() lst2 = [[str(author) for author in book.aged_authors.all()] for book in qs] self.assertEqual(lst, lst2) def test_parent_link_prefetch(self): with self.assertNumQueries(2): [a.author for a in AuthorWithAge.objects.prefetch_related('author')] @override_settings(DEBUG=True) def test_child_link_prefetch(self): with self.assertNumQueries(2): authors = [a.authorwithage for a in Author.objects.prefetch_related('authorwithage')] # Regression for #18090: the prefetching query must include an IN clause. # Note that on Oracle the table name is upper case in the generated SQL, # thus the .lower() call. 
self.assertIn('authorwithage', connection.queries[-1]['sql'].lower()) self.assertIn(' IN ', connection.queries[-1]['sql']) self.assertEqual(authors, [a.authorwithage for a in Author.objects.all()]) class ForeignKeyToFieldTest(TestCase): @classmethod def setUpTestData(cls): cls.book = Book.objects.create(title='Poems') cls.author1 = Author.objects.create(name='Jane', first_book=cls.book) cls.author2 = Author.objects.create(name='Tom', first_book=cls.book) cls.author3 = Author.objects.create(name='Robert', first_book=cls.book) cls.author_address = AuthorAddress.objects.create(author=cls.author1, address='SomeStreet 1') FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2) FavoriteAuthors.objects.create(author=cls.author2, likes_author=cls.author3) FavoriteAuthors.objects.create(author=cls.author3, likes_author=cls.author1) def test_foreignkey(self): with self.assertNumQueries(2): qs = Author.objects.prefetch_related('addresses') addresses = [[str(address) for address in obj.addresses.all()] for obj in qs] self.assertEqual(addresses, [[str(self.author_address)], [], []]) def test_m2m(self): with self.assertNumQueries(3): qs = Author.objects.all().prefetch_related('favorite_authors', 'favors_me') favorites = [( [str(i_like) for i_like in author.favorite_authors.all()], [str(likes_me) for likes_me in author.favors_me.all()] ) for author in qs] self.assertEqual( favorites, [ ([str(self.author2)], [str(self.author3)]), ([str(self.author3)], [str(self.author1)]), ([str(self.author1)], [str(self.author2)]) ] ) class LookupOrderingTest(TestCase): """ Test cases that demonstrate that ordering of lookups is important, and ensure it is preserved. """ def setUp(self): self.person1 = Person.objects.create(name="Joe") self.person2 = Person.objects.create(name="Mary") # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. 
self.house1 = House.objects.create(address="123 Main St") self.room1_1 = Room.objects.create(name="Dining room", house=self.house1) self.room1_2 = Room.objects.create(name="Lounge", house=self.house1) self.room1_3 = Room.objects.create(name="Kitchen", house=self.house1) self.house1.main_room = self.room1_1 self.house1.save() self.person1.houses.add(self.house1) self.house2 = House.objects.create(address="45 Side St") self.room2_1 = Room.objects.create(name="Dining room", house=self.house2) self.room2_2 = Room.objects.create(name="Lounge", house=self.house2) self.house2.main_room = self.room2_1 self.house2.save() self.person1.houses.add(self.house2) self.house3 = House.objects.create(address="6 Downing St") self.room3_1 = Room.objects.create(name="Dining room", house=self.house3) self.room3_2 = Room.objects.create(name="Lounge", house=self.house3) self.room3_3 = Room.objects.create(name="Kitchen", house=self.house3) self.house3.main_room = self.room3_1 self.house3.save() self.person2.houses.add(self.house3) self.house4 = House.objects.create(address="7 Regents St") self.room4_1 = Room.objects.create(name="Dining room", house=self.house4) self.room4_2 = Room.objects.create(name="Lounge", house=self.house4) self.house4.main_room = self.room4_1 self.house4.save() self.person2.houses.add(self.house4) def test_order(self): with self.assertNumQueries(4): # The following two queries must be done in the same order as written, # otherwise 'primary_house' will cause non-prefetched lookups qs = Person.objects.prefetch_related('houses__rooms', 'primary_house__occupants') [list(p.primary_house.occupants.all()) for p in qs] class NullableTest(TestCase): @classmethod def setUpTestData(cls): boss = Employee.objects.create(name="Peter") Employee.objects.create(name="Joe", boss=boss) Employee.objects.create(name="Angela", boss=boss) def test_traverse_nullable(self): # Because we use select_related() for 'boss', it doesn't need to be # prefetched, but we can still traverse it although it contains some nulls with self.assertNumQueries(2): qs = Employee.objects.select_related('boss').prefetch_related('boss__serfs') co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs] qs2 = Employee.objects.select_related('boss') co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2] self.assertEqual(co_serfs, co_serfs2) def test_prefetch_nullable(self): # One for main employee, one for boss, one for serfs with self.assertNumQueries(3): qs = Employee.objects.prefetch_related('boss__serfs') co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs] qs2 = Employee.objects.all() co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2] self.assertEqual(co_serfs, co_serfs2) def test_in_bulk(self): """ In-bulk does correctly prefetch objects by not using .iterator() directly. """ boss1 = Employee.objects.create(name="Peter") boss2 = Employee.objects.create(name="Jack") with self.assertNumQueries(2): # Prefetch is done and it does not cause any errors. 
bulk = Employee.objects.prefetch_related('serfs').in_bulk([boss1.pk, boss2.pk]) for b in bulk.values(): list(b.serfs.all()) class MultiDbTests(TestCase): databases = {'default', 'other'} def test_using_is_honored_m2m(self): B = Book.objects.using('other') A = Author.objects.using('other') book1 = B.create(title="Poems") book2 = B.create(title="Jane Eyre") book3 = B.create(title="Wuthering Heights") book4 = B.create(title="Sense and Sensibility") author1 = A.create(name="Charlotte", first_book=book1) author2 = A.create(name="Anne", first_book=book1) author3 = A.create(name="Emily", first_book=book1) author4 = A.create(name="Jane", first_book=book4) book1.authors.add(author1, author2, author3) book2.authors.add(author1) book3.authors.add(author3) book4.authors.add(author4) # Forward qs1 = B.prefetch_related('authors') with self.assertNumQueries(2, using='other'): books = "".join("%s (%s)\n" % (book.title, ", ".join(a.name for a in book.authors.all())) for book in qs1) self.assertEqual(books, "Poems (Charlotte, Anne, Emily)\n" "Jane Eyre (Charlotte)\n" "Wuthering Heights (Emily)\n" "Sense and Sensibility (Jane)\n") # Reverse qs2 = A.prefetch_related('books') with self.assertNumQueries(2, using='other'): authors = "".join("%s: %s\n" % (author.name, ", ".join(b.title for b in author.books.all())) for author in qs2) self.assertEqual(authors, "Charlotte: Poems, Jane Eyre\n" "Anne: Poems\n" "Emily: Poems, Wuthering Heights\n" "Jane: Sense and Sensibility\n") def test_using_is_honored_fkey(self): B = Book.objects.using('other') A = Author.objects.using('other') book1 = B.create(title="Poems") book2 = B.create(title="Sense and Sensibility") A.create(name="Charlotte Bronte", first_book=book1) A.create(name="Jane Austen", first_book=book2) # Forward with self.assertNumQueries(2, using='other'): books = ", ".join(a.first_book.title for a in A.prefetch_related('first_book')) self.assertEqual("Poems, Sense and Sensibility", books) # Reverse with self.assertNumQueries(2, using='other'): books = "".join("%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related('first_time_authors')) self.assertEqual(books, "Poems (Charlotte Bronte)\n" "Sense and Sensibility (Jane Austen)\n") def test_using_is_honored_inheritance(self): B = BookWithYear.objects.using('other') A = AuthorWithAge.objects.using('other') book1 = B.create(title="Poems", published_year=2010) B.create(title="More poems", published_year=2011) A.create(name='Jane', first_book=book1, age=50) A.create(name='Tom', first_book=book1, age=49) # parent link with self.assertNumQueries(2, using='other'): authors = ", ".join(a.author.name for a in A.prefetch_related('author')) self.assertEqual(authors, "Jane, Tom") # child link with self.assertNumQueries(2, using='other'): ages = ", ".join(str(a.authorwithage.age) for a in A.prefetch_related('authorwithage')) self.assertEqual(ages, "50, 49") def test_using_is_honored_custom_qs(self): B = Book.objects.using('other') A = Author.objects.using('other') book1 = B.create(title="Poems") book2 = B.create(title="Sense and Sensibility") A.create(name="Charlotte Bronte", first_book=book1) A.create(name="Jane Austen", first_book=book2) # Implicit hinting with self.assertNumQueries(2, using='other'): prefetch = Prefetch('first_time_authors', queryset=Author.objects.all()) books = "".join("%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch)) self.assertEqual(books, "Poems (Charlotte Bronte)\n" "Sense and 
Sensibility (Jane Austen)\n") # Explicit using on the same db. with self.assertNumQueries(2, using='other'): prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('other')) books = "".join("%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch)) self.assertEqual(books, "Poems (Charlotte Bronte)\n" "Sense and Sensibility (Jane Austen)\n") # Explicit using on a different db. with self.assertNumQueries(1, using='default'), self.assertNumQueries(1, using='other'): prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('default')) books = "".join("%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch)) self.assertEqual(books, "Poems ()\n" "Sense and Sensibility ()\n") class Ticket19607Tests(TestCase): def setUp(self): for id, name1, name2 in [ (1, 'einfach', 'simple'), (2, 'schwierig', 'difficult'), ]: LessonEntry.objects.create(id=id, name1=name1, name2=name2) for id, lesson_entry_id, name in [ (1, 1, 'einfach'), (2, 1, 'simple'), (3, 2, 'schwierig'), (4, 2, 'difficult'), ]: WordEntry.objects.create(id=id, lesson_entry_id=lesson_entry_id, name=name) def test_bug(self): list(WordEntry.objects.prefetch_related('lesson_entry', 'lesson_entry__wordentry_set')) class Ticket21410Tests(TestCase): def setUp(self): self.book1 = Book.objects.create(title="Poems") self.book2 = Book.objects.create(title="Jane Eyre") self.book3 = Book.objects.create(title="Wuthering Heights") self.book4 = Book.objects.create(title="Sense and Sensibility") self.author1 = Author2.objects.create(name="Charlotte", first_book=self.book1) self.author2 = Author2.objects.create(name="Anne", first_book=self.book1) self.author3 = Author2.objects.create(name="Emily", first_book=self.book1) self.author4 = Author2.objects.create(name="Jane", first_book=self.book4) self.author1.favorite_books.add(self.book1, self.book2, self.book3) self.author2.favorite_books.add(self.book1) self.author3.favorite_books.add(self.book2) self.author4.favorite_books.add(self.book3) def test_bug(self): list(Author2.objects.prefetch_related('first_book', 'favorite_books')) class Ticket21760Tests(TestCase): def setUp(self): self.rooms = [] for _ in range(3): house = House.objects.create() for _ in range(3): self.rooms.append(Room.objects.create(house=house)) # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. house.main_room = self.rooms[-3] house.save() def test_bug(self): prefetcher = get_prefetcher(self.rooms[0], 'house', 'house')[0] queryset = prefetcher.get_prefetch_queryset(list(Room.objects.all()))[0] self.assertNotIn(' JOIN ', str(queryset.query)) class DirectPrefetchedObjectCacheReuseTests(TestCase): """ prefetch_related() reuses objects fetched in _prefetched_objects_cache. When objects are prefetched and not stored as an instance attribute (often intermediary relationships), they are saved to the _prefetched_objects_cache attribute. prefetch_related() takes _prefetched_objects_cache into account when determining whether an object has been fetched[1] and retrieves results from it when it is populated [2]. 
[1]: #25546 (duplicate queries on nested Prefetch) [2]: #27554 (queryset evaluation fails with a mix of nested and flattened prefetches) """ @classmethod def setUpTestData(cls): cls.book1, cls.book2 = [ Book.objects.create(title='book1'), Book.objects.create(title='book2'), ] cls.author11, cls.author12, cls.author21 = [ Author.objects.create(first_book=cls.book1, name='Author11'), Author.objects.create(first_book=cls.book1, name='Author12'), Author.objects.create(first_book=cls.book2, name='Author21'), ] cls.author1_address1, cls.author1_address2, cls.author2_address1 = [ AuthorAddress.objects.create(author=cls.author11, address='Happy place'), AuthorAddress.objects.create(author=cls.author12, address='Haunted house'), AuthorAddress.objects.create(author=cls.author21, address='Happy place'), ] cls.bookwithyear1 = BookWithYear.objects.create(title='Poems', published_year=2010) cls.bookreview1 = BookReview.objects.create(book=cls.bookwithyear1) def test_detect_is_fetched(self): """ Nested prefetch_related() shouldn't trigger duplicate queries for the same lookup. """ with self.assertNumQueries(3): books = Book.objects.filter( title__in=['book1', 'book2'], ).prefetch_related( Prefetch( 'first_time_authors', Author.objects.prefetch_related( Prefetch( 'addresses', AuthorAddress.objects.filter(address='Happy place'), ) ), ), ) book1, book2 = list(books) with self.assertNumQueries(0): self.assertSequenceEqual(book1.first_time_authors.all(), [self.author11, self.author12]) self.assertSequenceEqual(book2.first_time_authors.all(), [self.author21]) self.assertSequenceEqual(book1.first_time_authors.all()[0].addresses.all(), [self.author1_address1]) self.assertSequenceEqual(book1.first_time_authors.all()[1].addresses.all(), []) self.assertSequenceEqual(book2.first_time_authors.all()[0].addresses.all(), [self.author2_address1]) self.assertEqual( list(book1.first_time_authors.all()), list(book1.first_time_authors.all().all()) ) self.assertEqual( list(book2.first_time_authors.all()), list(book2.first_time_authors.all().all()) ) self.assertEqual( list(book1.first_time_authors.all()[0].addresses.all()), list(book1.first_time_authors.all()[0].addresses.all().all()) ) self.assertEqual( list(book1.first_time_authors.all()[1].addresses.all()), list(book1.first_time_authors.all()[1].addresses.all().all()) ) self.assertEqual( list(book2.first_time_authors.all()[0].addresses.all()), list(book2.first_time_authors.all()[0].addresses.all().all()) ) def test_detect_is_fetched_with_to_attr(self): with self.assertNumQueries(3): books = Book.objects.filter( title__in=['book1', 'book2'], ).prefetch_related( Prefetch( 'first_time_authors', Author.objects.prefetch_related( Prefetch( 'addresses', AuthorAddress.objects.filter(address='Happy place'), to_attr='happy_place', ) ), to_attr='first_authors', ), ) book1, book2 = list(books) with self.assertNumQueries(0): self.assertEqual(book1.first_authors, [self.author11, self.author12]) self.assertEqual(book2.first_authors, [self.author21]) self.assertEqual(book1.first_authors[0].happy_place, [self.author1_address1]) self.assertEqual(book1.first_authors[1].happy_place, []) self.assertEqual(book2.first_authors[0].happy_place, [self.author2_address1]) def test_prefetch_reverse_foreign_key(self): with self.assertNumQueries(2): bookwithyear1, = BookWithYear.objects.prefetch_related('bookreview_set') with self.assertNumQueries(0): self.assertCountEqual(bookwithyear1.bookreview_set.all(), [self.bookreview1]) with self.assertNumQueries(0): prefetch_related_objects([bookwithyear1], 
'bookreview_set') def test_add_clears_prefetched_objects(self): bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk) prefetch_related_objects([bookwithyear], 'bookreview_set') self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1]) new_review = BookReview.objects.create() bookwithyear.bookreview_set.add(new_review) self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1, new_review]) def test_remove_clears_prefetched_objects(self): bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk) prefetch_related_objects([bookwithyear], 'bookreview_set') self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1]) bookwithyear.bookreview_set.remove(self.bookreview1) self.assertCountEqual(bookwithyear.bookreview_set.all(), []) class ReadPrefetchedObjectsCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.book1 = Book.objects.create(title='Les confessions Volume I') cls.book2 = Book.objects.create(title='Candide') cls.author1 = AuthorWithAge.objects.create(name='Rousseau', first_book=cls.book1, age=70) cls.author2 = AuthorWithAge.objects.create(name='Voltaire', first_book=cls.book2, age=65) cls.book1.authors.add(cls.author1) cls.book2.authors.add(cls.author2) FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2) def test_retrieves_results_from_prefetched_objects_cache(self): """ When intermediary results are prefetched without a destination attribute, they are saved in the RelatedManager's cache (_prefetched_objects_cache). prefetch_related() uses this cache (#27554). """ authors = AuthorWithAge.objects.prefetch_related( Prefetch( 'author', queryset=Author.objects.prefetch_related( # Results are saved in the RelatedManager's cache # (_prefetched_objects_cache) and do not replace the # RelatedManager on Author instances (favorite_authors) Prefetch('favorite_authors__first_book'), ), ), ) with self.assertNumQueries(4): # AuthorWithAge -> Author -> FavoriteAuthors, Book self.assertQuerysetEqual(authors, ['<AuthorWithAge: Rousseau>', '<AuthorWithAge: Voltaire>'])
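

# Illustrative sketch (not part of the original suite): the nested-Prefetch
# pattern the cache-reuse tests above exercise, condensed into one place. It
# reuses the Book/Author models from this suite's models.py; the function
# itself is hypothetical and exists only as documentation.
def _nested_prefetch_example():
    books = Book.objects.prefetch_related(
        Prefetch(
            'first_time_authors',
            queryset=Author.objects.prefetch_related('addresses'),
            to_attr='first_authors',  # results land in a plain list attribute
        ),
    )
    for book in books:  # three queries total, regardless of book count
        for author in book.first_authors:  # no query: populated via to_attr
            list(author.addresses.all())  # no query: served from the cache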
701b29e89014fb8599e8a17965456be5d29a25a55acd9732ff140cc041a59d9f
import sys
from unittest import skipIf

from asgiref.sync import async_to_sync

from django.core.exceptions import SynchronousOnlyOperation
from django.test import SimpleTestCase
from django.utils.asyncio import async_unsafe

from .models import SimpleModel


@skipIf(sys.platform == 'win32' and (3, 8, 0) < sys.version_info < (3, 8, 1), 'https://bugs.python.org/issue38563')
class DatabaseConnectionTest(SimpleTestCase):
    """A database connection cannot be used in an async context."""
    @async_to_sync
    async def test_get_async_connection(self):
        with self.assertRaises(SynchronousOnlyOperation):
            list(SimpleModel.objects.all())


@skipIf(sys.platform == 'win32' and (3, 8, 0) < sys.version_info < (3, 8, 1), 'https://bugs.python.org/issue38563')
class AsyncUnsafeTest(SimpleTestCase):
    """
    async_unsafe decorator should work correctly and return the correct
    message.
    """
    @async_unsafe
    def dangerous_method(self):
        return True

    @async_to_sync
    async def test_async_unsafe(self):
        # async_unsafe decorator catches bad access and returns the right
        # message.
        msg = (
            'You cannot call this from an async context - use a thread or '
            'sync_to_async.'
        )
        with self.assertRaisesMessage(SynchronousOnlyOperation, msg):
            self.dangerous_method()
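

# Illustrative sketch (not part of the original module): the supported way to
# reach the ORM from async code, using asgiref's sync_to_async helper that
# the SynchronousOnlyOperation message above points users to. SimpleModel
# comes from this app's models.py.
from asgiref.sync import sync_to_async


async def _orm_from_async_example():
    # Wrapping the blocking ORM call in sync_to_async runs it in a worker
    # thread, avoiding SynchronousOnlyOperation.
    fetch_all = sync_to_async(lambda: list(SimpleModel.objects.all()))
    return await fetch_all()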
e198cee7b13f7992eb3ee9529b65f17bff5890c2e1fc6bf817db21592d3df886
import json import mimetypes import os import sys from copy import copy from functools import partial from http import HTTPStatus from importlib import import_module from io import BytesIO from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit from django.conf import settings from django.core.handlers.base import BaseHandler from django.core.handlers.wsgi import WSGIRequest from django.core.serializers.json import DjangoJSONEncoder from django.core.signals import ( got_request_exception, request_finished, request_started, ) from django.db import close_old_connections from django.http import HttpRequest, QueryDict, SimpleCookie from django.test import signals from django.test.utils import ContextList from django.urls import resolve from django.utils.encoding import force_bytes from django.utils.functional import SimpleLazyObject from django.utils.http import urlencode from django.utils.itercompat import is_iterable from django.utils.regex_helper import _lazy_re_compile __all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart') BOUNDARY = 'BoUnDaRyStRiNg' MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY CONTENT_TYPE_RE = _lazy_re_compile(r'.*; charset=([\w\d-]+);?') # Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8 JSON_CONTENT_TYPE_RE = _lazy_re_compile(r'^application\/(.+\+)?json') class RedirectCycleError(Exception): """The test client has been asked to follow a redirect loop.""" def __init__(self, message, last_response): super().__init__(message) self.last_response = last_response self.redirect_chain = last_response.redirect_chain class FakePayload: """ A wrapper around BytesIO that restricts what can be read since data from the network can't be sought and cannot be read outside of its content length. This makes sure that views can't do anything under the test client that wouldn't work in real life. """ def __init__(self, content=None): self.__content = BytesIO() self.__len = 0 self.read_started = False if content is not None: self.write(content) def __len__(self): return self.__len def read(self, num_bytes=None): if not self.read_started: self.__content.seek(0) self.read_started = True if num_bytes is None: num_bytes = self.__len or 0 assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data." content = self.__content.read(num_bytes) self.__len -= num_bytes return content def write(self, content): if self.read_started: raise ValueError("Unable to write a payload after it's been read") content = force_bytes(content) self.__content.write(content) self.__len += len(content) def closing_iterator_wrapper(iterable, close): try: yield from iterable finally: request_finished.disconnect(close_old_connections) close() # will fire request_finished request_finished.connect(close_old_connections) def conditional_content_removal(request, response): """ Simulate the behavior of most Web servers by removing the content of responses for HEAD requests, 1xx, 204, and 304 responses. Ensure compliance with RFC 7230, section 3.3.3. """ if 100 <= response.status_code < 200 or response.status_code in (204, 304): if response.streaming: response.streaming_content = [] else: response.content = b'' if request.method == 'HEAD': if response.streaming: response.streaming_content = [] else: response.content = b'' return response class ClientHandler(BaseHandler): """ A HTTP Handler that can be used for testing purposes. 
Use the WSGI interface to compose requests, but return the raw HttpResponse object with the originating WSGIRequest attached to its ``wsgi_request`` attribute. """ def __init__(self, enforce_csrf_checks=True, *args, **kwargs): self.enforce_csrf_checks = enforce_csrf_checks super().__init__(*args, **kwargs) def __call__(self, environ): # Set up middleware if needed. We couldn't do this earlier, because # settings weren't available. if self._middleware_chain is None: self.load_middleware() request_started.disconnect(close_old_connections) request_started.send(sender=self.__class__, environ=environ) request_started.connect(close_old_connections) request = WSGIRequest(environ) # sneaky little hack so that we can easily get round # CsrfViewMiddleware. This makes life easier, and is probably # required for backwards compatibility with external tests against # admin views. request._dont_enforce_csrf_checks = not self.enforce_csrf_checks # Request goes through middleware. response = self.get_response(request) # Simulate behaviors of most Web servers. conditional_content_removal(request, response) # Attach the originating request to the response so that it could be # later retrieved. response.wsgi_request = request # Emulate a WSGI server by calling the close method on completion. if response.streaming: response.streaming_content = closing_iterator_wrapper( response.streaming_content, response.close) else: request_finished.disconnect(close_old_connections) response.close() # will fire request_finished request_finished.connect(close_old_connections) return response def store_rendered_templates(store, signal, sender, template, context, **kwargs): """ Store templates and contexts that are rendered. The context is copied so that it is an accurate representation at the time of rendering. """ store.setdefault('templates', []).append(template) if 'context' not in store: store['context'] = ContextList() store['context'].append(copy(context)) def encode_multipart(boundary, data): """ Encode multipart POST data from a dictionary of form values. The key will be used as the form data name; the value will be transmitted as content. If the value is a file, the contents of the file will be sent as an application/octet-stream; otherwise, str(value) will be sent. """ lines = [] def to_bytes(s): return force_bytes(s, settings.DEFAULT_CHARSET) # Not by any means perfect, but good enough for our purposes. def is_file(thing): return hasattr(thing, "read") and callable(thing.read) # Each bit of the multipart form data could be either a form value or a # file, or a *list* of form values and/or files. Remember that HTTP field # names can be duplicated! for (key, value) in data.items(): if value is None: raise TypeError( "Cannot encode None for key '%s' as POST data. Did you mean " "to pass an empty string or omit the value?" % key ) elif is_file(value): lines.extend(encode_file(boundary, key, value)) elif not isinstance(value, str) and is_iterable(value): for item in value: if is_file(item): lines.extend(encode_file(boundary, key, item)) else: lines.extend(to_bytes(val) for val in [ '--%s' % boundary, 'Content-Disposition: form-data; name="%s"' % key, '', item ]) else: lines.extend(to_bytes(val) for val in [ '--%s' % boundary, 'Content-Disposition: form-data; name="%s"' % key, '', value ]) lines.extend([ to_bytes('--%s--' % boundary), b'', ]) return b'\r\n'.join(lines) def encode_file(boundary, key, file): def to_bytes(s): return force_bytes(s, settings.DEFAULT_CHARSET) # file.name might not be a string. 
For example, it's an int for # tempfile.TemporaryFile(). file_has_string_name = hasattr(file, 'name') and isinstance(file.name, str) filename = os.path.basename(file.name) if file_has_string_name else '' if hasattr(file, 'content_type'): content_type = file.content_type elif filename: content_type = mimetypes.guess_type(filename)[0] else: content_type = None if content_type is None: content_type = 'application/octet-stream' filename = filename or key return [ to_bytes('--%s' % boundary), to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)), to_bytes('Content-Type: %s' % content_type), b'', to_bytes(file.read()) ] class RequestFactory: """ Class that lets you create mock Request objects for use in testing. Usage: rf = RequestFactory() get_request = rf.get('/hello/') post_request = rf.post('/submit/', {'foo': 'bar'}) Once you have a request object you can pass it to any view function, just as if that view had been hooked up using a URLconf. """ def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults): self.json_encoder = json_encoder self.defaults = defaults self.cookies = SimpleCookie() self.errors = BytesIO() def _base_environ(self, **request): """ The base environment for a request. """ # This is a minimal valid WSGI environ dictionary, plus: # - HTTP_COOKIE: for cookie support, # - REMOTE_ADDR: often useful, see #8551. # See https://www.python.org/dev/peps/pep-3333/#environ-variables return { 'HTTP_COOKIE': '; '.join(sorted( '%s=%s' % (morsel.key, morsel.coded_value) for morsel in self.cookies.values() )), 'PATH_INFO': '/', 'REMOTE_ADDR': '127.0.0.1', 'REQUEST_METHOD': 'GET', 'SCRIPT_NAME': '', 'SERVER_NAME': 'testserver', 'SERVER_PORT': '80', 'SERVER_PROTOCOL': 'HTTP/1.1', 'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http', 'wsgi.input': FakePayload(b''), 'wsgi.errors': self.errors, 'wsgi.multiprocess': True, 'wsgi.multithread': False, 'wsgi.run_once': False, **self.defaults, **request, } def request(self, **request): "Construct a generic request object." return WSGIRequest(self._base_environ(**request)) def _encode_data(self, data, content_type): if content_type is MULTIPART_CONTENT: return encode_multipart(BOUNDARY, data) else: # Encode the content so that the byte representation is correct. match = CONTENT_TYPE_RE.match(content_type) if match: charset = match.group(1) else: charset = settings.DEFAULT_CHARSET return force_bytes(data, encoding=charset) def _encode_json(self, data, content_type): """ Return encoded JSON if data is a dict, list, or tuple and content_type is application/json. """ should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple)) return json.dumps(data, cls=self.json_encoder) if should_encode else data def _get_path(self, parsed): path = parsed.path # If there are parameters, add them if parsed.params: path += ";" + parsed.params path = unquote_to_bytes(path) # Replace the behavior where non-ASCII values in the WSGI environ are # arbitrarily decoded with ISO-8859-1. # Refs comment in `get_bytes_from_wsgi()`. 
return path.decode('iso-8859-1') def get(self, path, data=None, secure=False, **extra): """Construct a GET request.""" data = {} if data is None else data return self.generic('GET', path, secure=secure, **{ 'QUERY_STRING': urlencode(data, doseq=True), **extra, }) def post(self, path, data=None, content_type=MULTIPART_CONTENT, secure=False, **extra): """Construct a POST request.""" data = self._encode_json({} if data is None else data, content_type) post_data = self._encode_data(data, content_type) return self.generic('POST', path, post_data, content_type, secure=secure, **extra) def head(self, path, data=None, secure=False, **extra): """Construct a HEAD request.""" data = {} if data is None else data return self.generic('HEAD', path, secure=secure, **{ 'QUERY_STRING': urlencode(data, doseq=True), **extra, }) def trace(self, path, secure=False, **extra): """Construct a TRACE request.""" return self.generic('TRACE', path, secure=secure, **extra) def options(self, path, data='', content_type='application/octet-stream', secure=False, **extra): "Construct an OPTIONS request." return self.generic('OPTIONS', path, data, content_type, secure=secure, **extra) def put(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a PUT request.""" data = self._encode_json(data, content_type) return self.generic('PUT', path, data, content_type, secure=secure, **extra) def patch(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a PATCH request.""" data = self._encode_json(data, content_type) return self.generic('PATCH', path, data, content_type, secure=secure, **extra) def delete(self, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct a DELETE request.""" data = self._encode_json(data, content_type) return self.generic('DELETE', path, data, content_type, secure=secure, **extra) def generic(self, method, path, data='', content_type='application/octet-stream', secure=False, **extra): """Construct an arbitrary HTTP request.""" parsed = urlparse(str(path)) # path can be lazy data = force_bytes(data, settings.DEFAULT_CHARSET) r = { 'PATH_INFO': self._get_path(parsed), 'REQUEST_METHOD': method, 'SERVER_PORT': '443' if secure else '80', 'wsgi.url_scheme': 'https' if secure else 'http', } if data: r.update({ 'CONTENT_LENGTH': str(len(data)), 'CONTENT_TYPE': content_type, 'wsgi.input': FakePayload(data), }) r.update(extra) # If QUERY_STRING is absent or empty, we want to extract it from the URL. if not r.get('QUERY_STRING'): # WSGI requires latin-1 encoded strings. See get_path_info(). query_string = parsed[4].encode().decode('iso-8859-1') r['QUERY_STRING'] = query_string return self.request(**r) class Client(RequestFactory): """ A class that can act as a client for testing purposes. It allows the user to compose GET and POST requests, and obtain the response that the server gave to those requests. The server Response objects are annotated with the details of the contexts and templates that were rendered during the process of serving the request. Client objects are stateful - they will retain cookie (and thus session) details for the lifetime of the Client instance. This is not intended as a replacement for Twill/Selenium or the like - it is here to allow testing against the contexts and templates produced by a view, rather than the HTML rendered to the end-user. 
""" def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults): super().__init__(**defaults) self.handler = ClientHandler(enforce_csrf_checks) self.raise_request_exception = raise_request_exception self.exc_info = None self.extra = None def store_exc_info(self, **kwargs): """Store exceptions when they are generated by a view.""" self.exc_info = sys.exc_info() @property def session(self): """Return the current session variables.""" engine = import_module(settings.SESSION_ENGINE) cookie = self.cookies.get(settings.SESSION_COOKIE_NAME) if cookie: return engine.SessionStore(cookie.value) session = engine.SessionStore() session.save() self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key return session def request(self, **request): """ The master request method. Compose the environment dictionary and pass to the handler, return the result of the handler. Assume defaults for the query environment, which can be overridden using the arguments to the request. """ environ = self._base_environ(**request) # Curry a data dictionary into an instance of the template renderer # callback function. data = {} on_template_render = partial(store_rendered_templates, data) signal_uid = "template-render-%s" % id(request) signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid) # Capture exceptions created by the handler. exception_uid = "request-exception-%s" % id(request) got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid) try: response = self.handler(environ) finally: signals.template_rendered.disconnect(dispatch_uid=signal_uid) got_request_exception.disconnect(dispatch_uid=exception_uid) # Look for a signaled exception, clear the current context exception # data, then re-raise the signaled exception. Also clear the signaled # exception from the local cache. response.exc_info = self.exc_info if self.exc_info: _, exc_value, _ = self.exc_info self.exc_info = None if self.raise_request_exception: raise exc_value # Save the client and request that stimulated the response. response.client = self response.request = request # Add any rendered template detail to the response. response.templates = data.get('templates', []) response.context = data.get('context') response.json = partial(self._parse_json, response) # Attach the ResolverMatch instance to the response. response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO'])) # Flatten a single context. Not really necessary anymore thanks to the # __getattr__ flattening in ContextList, but has some edge case # backwards compatibility implications. if response.context and len(response.context) == 1: response.context = response.context[0] # Update persistent cookie data. 
if response.cookies: self.cookies.update(response.cookies) return response def get(self, path, data=None, follow=False, secure=False, **extra): """Request a response from the server using GET.""" self.extra = extra response = super().get(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def post(self, path, data=None, content_type=MULTIPART_CONTENT, follow=False, secure=False, **extra): """Request a response from the server using POST.""" self.extra = extra response = super().post(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def head(self, path, data=None, follow=False, secure=False, **extra): """Request a response from the server using HEAD.""" self.extra = extra response = super().head(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def options(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Request a response from the server using OPTIONS.""" self.extra = extra response = super().options(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def put(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a resource to the server using PUT.""" self.extra = extra response = super().put(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def patch(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a resource to the server using PATCH.""" self.extra = extra response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def delete(self, path, data='', content_type='application/octet-stream', follow=False, secure=False, **extra): """Send a DELETE request to the server.""" self.extra = extra response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, content_type=content_type, **extra) return response def trace(self, path, data='', follow=False, secure=False, **extra): """Send a TRACE request to the server.""" self.extra = extra response = super().trace(path, data=data, secure=secure, **extra) if follow: response = self._handle_redirects(response, data=data, **extra) return response def login(self, **credentials): """ Set the Factory to appear as if it has successfully logged into a site. Return True if login is possible; False if the provided credentials are incorrect. 
""" from django.contrib.auth import authenticate user = authenticate(**credentials) if user: self._login(user) return True else: return False def force_login(self, user, backend=None): def get_backend(): from django.contrib.auth import load_backend for backend_path in settings.AUTHENTICATION_BACKENDS: backend = load_backend(backend_path) if hasattr(backend, 'get_user'): return backend_path if backend is None: backend = get_backend() user.backend = backend self._login(user, backend) def _login(self, user, backend=None): from django.contrib.auth import login engine = import_module(settings.SESSION_ENGINE) # Create a fake request to store login details. request = HttpRequest() if self.session: request.session = self.session else: request.session = engine.SessionStore() login(request, user, backend) # Save the session values. request.session.save() # Set the cookie to represent the session. session_cookie = settings.SESSION_COOKIE_NAME self.cookies[session_cookie] = request.session.session_key cookie_data = { 'max-age': None, 'path': '/', 'domain': settings.SESSION_COOKIE_DOMAIN, 'secure': settings.SESSION_COOKIE_SECURE or None, 'expires': None, } self.cookies[session_cookie].update(cookie_data) def logout(self): """Log out the user by removing the cookies and session object.""" from django.contrib.auth import get_user, logout request = HttpRequest() engine = import_module(settings.SESSION_ENGINE) if self.session: request.session = self.session request.user = get_user(request) else: request.session = engine.SessionStore() logout(request) self.cookies = SimpleCookie() def _parse_json(self, response, **extra): if not hasattr(response, '_json'): if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')): raise ValueError( 'Content-Type header is "{0}", not "application/json"' .format(response.get('Content-Type')) ) response._json = json.loads(response.content.decode(response.charset), **extra) return response._json def _handle_redirects(self, response, data='', content_type='', **extra): """ Follow any redirects by requesting responses from the server using GET. """ response.redirect_chain = [] redirect_status_codes = ( HTTPStatus.MOVED_PERMANENTLY, HTTPStatus.FOUND, HTTPStatus.SEE_OTHER, HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT, ) while response.status_code in redirect_status_codes: response_url = response.url redirect_chain = response.redirect_chain redirect_chain.append((response_url, response.status_code)) url = urlsplit(response_url) if url.scheme: extra['wsgi.url_scheme'] = url.scheme if url.hostname: extra['SERVER_NAME'] = url.hostname if url.port: extra['SERVER_PORT'] = str(url.port) # Prepend the request path to handle relative path redirects path = url.path if not path.startswith('/'): path = urljoin(response.request['PATH_INFO'], path) if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT): # Preserve request method post-redirect for 307/308 responses. request_method = getattr(self, response.request['REQUEST_METHOD'].lower()) else: request_method = self.get data = QueryDict(url.query) content_type = None response = request_method(path, data=data, content_type=content_type, follow=False, **extra) response.redirect_chain = redirect_chain if redirect_chain[-1] in redirect_chain[:-1]: # Check that we're not redirecting to somewhere we've already # been to, to prevent loops. 
raise RedirectCycleError("Redirect loop detected.", last_response=response) if len(redirect_chain) > 20: # Such a lengthy chain likely also means a loop, but one with # a growing path, changing view, or changing query argument; # 20 is the value of "network.http.redirection-limit" from Firefox. raise RedirectCycleError("Too many redirects.", last_response=response) return response
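

# Usage sketch (illustrative only, not Django API; '/hello/', '/api/',
# '/old-url/' and my_view are hypothetical names): the two entry points this
# module provides, a bare RequestFactory for calling a view directly and a
# full Client that runs middleware, follows redirects, and parses JSON.
def _example_client_usage(my_view):
    rf = RequestFactory()
    request = rf.get('/hello/')  # bare WSGIRequest; middleware is not run
    my_view(request)             # call the view function directly

    client = Client()
    # A dict posted with a JSON content type is serialized by _encode_json(),
    # and response.json() parses the body because of the response type.
    response = client.post('/api/', {'k': 'v'}, content_type='application/json')
    response = client.get('/old-url/', follow=True)
    return response.redirect_chain  # [(url, status_code), ...] pairs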
92eac3a1ecfd46b155ff07a19411828b048c4c629af886efb47c105ab069cd6a
"""Compare two HTML documents.""" from html.parser import HTMLParser from django.utils.regex_helper import _lazy_re_compile # ASCII whitespace is U+0009 TAB, U+000A LF, U+000C FF, U+000D CR, or U+0020 # SPACE. # https://infra.spec.whatwg.org/#ascii-whitespace ASCII_WHITESPACE = _lazy_re_compile(r'[\t\n\f\r ]+') def normalize_whitespace(string): return ASCII_WHITESPACE.sub(' ', string) class Element: def __init__(self, name, attributes): self.name = name self.attributes = sorted(attributes) self.children = [] def append(self, element): if isinstance(element, str): element = normalize_whitespace(element) if self.children: if isinstance(self.children[-1], str): self.children[-1] += element self.children[-1] = normalize_whitespace(self.children[-1]) return elif self.children: # removing last children if it is only whitespace # this can result in incorrect dom representations since # whitespace between inline tags like <span> is significant if isinstance(self.children[-1], str): if self.children[-1].isspace(): self.children.pop() if element: self.children.append(element) def finalize(self): def rstrip_last_element(children): if children: if isinstance(children[-1], str): children[-1] = children[-1].rstrip() if not children[-1]: children.pop() children = rstrip_last_element(children) return children rstrip_last_element(self.children) for i, child in enumerate(self.children): if isinstance(child, str): self.children[i] = child.strip() elif hasattr(child, 'finalize'): child.finalize() def __eq__(self, element): if not hasattr(element, 'name') or self.name != element.name: return False if len(self.attributes) != len(element.attributes): return False if self.attributes != element.attributes: # attributes without a value is same as attribute with value that # equals the attributes name: # <input checked> == <input checked="checked"> for i in range(len(self.attributes)): attr, value = self.attributes[i] other_attr, other_value = element.attributes[i] if value is None: value = attr if other_value is None: other_value = other_attr if attr != other_attr or value != other_value: return False return self.children == element.children def __hash__(self): return hash((self.name, *self.attributes)) def _count(self, element, count=True): if not isinstance(element, str): if self == element: return 1 if isinstance(element, RootElement): if self.children == element.children: return 1 i = 0 for child in self.children: # child is text content and element is also text content, then # make a simple "text" in "text" if isinstance(child, str): if isinstance(element, str): if count: i += child.count(element) elif element in child: return 1 else: i += child._count(element, count=count) if not count and i: return i return i def __contains__(self, element): return self._count(element, count=False) > 0 def count(self, element): return self._count(element, count=True) def __getitem__(self, key): return self.children[key] def __str__(self): output = '<%s' % self.name for key, value in self.attributes: if value: output += ' %s="%s"' % (key, value) else: output += ' %s' % key if self.children: output += '>\n' output += ''.join(str(c) for c in self.children) output += '\n</%s>' % self.name else: output += '>' return output def __repr__(self): return str(self) class RootElement(Element): def __init__(self): super().__init__(None, ()) def __str__(self): return ''.join(str(c) for c in self.children) class HTMLParseError(Exception): pass class Parser(HTMLParser): # https://html.spec.whatwg.org/#void-elements SELF_CLOSING_TAGS = { 
'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'link', 'meta', 'param', 'source', 'track', 'wbr', # Deprecated tags 'frame', 'spacer', } def __init__(self): super().__init__() self.root = RootElement() self.open_tags = [] self.element_positions = {} def error(self, msg): raise HTMLParseError(msg, self.getpos()) def format_position(self, position=None, element=None): if not position and element: position = self.element_positions[element] if position is None: position = self.getpos() if hasattr(position, 'lineno'): position = position.lineno, position.offset return 'Line %d, Column %d' % position @property def current(self): if self.open_tags: return self.open_tags[-1] else: return self.root def handle_startendtag(self, tag, attrs): self.handle_starttag(tag, attrs) if tag not in self.SELF_CLOSING_TAGS: self.handle_endtag(tag) def handle_starttag(self, tag, attrs): # Special case handling of 'class' attribute, so that comparisons of DOM # instances are not sensitive to ordering of classes. attrs = [ (name, ' '.join(sorted(value for value in ASCII_WHITESPACE.split(value) if value))) if name == "class" else (name, value) for name, value in attrs ] element = Element(tag, attrs) self.current.append(element) if tag not in self.SELF_CLOSING_TAGS: self.open_tags.append(element) self.element_positions[element] = self.getpos() def handle_endtag(self, tag): if not self.open_tags: self.error("Unexpected end tag `%s` (%s)" % ( tag, self.format_position())) element = self.open_tags.pop() while element.name != tag: if not self.open_tags: self.error("Unexpected end tag `%s` (%s)" % ( tag, self.format_position())) element = self.open_tags.pop() def handle_data(self, data): self.current.append(data) def parse_html(html): """ Take a string that contains *valid* HTML and turn it into a Python object structure that can be easily compared against other HTML on semantic equivalence. Syntactical differences like which quotation is used on arguments will be ignored. """ parser = Parser() parser.feed(html) parser.close() document = parser.root document.finalize() # Removing ROOT element if it's not necessary if len(document.children) == 1: if not isinstance(document.children[0], str): document = document.children[0] return document
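

# Illustrative sketch (not part of the module's API): semantic comparison as
# used by assertHTMLEqual. Attribute order, quoting style, class ordering,
# and insignificant whitespace don't affect equality; the markup below is
# made up for the example.
def _parse_html_example():
    first = parse_html('<p class="a b" id=x>Hello <b>world</b></p>')
    second = parse_html("<p id='x' class='b  a'>Hello <b>world</b> </p>")
    return first == second  # True: both normalize to the same Element tree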
0fa2b968d61bede83a45644111aac6eeb63ad87010b3de21d63c8697d7af05c0
import functools
import re
import sys
import types
from pathlib import Path

from django.conf import settings
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import pprint
from django.urls import resolve
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
from django.utils.version import get_docs_version

# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting. Templates are
# read directly from the filesystem so that the error handler
# works even if the template loader is broken.
DEBUG_ENGINE = Engine(
    debug=True,
    libraries={'i18n': 'django.templatetags.i18n'},
)

HIDDEN_SETTINGS = _lazy_re_compile('API|TOKEN|KEY|SECRET|PASS|SIGNATURE', flags=re.IGNORECASE)

CLEANSED_SUBSTITUTE = '********************'

CURRENT_DIR = Path(__file__).parent


class CallableSettingWrapper:
    """
    Object to wrap callable appearing in settings.
    * Not to call in the debug page (#21345).
    * Not to break the debug page if the callable forbids setting attributes
      (#23070).
    """
    def __init__(self, callable_setting):
        self._wrapped = callable_setting

    def __repr__(self):
        return repr(self._wrapped)


def cleanse_setting(key, value):
    """
    Cleanse an individual setting key/value of sensitive content. If the
    value is a dictionary, recursively cleanse the keys in that dictionary.
    """
    try:
        if HIDDEN_SETTINGS.search(key):
            cleansed = CLEANSED_SUBSTITUTE
        else:
            if isinstance(value, dict):
                cleansed = {k: cleanse_setting(k, v) for k, v in value.items()}
            else:
                cleansed = value
    except TypeError:
        # If the key isn't regex-able, just return as-is.
        cleansed = value

    if callable(cleansed):
        # For fixing #21345 and #23070
        cleansed = CallableSettingWrapper(cleansed)

    return cleansed


def get_safe_settings():
    """
    Return a dictionary of the settings module with values of sensitive
    settings replaced with stars (*********).
    """
    settings_dict = {}
    for k in dir(settings):
        if k.isupper():
            settings_dict[k] = cleanse_setting(k, getattr(settings, k))
    return settings_dict


def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    reporter = ExceptionReporter(request, exc_type, exc_value, tb)
    if request.is_ajax():
        text = reporter.get_traceback_text()
        return HttpResponse(text, status=status_code, content_type='text/plain; charset=utf-8')
    else:
        html = reporter.get_traceback_html()
        return HttpResponse(html, status=status_code, content_type='text/html')


@functools.lru_cache()
def get_default_exception_reporter_filter():
    # Instantiate the default filter for the first time and cache it.
    return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()


def get_exception_reporter_filter(request):
    default_filter = get_default_exception_reporter_filter()
    return getattr(request, 'exception_reporter_filter', default_filter)


class ExceptionReporterFilter:
    """
    Base for all exception reporter filter classes. All overridable hooks
    contain lenient default behaviors.
""" def get_post_parameters(self, request): if request is None: return {} else: return request.POST def get_traceback_frame_variables(self, request, tb_frame): return list(tb_frame.f_locals.items()) class SafeExceptionReporterFilter(ExceptionReporterFilter): """ Use annotations made by the sensitive_post_parameters and sensitive_variables decorators to filter out sensitive information. """ def is_active(self, request): """ This filter is to add safety in production environments (i.e. DEBUG is False). If DEBUG is True then your site is not safe anyway. This hook is provided as a convenience to easily activate or deactivate the filter on a per request basis. """ return settings.DEBUG is False def get_cleansed_multivaluedict(self, request, multivaluedict): """ Replace the keys in a MultiValueDict marked as sensitive with stars. This mitigates leaking sensitive POST parameters if something like request.POST['nonexistent_key'] throws an exception (#21098). """ sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', []) if self.is_active(request) and sensitive_post_parameters: multivaluedict = multivaluedict.copy() for param in sensitive_post_parameters: if param in multivaluedict: multivaluedict[param] = CLEANSED_SUBSTITUTE return multivaluedict def get_post_parameters(self, request): """ Replace the values of POST parameters marked as sensitive with stars (*********). """ if request is None: return {} else: sensitive_post_parameters = getattr(request, 'sensitive_post_parameters', []) if self.is_active(request) and sensitive_post_parameters: cleansed = request.POST.copy() if sensitive_post_parameters == '__ALL__': # Cleanse all parameters. for k in cleansed: cleansed[k] = CLEANSED_SUBSTITUTE return cleansed else: # Cleanse only the specified parameters. for param in sensitive_post_parameters: if param in cleansed: cleansed[param] = CLEANSED_SUBSTITUTE return cleansed else: return request.POST def cleanse_special_types(self, request, value): try: # If value is lazy or a complex object of another kind, this check # might raise an exception. isinstance checks that lazy # MultiValueDicts will have a return value. is_multivalue_dict = isinstance(value, MultiValueDict) except Exception as e: return '{!r} while evaluating {!r}'.format(e, value) if is_multivalue_dict: # Cleanse MultiValueDicts (request.POST is the one we usually care about) value = self.get_cleansed_multivaluedict(request, value) return value def get_traceback_frame_variables(self, request, tb_frame): """ Replace the values of variables marked as sensitive with stars (*********). """ # Loop through the frame's callers to see if the sensitive_variables # decorator was used. current_frame = tb_frame.f_back sensitive_variables = None while current_frame is not None: if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and 'sensitive_variables_wrapper' in current_frame.f_locals): # The sensitive_variables decorator was used, so we take note # of the sensitive variables' names. 
wrapper = current_frame.f_locals['sensitive_variables_wrapper'] sensitive_variables = getattr(wrapper, 'sensitive_variables', None) break current_frame = current_frame.f_back cleansed = {} if self.is_active(request) and sensitive_variables: if sensitive_variables == '__ALL__': # Cleanse all variables for name in tb_frame.f_locals: cleansed[name] = CLEANSED_SUBSTITUTE else: # Cleanse specified variables for name, value in tb_frame.f_locals.items(): if name in sensitive_variables: value = CLEANSED_SUBSTITUTE else: value = self.cleanse_special_types(request, value) cleansed[name] = value else: # Potentially cleanse the request and any MultiValueDicts if they # are one of the frame variables. for name, value in tb_frame.f_locals.items(): cleansed[name] = self.cleanse_special_types(request, value) if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and 'sensitive_variables_wrapper' in tb_frame.f_locals): # For good measure, obfuscate the decorated function's arguments in # the sensitive_variables decorator's frame, in case the variables # associated with those arguments were meant to be obfuscated from # the decorated function's frame. cleansed['func_args'] = CLEANSED_SUBSTITUTE cleansed['func_kwargs'] = CLEANSED_SUBSTITUTE return cleansed.items() class ExceptionReporter: """Organize and coordinate reporting on exceptions.""" def __init__(self, request, exc_type, exc_value, tb, is_email=False): self.request = request self.filter = get_exception_reporter_filter(self.request) self.exc_type = exc_type self.exc_value = exc_value self.tb = tb self.is_email = is_email self.template_info = getattr(self.exc_value, 'template_debug', None) self.template_does_not_exist = False self.postmortem = None def get_traceback_data(self): """Return a dictionary containing traceback information.""" if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist): self.template_does_not_exist = True self.postmortem = self.exc_value.chain or [self.exc_value] frames = self.get_traceback_frames() for i, frame in enumerate(frames): if 'vars' in frame: frame_vars = [] for k, v in frame['vars']: v = pprint(v) # Trim large blobs of data if len(v) > 4096: v = '%s… <trimmed %d bytes string>' % (v[0:4096], len(v)) frame_vars.append((k, v)) frame['vars'] = frame_vars frames[i] = frame unicode_hint = '' if self.exc_type and issubclass(self.exc_type, UnicodeError): start = getattr(self.exc_value, 'start', None) end = getattr(self.exc_value, 'end', None) if start is not None and end is not None: unicode_str = self.exc_value.args[1] unicode_hint = force_str( unicode_str[max(start - 5, 0):min(end + 5, len(unicode_str))], 'ascii', errors='replace' ) from django import get_version if self.request is None: user_str = None else: try: user_str = str(self.request.user) except Exception: # request.user may raise OperationalError if the database is # unavailable, for example. 
user_str = '[unable to retrieve the current user]' c = { 'is_email': self.is_email, 'unicode_hint': unicode_hint, 'frames': frames, 'request': self.request, 'user_str': user_str, 'filtered_POST_items': list(self.filter.get_post_parameters(self.request).items()), 'settings': get_safe_settings(), 'sys_executable': sys.executable, 'sys_version_info': '%d.%d.%d' % sys.version_info[0:3], 'server_time': timezone.now(), 'django_version_info': get_version(), 'sys_path': sys.path, 'template_info': self.template_info, 'template_does_not_exist': self.template_does_not_exist, 'postmortem': self.postmortem, } if self.request is not None: c['request_GET_items'] = self.request.GET.items() c['request_FILES_items'] = self.request.FILES.items() c['request_COOKIES_items'] = self.request.COOKIES.items() # Check whether exception info is available if self.exc_type: c['exception_type'] = self.exc_type.__name__ if self.exc_value: c['exception_value'] = str(self.exc_value) if frames: c['lastframe'] = frames[-1] return c def get_traceback_html(self): """Return HTML version of debug 500 HTTP error page.""" with Path(CURRENT_DIR, 'templates', 'technical_500.html').open(encoding='utf-8') as fh: t = DEBUG_ENGINE.from_string(fh.read()) c = Context(self.get_traceback_data(), use_l10n=False) return t.render(c) def get_traceback_text(self): """Return plain text version of debug 500 HTTP error page.""" with Path(CURRENT_DIR, 'templates', 'technical_500.txt').open(encoding='utf-8') as fh: t = DEBUG_ENGINE.from_string(fh.read()) c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False) return t.render(c) def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None): """ Return context_lines before and after lineno from file. Return (pre_context_lineno, pre_context, context_line, post_context). """ source = None if hasattr(loader, 'get_source'): try: source = loader.get_source(module_name) except ImportError: pass if source is not None: source = source.splitlines() if source is None: try: with open(filename, 'rb') as fp: source = fp.read().splitlines() except OSError: pass if source is None: return None, [], None, [] # If we just read the source from a file, or if the loader did not # apply tokenize.detect_encoding to decode the source into a # string, then we should do that ourselves. if isinstance(source[0], bytes): encoding = 'ascii' for line in source[:2]: # File coding may be specified. Match pattern from PEP-263 # (https://www.python.org/dev/peps/pep-0263/) match = re.search(br'coding[:=]\s*([-\w.]+)', line) if match: encoding = match.group(1).decode('ascii') break source = [str(sline, encoding, 'replace') for sline in source] lower_bound = max(0, lineno - context_lines) upper_bound = lineno + context_lines pre_context = source[lower_bound:lineno] context_line = source[lineno] post_context = source[lineno + 1:upper_bound] return lower_bound, pre_context, context_line, post_context def get_traceback_frames(self): def explicit_or_implicit_cause(exc_value): explicit = getattr(exc_value, '__cause__', None) implicit = getattr(exc_value, '__context__', None) return explicit or implicit # Get the exception and all its causes exceptions = [] exc_value = self.exc_value while exc_value: exceptions.append(exc_value) exc_value = explicit_or_implicit_cause(exc_value) if exc_value in exceptions: # Avoid infinite loop if there's a cyclic reference (#29393). 
break frames = [] # No exceptions were supplied to ExceptionReporter if not exceptions: return frames # In case there's just one exception, take the traceback from self.tb exc_value = exceptions.pop() tb = self.tb if not exceptions else exc_value.__traceback__ while tb is not None: # Support for __traceback_hide__ which is used by a few libraries # to hide internal frames. if tb.tb_frame.f_locals.get('__traceback_hide__'): tb = tb.tb_next continue filename = tb.tb_frame.f_code.co_filename function = tb.tb_frame.f_code.co_name lineno = tb.tb_lineno - 1 loader = tb.tb_frame.f_globals.get('__loader__') module_name = tb.tb_frame.f_globals.get('__name__') or '' pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file( filename, lineno, 7, loader, module_name, ) if pre_context_lineno is None: pre_context_lineno = lineno pre_context = [] context_line = '<source code not available>' post_context = [] frames.append({ 'exc_cause': explicit_or_implicit_cause(exc_value), 'exc_cause_explicit': getattr(exc_value, '__cause__', True), 'tb': tb, 'type': 'django' if module_name.startswith('django.') else 'user', 'filename': filename, 'function': function, 'lineno': lineno + 1, 'vars': self.filter.get_traceback_frame_variables(self.request, tb.tb_frame), 'id': id(tb), 'pre_context': pre_context, 'context_line': context_line, 'post_context': post_context, 'pre_context_lineno': pre_context_lineno + 1, }) # If the traceback for current exception is consumed, try the # other exception. if not tb.tb_next and exceptions: exc_value = exceptions.pop() tb = exc_value.__traceback__ else: tb = tb.tb_next return frames def technical_404_response(request, exception): """Create a technical 404 error response. `exception` is the Http404.""" try: error_url = exception.args[0]['path'] except (IndexError, TypeError, KeyError): error_url = request.path_info[1:] # Trim leading slash try: tried = exception.args[0]['tried'] except (IndexError, TypeError, KeyError): tried = [] else: if (not tried or ( # empty URLconf request.path == '/' and len(tried) == 1 and # default URLconf len(tried[0]) == 1 and getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin' )): return default_urlconf(request) urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF) if isinstance(urlconf, types.ModuleType): urlconf = urlconf.__name__ caller = '' try: resolver_match = resolve(request.path) except Http404: pass else: obj = resolver_match.func if hasattr(obj, '__name__'): caller = obj.__name__ elif hasattr(obj, '__class__') and hasattr(obj.__class__, '__name__'): caller = obj.__class__.__name__ if hasattr(obj, '__module__'): module = obj.__module__ caller = '%s.%s' % (module, caller) with Path(CURRENT_DIR, 'templates', 'technical_404.html').open(encoding='utf-8') as fh: t = DEBUG_ENGINE.from_string(fh.read()) c = Context({ 'urlconf': urlconf, 'root_urlconf': settings.ROOT_URLCONF, 'request_path': error_url, 'urlpatterns': tried, 'reason': str(exception), 'request': request, 'settings': get_safe_settings(), 'raising_view_name': caller, }) return HttpResponseNotFound(t.render(c), content_type='text/html') def default_urlconf(request): """Create an empty URLconf 404 error response.""" with Path(CURRENT_DIR, 'templates', 'default_urlconf.html').open(encoding='utf-8') as fh: t = DEBUG_ENGINE.from_string(fh.read()) c = Context({ 'version': get_docs_version(), }) return HttpResponse(t.render(c), content_type='text/html')
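# --- Illustrative usage sketch (added; not part of django/views/debug.py) ---
# ExceptionReporter is normally driven by Django's debug 500 handler, but it
# can be exercised directly, assuming a configured settings module. The
# request argument may be None when reporting a traceback that has no
# associated request:
#
# >>> import sys
# >>> from django.views.debug import ExceptionReporter
# >>> try:
# ...     1 / 0
# ... except ZeroDivisionError:
# ...     reporter = ExceptionReporter(None, *sys.exc_info())
# >>> html_page = reporter.get_traceback_html()  # full debug page as a string
# >>> text_page = reporter.get_traceback_text()  # plain-text variant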
# These classes override date and datetime to ensure that strftime('%Y') # returns four digits (with leading zeros) on years < 1000. # https://bugs.python.org/issue13305 # # Based on code submitted to comp.lang.python by Andrew Dalke # # >>> datetime_safe.date(10, 8, 2).strftime("%Y/%m/%d was a %A") # '0010/08/02 was a Monday' import time as ttime from datetime import ( date as real_date, datetime as real_datetime, time as real_time, ) from django.utils.regex_helper import _lazy_re_compile class date(real_date): def strftime(self, fmt): return strftime(self, fmt) class datetime(real_datetime): def strftime(self, fmt): return strftime(self, fmt) @classmethod def combine(cls, date, time): return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond, time.tzinfo) def date(self): return date(self.year, self.month, self.day) class time(real_time): pass def new_date(d): "Generate a safe date from a datetime.date object." return date(d.year, d.month, d.day) def new_datetime(d): """ Generate a safe datetime from a datetime.date or datetime.datetime object. """ kw = [d.year, d.month, d.day] if isinstance(d, real_datetime): kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo]) return datetime(*kw) # This library does not support strftime's "%s" or "%y" format strings. # Allowed if there's an even number of "%"s because they are escaped. _illegal_formatting = _lazy_re_compile(r"((^|[^%])(%%)*%[sy])") def _findall(text, substr): # Also finds overlaps sites = [] i = 0 while True: i = text.find(substr, i) if i == -1: break sites.append(i) i += 1 return sites def strftime(dt, fmt): if dt.year >= 1000: return super(type(dt), dt).strftime(fmt) illegal_formatting = _illegal_formatting.search(fmt) if illegal_formatting: raise TypeError("strftime of dates before 1000 does not handle " + illegal_formatting.group(0)) year = dt.year # For every non-leap year century, advance by # 6 years to get into the 28-year repeat cycle delta = 2000 - year off = 6 * (delta // 100 + delta // 400) year = year + off # Move to around the year 2000 year = year + ((2000 - year) // 28) * 28 timetuple = dt.timetuple() s1 = ttime.strftime(fmt, (year,) + timetuple[1:]) sites1 = _findall(s1, str(year)) s2 = ttime.strftime(fmt, (year + 28,) + timetuple[1:]) sites2 = _findall(s2, str(year + 28)) sites = [] for site in sites1: if site in sites2: sites.append(site) s = s1 syear = "%04d" % (dt.year,) for site in sites: s = s[:site] + syear + s[site + 4:] return s
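# --- Illustrative usage sketch (added; not part of the module above) ---
# The safe date/datetime classes only behave differently from the stdlib ones
# for years below 1000, where %Y is zero-padded to four digits; %s and %y are
# rejected outright:
#
# >>> from django.utils import datetime_safe
# >>> datetime_safe.date(10, 8, 2).strftime('%Y/%m/%d')
# '0010/08/02'
# >>> datetime_safe.date(10, 8, 2).strftime('%y')
# Traceback (most recent call last):
#     ...
# TypeError: strftime of dates before 1000 does not handle %y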
import html.entities import re import unicodedata import warnings from gzip import GzipFile from io import BytesIO from django.utils.deprecation import RemovedInDjango40Warning from django.utils.functional import SimpleLazyObject, keep_lazy_text, lazy from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext as _, gettext_lazy, pgettext @keep_lazy_text def capfirst(x): """Capitalize the first letter of a string.""" return x and str(x)[0].upper() + str(x)[1:] # Set up regular expressions re_words = _lazy_re_compile(r'<[^>]+?>|([^<>\s]+)', re.S) re_chars = _lazy_re_compile(r'<[^>]+?>|(.)', re.S) re_tag = _lazy_re_compile(r'<(/)?(\S+?)(?:(\s*/)|\s.*?)?>', re.S) re_newlines = _lazy_re_compile(r'\r\n|\r') # Used in normalize_newlines re_camel_case = _lazy_re_compile(r'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))') @keep_lazy_text def wrap(text, width): """ A word-wrap function that preserves existing line breaks. Expects that existing line breaks are posix newlines. Preserve all white space except added line breaks consume the space on which they break the line. Don't wrap long words, thus the output text may have lines longer than ``width``. """ def _generator(): for line in text.splitlines(True): # True keeps trailing linebreaks max_width = min((line.endswith('\n') and width + 1 or width), width) while len(line) > max_width: space = line[:max_width + 1].rfind(' ') + 1 if space == 0: space = line.find(' ') + 1 if space == 0: yield line line = '' break yield '%s\n' % line[:space - 1] line = line[space:] max_width = min((line.endswith('\n') and width + 1 or width), width) if line: yield line return ''.join(_generator()) class Truncator(SimpleLazyObject): """ An object used to truncate text, either by characters or words. """ def __init__(self, text): super().__init__(lambda: str(text)) def add_truncation_text(self, text, truncate=None): if truncate is None: truncate = pgettext( 'String to return when truncating text', '%(truncated_text)s…') if '%(truncated_text)s' in truncate: return truncate % {'truncated_text': text} # The truncation text didn't contain the %(truncated_text)s string # replacement argument so just append it to the text. if text.endswith(truncate): # But don't append the truncation text if the current text already # ends in this. return text return '%s%s' % (text, truncate) def chars(self, num, truncate=None, html=False): """ Return the text truncated to be no longer than the specified number of characters. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to a translatable string of an ellipsis. 
""" self._setup() length = int(num) text = unicodedata.normalize('NFC', self._wrapped) # Calculate the length to truncate to (max length - end_text length) truncate_len = length for char in self.add_truncation_text('', truncate): if not unicodedata.combining(char): truncate_len -= 1 if truncate_len == 0: break if html: return self._truncate_html(length, truncate, text, truncate_len, False) return self._text_chars(length, truncate, text, truncate_len) def _text_chars(self, length, truncate, text, truncate_len): """Truncate a string after a certain number of chars.""" s_len = 0 end_index = None for i, char in enumerate(text): if unicodedata.combining(char): # Don't consider combining characters # as adding to the string length continue s_len += 1 if end_index is None and s_len > truncate_len: end_index = i if s_len > length: # Return the truncated string return self.add_truncation_text(text[:end_index or 0], truncate) # Return the original string since no truncation was necessary return text def words(self, num, truncate=None, html=False): """ Truncate a string after a certain number of words. `truncate` specifies what should be used to notify that the string has been truncated, defaulting to ellipsis. """ self._setup() length = int(num) if html: return self._truncate_html(length, truncate, self._wrapped, length, True) return self._text_words(length, truncate) def _text_words(self, length, truncate): """ Truncate a string after a certain number of words. Strip newlines in the string. """ words = self._wrapped.split() if len(words) > length: words = words[:length] return self.add_truncation_text(' '.join(words), truncate) return ' '.join(words) def _truncate_html(self, length, truncate, text, truncate_len, words): """ Truncate HTML to a certain number of chars (not counting tags and comments), or, if words is True, then to a certain number of words. Close opened tags if they were correctly closed in the given HTML. Preserve newlines in the HTML. 
""" if words and length <= 0: return '' html4_singlets = ( 'br', 'col', 'link', 'base', 'img', 'param', 'area', 'hr', 'input' ) # Count non-HTML chars/words and keep note of open tags pos = 0 end_text_pos = 0 current_len = 0 open_tags = [] regex = re_words if words else re_chars while current_len <= length: m = regex.search(text, pos) if not m: # Checked through whole string break pos = m.end(0) if m.group(1): # It's an actual non-HTML word or char current_len += 1 if current_len == truncate_len: end_text_pos = pos continue # Check for tag tag = re_tag.match(m.group(0)) if not tag or current_len >= truncate_len: # Don't worry about non tags or tags after our truncate point continue closing_tag, tagname, self_closing = tag.groups() # Element names are always case-insensitive tagname = tagname.lower() if self_closing or tagname in html4_singlets: pass elif closing_tag: # Check for match in open tags list try: i = open_tags.index(tagname) except ValueError: pass else: # SGML: An end tag closes, back to the matching start tag, # all unclosed intervening start tags with omitted end tags open_tags = open_tags[i + 1:] else: # Add it to the start of the open tags list open_tags.insert(0, tagname) if current_len <= length: return text out = text[:end_text_pos] truncate_text = self.add_truncation_text('', truncate) if truncate_text: out += truncate_text # Close any tags still open for tag in open_tags: out += '</%s>' % tag # Return string return out @keep_lazy_text def get_valid_filename(s): """ Return the given string converted to a string that can be used for a clean filename. Remove leading and trailing spaces; convert other spaces to underscores; and remove anything that is not an alphanumeric, dash, underscore, or dot. >>> get_valid_filename("john's portrait in 2004.jpg") 'johns_portrait_in_2004.jpg' """ s = str(s).strip().replace(' ', '_') return re.sub(r'(?u)[^-\w.]', '', s) @keep_lazy_text def get_text_list(list_, last_word=gettext_lazy('or')): """ >>> get_text_list(['a', 'b', 'c', 'd']) 'a, b, c or d' >>> get_text_list(['a', 'b', 'c'], 'and') 'a, b and c' >>> get_text_list(['a', 'b'], 'and') 'a and b' >>> get_text_list(['a']) 'a' >>> get_text_list([]) '' """ if not list_: return '' if len(list_) == 1: return str(list_[0]) return '%s %s %s' % ( # Translators: This string is used as a separator between list elements _(', ').join(str(i) for i in list_[:-1]), str(last_word), str(list_[-1]) ) @keep_lazy_text def normalize_newlines(text): """Normalize CRLF and CR newlines to just LF.""" return re_newlines.sub('\n', str(text)) @keep_lazy_text def phone2numeric(phone): """Convert a phone number with letters into its numeric equivalent.""" char2number = { 'a': '2', 'b': '2', 'c': '2', 'd': '3', 'e': '3', 'f': '3', 'g': '4', 'h': '4', 'i': '4', 'j': '5', 'k': '5', 'l': '5', 'm': '6', 'n': '6', 'o': '6', 'p': '7', 'q': '7', 'r': '7', 's': '7', 't': '8', 'u': '8', 'v': '8', 'w': '9', 'x': '9', 'y': '9', 'z': '9', } return ''.join(char2number.get(c, c) for c in phone.lower()) # From http://www.xhaus.com/alan/python/httpcomp.html#gzip # Used with permission. def compress_string(s): zbuf = BytesIO() with GzipFile(mode='wb', compresslevel=6, fileobj=zbuf, mtime=0) as zfile: zfile.write(s) return zbuf.getvalue() class StreamingBuffer(BytesIO): def read(self): ret = self.getvalue() self.seek(0) self.truncate() return ret # Like compress_string, but for iterators of strings. 
def compress_sequence(sequence): buf = StreamingBuffer() with GzipFile(mode='wb', compresslevel=6, fileobj=buf, mtime=0) as zfile: # Output headers... yield buf.read() for item in sequence: zfile.write(item) data = buf.read() if data: yield data yield buf.read() # Expression to match some_token and some_token="with spaces" (and similarly # for single-quoted strings). smart_split_re = _lazy_re_compile(r""" ((?: [^\s'"]* (?: (?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*') [^\s'"]* )+ ) | \S+) """, re.VERBOSE) def smart_split(text): r""" Generator that splits a string by spaces, leaving quoted phrases together. Supports both single and double quotes, and supports escaping quotes with backslashes. In the output, strings will keep their initial and trailing quote marks and escaped quotes will remain escaped (the results can then be further processed with unescape_string_literal()). >>> list(smart_split(r'This is "a person\'s" test.')) ['This', 'is', '"a person\\\'s"', 'test.'] >>> list(smart_split(r"Another 'person\'s' test.")) ['Another', "'person\\'s'", 'test.'] >>> list(smart_split(r'A "\"funky\" style" test.')) ['A', '"\\"funky\\" style"', 'test.'] """ for bit in smart_split_re.finditer(str(text)): yield bit.group(0) def _replace_entity(match): text = match.group(1) if text[0] == '#': text = text[1:] try: if text[0] in 'xX': c = int(text[1:], 16) else: c = int(text) return chr(c) except ValueError: return match.group(0) else: try: return chr(html.entities.name2codepoint[text]) except KeyError: return match.group(0) _entity_re = _lazy_re_compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));") @keep_lazy_text def unescape_entities(text): warnings.warn( 'django.utils.text.unescape_entities() is deprecated in favor of ' 'html.unescape().', RemovedInDjango40Warning, stacklevel=2, ) return _entity_re.sub(_replace_entity, str(text)) @keep_lazy_text def unescape_string_literal(s): r""" Convert quoted string literals to unquoted strings with escaped quotes and backslashes unquoted:: >>> unescape_string_literal('"abc"') 'abc' >>> unescape_string_literal("'abc'") 'abc' >>> unescape_string_literal('"a \"bc\""') 'a "bc"' >>> unescape_string_literal("'\'ab\' c'") "'ab' c" """ if s[0] not in "\"'" or s[-1] != s[0]: raise ValueError("Not a string literal: %r" % s) quote = s[0] return s[1:-1].replace(r'\%s' % quote, quote).replace(r'\\', '\\') @keep_lazy_text def slugify(value, allow_unicode=False): """ Convert to ASCII if 'allow_unicode' is False. Convert spaces to hyphens. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace. """ value = str(value) if allow_unicode: value = unicodedata.normalize('NFKC', value) else: value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii') value = re.sub(r'[^\w\s-]', '', value).strip().lower() return re.sub(r'[-\s]+', '-', value) def camel_case_to_spaces(value): """ Split CamelCase and convert to lowercase. Strip surrounding whitespace. """ return re_camel_case.sub(r' \1', value).strip().lower() def _format_lazy(format_string, *args, **kwargs): """ Apply str.format() on 'format_string' where format_string, args, and/or kwargs might be lazy. """ return format_string.format(*args, **kwargs) format_lazy = lazy(_format_lazy, str)
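# --- Illustrative usage sketch (added; not part of the module above) ---
# Truncator counts words or characters and, in HTML mode, re-closes any tags
# still open at the truncation point. The default ellipsis goes through the
# translation machinery, so a configured settings module is assumed:
#
# >>> from django.utils.text import Truncator, slugify
# >>> Truncator('The quick brown fox').words(2)
# 'The quick…'
# >>> Truncator('<p>The quick brown fox</p>').words(2, html=True)
# '<p>The quick…</p>'
# >>> slugify('Hello, World!')
# 'hello-world'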
"""Functions to parse datetime objects.""" # We're using regular expressions rather than time.strptime because: # - They provide both validation and parsing. # - They're more flexible for datetimes. # - The date/datetime/time constructors produce friendlier error messages. import datetime from django.utils.regex_helper import _lazy_re_compile from django.utils.timezone import get_fixed_timezone, utc date_re = _lazy_re_compile( r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})$' ) time_re = _lazy_re_compile( r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})' r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' ) datetime_re = _lazy_re_compile( r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})' r'[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})' r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?' r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$' ) standard_duration_re = _lazy_re_compile( r'^' r'(?:(?P<days>-?\d+) (days?, )?)?' r'(?P<sign>-?)' r'((?:(?P<hours>\d+):)(?=\d+:\d+))?' r'(?:(?P<minutes>\d+):)?' r'(?P<seconds>\d+)' r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?' r'$' ) # Support the sections of ISO 8601 date representation that are accepted by # timedelta iso8601_duration_re = _lazy_re_compile( r'^(?P<sign>[-+]?)' r'P' r'(?:(?P<days>\d+(.\d+)?)D)?' r'(?:T' r'(?:(?P<hours>\d+(.\d+)?)H)?' r'(?:(?P<minutes>\d+(.\d+)?)M)?' r'(?:(?P<seconds>\d+(.\d+)?)S)?' r')?' r'$' ) # Support PostgreSQL's day-time interval format, e.g. "3 days 04:05:06". The # year-month and mixed intervals cannot be converted to a timedelta and thus # aren't accepted. postgres_interval_re = _lazy_re_compile( r'^' r'(?:(?P<days>-?\d+) (days? ?))?' r'(?:(?P<sign>[-+])?' r'(?P<hours>\d+):' r'(?P<minutes>\d\d):' r'(?P<seconds>\d\d)' r'(?:\.(?P<microseconds>\d{1,6}))?' r')?$' ) def parse_date(value): """Parse a string and return a datetime.date. Raise ValueError if the input is well formatted but not a valid date. Return None if the input isn't well formatted. """ match = date_re.match(value) if match: kw = {k: int(v) for k, v in match.groupdict().items()} return datetime.date(**kw) def parse_time(value): """Parse a string and return a datetime.time. This function doesn't support time zone offsets. Raise ValueError if the input is well formatted but not a valid time. Return None if the input isn't well formatted, in particular if it contains an offset. """ match = time_re.match(value) if match: kw = match.groupdict() kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0') kw = {k: int(v) for k, v in kw.items() if v is not None} return datetime.time(**kw) def parse_datetime(value): """Parse a string and return a datetime.datetime. This function supports time zone offsets. When the input contains one, the output uses a timezone with a fixed offset from UTC. Raise ValueError if the input is well formatted but not a valid datetime. Return None if the input isn't well formatted. """ match = datetime_re.match(value) if match: kw = match.groupdict() kw['microsecond'] = kw['microsecond'] and kw['microsecond'].ljust(6, '0') tzinfo = kw.pop('tzinfo') if tzinfo == 'Z': tzinfo = utc elif tzinfo is not None: offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0 offset = 60 * int(tzinfo[1:3]) + offset_mins if tzinfo[0] == '-': offset = -offset tzinfo = get_fixed_timezone(offset) kw = {k: int(v) for k, v in kw.items() if v is not None} kw['tzinfo'] = tzinfo return datetime.datetime(**kw) def parse_duration(value): """Parse a duration string and return a datetime.timedelta. 
The preferred format for durations in Django is '%d %H:%M:%S.%f'. Also supports ISO 8601 representation and PostgreSQL's day-time interval format. """ match = ( standard_duration_re.match(value) or iso8601_duration_re.match(value) or postgres_interval_re.match(value) ) if match: kw = match.groupdict() days = datetime.timedelta(float(kw.pop('days', 0) or 0)) sign = -1 if kw.pop('sign', '+') == '-' else 1 if kw.get('microseconds'): kw['microseconds'] = kw['microseconds'].ljust(6, '0') if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'): kw['microseconds'] = '-' + kw['microseconds'] kw = {k: float(v) for k, v in kw.items() if v is not None} return days + sign * datetime.timedelta(**kw)
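# --- Illustrative usage sketch (added; not part of the module above) ---
# The parsers return None for input that doesn't match the expected format and
# raise ValueError only for well-formed but invalid values (e.g. month 13):
#
# >>> from django.utils.dateparse import parse_date, parse_duration
# >>> parse_date('2019-12-04')
# datetime.date(2019, 12, 4)
# >>> parse_date('2019/12/04') is None
# True
# >>> parse_duration('3 days 04:05:06')  # PostgreSQL interval syntax
# datetime.timedelta(days=3, seconds=14706)
# >>> parse_duration('P3DT4H')  # ISO 8601
# datetime.timedelta(days=3, seconds=14400)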
import codecs import datetime import locale import warnings from decimal import Decimal from urllib.parse import quote from django.utils.deprecation import RemovedInDjango40Warning from django.utils.functional import Promise class DjangoUnicodeDecodeError(UnicodeDecodeError): def __init__(self, obj, *args): self.obj = obj super().__init__(*args) def __str__(self): return '%s. You passed in %r (%s)' % (super().__str__(), self.obj, type(self.obj)) def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'): """ Return a string representing 's'. Treat bytestrings using the 'encoding' codec. If strings_only is True, don't convert (some) non-string-like objects. """ if isinstance(s, Promise): # The input is the result of a gettext_lazy() call. return s return force_str(s, encoding, strings_only, errors) _PROTECTED_TYPES = ( type(None), int, float, Decimal, datetime.datetime, datetime.date, datetime.time, ) def is_protected_type(obj): """Determine if the object instance is of a protected type. Objects of protected types are preserved as-is when passed to force_str(strings_only=True). """ return isinstance(obj, _PROTECTED_TYPES) def force_str(s, encoding='utf-8', strings_only=False, errors='strict'): """ Similar to smart_str(), except that lazy instances are resolved to strings, rather than kept as lazy objects. If strings_only is True, don't convert (some) non-string-like objects. """ # Handle the common case first for performance reasons. if issubclass(type(s), str): return s if strings_only and is_protected_type(s): return s try: if isinstance(s, bytes): s = str(s, encoding, errors) else: s = str(s) except UnicodeDecodeError as e: raise DjangoUnicodeDecodeError(s, *e.args) return s def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'): """ Return a bytestring version of 's', encoded as specified in 'encoding'. If strings_only is True, don't convert (some) non-string-like objects. """ if isinstance(s, Promise): # The input is the result of a gettext_lazy() call. return s return force_bytes(s, encoding, strings_only, errors) def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'): """ Similar to smart_bytes, except that lazy instances are resolved to strings, rather than kept as lazy objects. If strings_only is True, don't convert (some) non-string-like objects. """ # Handle the common case first for performance reasons. if isinstance(s, bytes): if encoding == 'utf-8': return s else: return s.decode('utf-8', errors).encode(encoding, errors) if strings_only and is_protected_type(s): return s if isinstance(s, memoryview): return bytes(s) return str(s).encode(encoding, errors) def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'): warnings.warn( 'smart_text() is deprecated in favor of smart_str().', RemovedInDjango40Warning, stacklevel=2, ) return smart_str(s, encoding, strings_only, errors) def force_text(s, encoding='utf-8', strings_only=False, errors='strict'): warnings.warn( 'force_text() is deprecated in favor of force_str().', RemovedInDjango40Warning, stacklevel=2, ) return force_str(s, encoding, strings_only, errors) def iri_to_uri(iri): """ Convert an Internationalized Resource Identifier (IRI) portion to a URI portion that is suitable for inclusion in a URL. This is the algorithm from section 3.1 of RFC 3987, slightly simplified since the input is assumed to be a string rather than an arbitrary byte stream. Take an IRI (string or UTF-8 bytes, e.g. 
'/I ♥ Django/' or b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/'). """ # The list of safe characters here is constructed from the "reserved" and # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986: # reserved = gen-delims / sub-delims # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" # / "*" / "+" / "," / ";" / "=" # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" # Of the unreserved characters, urllib.parse.quote() already considers all # but the ~ safe. # The % character is also added to the list of safe characters here, as the # end of section 3.1 of RFC 3987 specifically mentions that % must not be # converted. if iri is None: return iri elif isinstance(iri, Promise): iri = str(iri) return quote(iri, safe="/#%[]=:;$&()+,!?*@'~") # List of byte values that uri_to_iri() decodes from percent encoding. # First, the unreserved characters from RFC 3986: _ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)] _hextobyte = { (fmt % char).encode(): bytes((char,)) for ascii_range in _ascii_ranges for char in ascii_range for fmt in ['%02x', '%02X'] } # And then everything above 128, because bytes ≥ 128 are part of multibyte # unicode characters. _hexdig = '0123456789ABCDEFabcdef' _hextobyte.update({ (a + b).encode(): bytes.fromhex(a + b) for a in _hexdig[8:] for b in _hexdig }) def uri_to_iri(uri): """ Convert a Uniform Resource Identifier (URI) into an Internationalized Resource Identifier (IRI). This is the algorithm from section 3.2 of RFC 3987, excluding step 4. Take a URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return a string containing the encoded result (e.g. '/I%20♥%20Django/'). """ if uri is None: return uri uri = force_bytes(uri) # Fast selective unquote: First, split on '%' and then starting with the # second block, decode the first 2 bytes if they represent a hex code to # decode. The rest of the block is the part after '%AB', not containing # any '%'. Add that to the output without further processing. bits = uri.split(b'%') if len(bits) == 1: iri = uri else: parts = [bits[0]] append = parts.append hextobyte = _hextobyte for item in bits[1:]: hex = item[:2] if hex in hextobyte: append(hextobyte[hex]) append(item[2:]) else: append(b'%') append(item) iri = b''.join(parts) return repercent_broken_unicode(iri).decode() def escape_uri_path(path): """ Escape the unsafe characters from the path portion of a Uniform Resource Identifier (URI). """ # These are the "reserved" and "unreserved" characters specified in # sections 2.2 and 2.3 of RFC 2396: # reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | "," # unreserved = alphanum | mark # mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")" # The list of safe characters here is constructed subtracting ";", "=", # and "?" according to section 3.3 of RFC 2396. # The reason for not subtracting and escaping "/" is that we are escaping # the entire path, not a path segment. return quote(path, safe="/:@&+$,-_.!~*'()") def punycode(domain): """Return the Punycode of the given domain if it's non-ASCII.""" return domain.encode('idna').decode('ascii') def repercent_broken_unicode(path): """ As per section 3.2 of RFC 3987, step three of converting a URI into an IRI, repercent-encode any octet produced that is not part of a strictly legal UTF-8 octet sequence.
""" while True: try: path.decode() except UnicodeDecodeError as e: # CVE-2019-14235: A recursion shouldn't be used since the exception # handling uses massive amounts of memory repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~") path = path[:e.start] + repercent.encode() + path[e.end:] else: return path def filepath_to_uri(path): """Convert a file system path to a URI portion that is suitable for inclusion in a URL. Encode certain chars that would normally be recognized as special chars for URIs. Do not encode the ' character, as it is a valid character within URIs. See the encodeURIComponent() JavaScript function for details. """ if path is None: return path # I know about `os.sep` and `os.altsep` but I want to leave # some flexibility for hardcoding separators. return quote(str(path).replace("\\", "/"), safe="/~!*()'") def get_system_encoding(): """ The encoding of the default system locale. Fallback to 'ascii' if the #encoding is unsupported by Python or could not be determined. See tickets #10335 and #5846. """ try: encoding = locale.getdefaultlocale()[1] or 'ascii' codecs.lookup(encoding) except Exception: encoding = 'ascii' return encoding DEFAULT_LOCALE_ENCODING = get_system_encoding()
""" Functions for reversing a regular expression (used in reverse URL resolving). Used internally by Django and not intended for external use. This is not, and is not intended to be, a complete reg-exp decompiler. It should be good enough for a large class of URLS, however. """ import re from django.utils.functional import SimpleLazyObject # Mapping of an escape character to a representative of that class. So, e.g., # "\w" is replaced by "x" in a reverse URL. A value of None means to ignore # this sequence. Any missing key is mapped to itself. ESCAPE_MAPPINGS = { "A": None, "b": None, "B": None, "d": "0", "D": "x", "s": " ", "S": "x", "w": "x", "W": "!", "Z": None, } class Choice(list): """Represent multiple possibilities at this point in a pattern string.""" class Group(list): """Represent a capturing group in the pattern string.""" class NonCapture(list): """Represent a non-capturing group in the pattern string.""" def normalize(pattern): r""" Given a reg-exp pattern, normalize it to an iterable of forms that suffice for reverse matching. This does the following: (1) For any repeating sections, keeps the minimum number of occurrences permitted (this means zero for optional groups). (2) If an optional group includes parameters, include one occurrence of that group (along with the zero occurrence case from step (1)). (3) Select the first (essentially an arbitrary) element from any character class. Select an arbitrary character for any unordered class (e.g. '.' or '\w') in the pattern. (4) Ignore look-ahead and look-behind assertions. (5) Raise an error on any disjunctive ('|') constructs. Django's URLs for forward resolving are either all positional arguments or all keyword arguments. That is assumed here, as well. Although reverse resolving can be done using positional args when keyword args are specified, the two cannot be mixed in the same reverse() call. """ # Do a linear scan to work out the special features of this pattern. The # idea is that we scan once here and collect all the information we need to # make future decisions. result = [] non_capturing_groups = [] consume_next = True pattern_iter = next_char(iter(pattern)) num_args = 0 # A "while" loop is used here because later on we need to be able to peek # at the next character and possibly go around without consuming another # one at the top of the loop. try: ch, escaped = next(pattern_iter) except StopIteration: return [('', [])] try: while True: if escaped: result.append(ch) elif ch == '.': # Replace "any character" with an arbitrary representative. result.append(".") elif ch == '|': # FIXME: One day we'll should do this, but not in 1.0. raise NotImplementedError('Awaiting Implementation') elif ch == "^": pass elif ch == '$': break elif ch == ')': # This can only be the end of a non-capturing group, since all # other unescaped parentheses are handled by the grouping # section later (and the full group is handled there). # # We regroup everything inside the capturing group so that it # can be quantified, if necessary. start = non_capturing_groups.pop() inner = NonCapture(result[start:]) result = result[:start] + [inner] elif ch == '[': # Replace ranges with the first character in the range. ch, escaped = next(pattern_iter) result.append(ch) ch, escaped = next(pattern_iter) while escaped or ch != ']': ch, escaped = next(pattern_iter) elif ch == '(': # Some kind of group. ch, escaped = next(pattern_iter) if ch != '?' 
or escaped: # A positional group name = "_%d" % num_args num_args += 1 result.append(Group((("%%(%s)s" % name), name))) walk_to_end(ch, pattern_iter) else: ch, escaped = next(pattern_iter) if ch in '!=<': # All of these are ignorable. Walk to the end of the # group. walk_to_end(ch, pattern_iter) elif ch == ':': # Non-capturing group non_capturing_groups.append(len(result)) elif ch != 'P': # Anything else, other than a named group, is something # we cannot reverse. raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch) else: ch, escaped = next(pattern_iter) if ch not in ('<', '='): raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch) # We are in a named capturing group. Extract the name and # then skip to the end. if ch == '<': terminal_char = '>' # We are in a named backreference. else: terminal_char = ')' name = [] ch, escaped = next(pattern_iter) while ch != terminal_char: name.append(ch) ch, escaped = next(pattern_iter) param = ''.join(name) # Named backreferences have already consumed the # parenthesis. if terminal_char != ')': result.append(Group((("%%(%s)s" % param), param))) walk_to_end(ch, pattern_iter) else: result.append(Group((("%%(%s)s" % param), None))) elif ch in "*?+{": # Quantifiers affect the previous item in the result list. count, ch = get_quantifier(ch, pattern_iter) if ch: # We had to look ahead, but it wasn't needed to compute the # quantifier, so use this character next time around the # main loop. consume_next = False if count == 0: if contains(result[-1], Group): # If we are quantifying a capturing group (or # something containing such a group) and the minimum is # zero, we must also handle the case of one occurrence # being present. All the quantifiers (except {0,0}, # which we conveniently ignore) that have a 0 minimum # also allow a single occurrence. result[-1] = Choice([None, result[-1]]) else: result.pop() elif count > 1: result.extend([result[-1]] * (count - 1)) else: # Anything else is a literal. result.append(ch) if consume_next: ch, escaped = next(pattern_iter) consume_next = True except StopIteration: pass except NotImplementedError: # A case of using the disjunctive form. No results for you! return [('', [])] return list(zip(*flatten_result(result))) def next_char(input_iter): r""" An iterator that yields the next character from "input_iter", respecting escape sequences. An escaped character is replaced by a representative of its class (e.g. \w -> "x"). If the escaped character is one that is skipped, it is not returned (the next character is returned instead). Yield the next character, along with a boolean indicating whether it is a raw (unescaped) character or not. """ for ch in input_iter: if ch != '\\': yield ch, False continue ch = next(input_iter) representative = ESCAPE_MAPPINGS.get(ch, ch) if representative is None: continue yield representative, True def walk_to_end(ch, input_iter): """ The iterator is currently inside a capturing group. Walk to the close of this group, skipping over any nested groups and handling escaped parentheses correctly. """ if ch == '(': nesting = 1 else: nesting = 0 for ch, escaped in input_iter: if escaped: continue elif ch == '(': nesting += 1 elif ch == ')': if not nesting: return nesting -= 1 def get_quantifier(ch, input_iter): """ Parse a quantifier from the input, where "ch" is the first character in the quantifier. Return the minimum number of occurrences permitted by the quantifier and either None or the next character from the input_iter if the next character is not part of the quantifier.
""" if ch in '*?+': try: ch2, escaped = next(input_iter) except StopIteration: ch2 = None if ch2 == '?': ch2 = None if ch == '+': return 1, ch2 return 0, ch2 quant = [] while ch != '}': ch, escaped = next(input_iter) quant.append(ch) quant = quant[:-1] values = ''.join(quant).split(',') # Consume the trailing '?', if necessary. try: ch, escaped = next(input_iter) except StopIteration: ch = None if ch == '?': ch = None return int(values[0]), ch def contains(source, inst): """ Return True if the "source" contains an instance of "inst". False, otherwise. """ if isinstance(source, inst): return True if isinstance(source, NonCapture): for elt in source: if contains(elt, inst): return True return False def flatten_result(source): """ Turn the given source sequence into a list of reg-exp possibilities and their arguments. Return a list of strings and a list of argument lists. Each of the two lists will be of the same length. """ if source is None: return [''], [[]] if isinstance(source, Group): if source[1] is None: params = [] else: params = [source[1]] return [source[0]], [params] result = [''] result_args = [[]] pos = last = 0 for pos, elt in enumerate(source): if isinstance(elt, str): continue piece = ''.join(source[last:pos]) if isinstance(elt, Group): piece += elt[0] param = elt[1] else: param = None last = pos + 1 for i in range(len(result)): result[i] += piece if param: result_args[i].append(param) if isinstance(elt, (Choice, NonCapture)): if isinstance(elt, NonCapture): elt = [elt] inner_result, inner_args = [], [] for item in elt: res, args = flatten_result(item) inner_result.extend(res) inner_args.extend(args) new_result = [] new_args = [] for item, args in zip(result, result_args): for i_item, i_args in zip(inner_result, inner_args): new_result.append(item + i_item) new_args.append(args[:] + i_args) result = new_result result_args = new_args if pos >= last: piece = ''.join(source[last:]) for i in range(len(result)): result[i] += piece return result, result_args def _lazy_re_compile(regex, flags=0): """Lazily compile a regex with flags.""" def _compile(): # Compile the regex if it was not passed pre-compiled. if isinstance(regex, (str, bytes)): return re.compile(regex, flags) else: assert not flags, ( 'flags must be empty if regex is passed pre-compiled' ) return regex return SimpleLazyObject(_compile)
import calendar import datetime from django.utils.html import avoid_wrapping from django.utils.timezone import is_aware, utc from django.utils.translation import gettext, ngettext_lazy TIME_STRINGS = { 'year': ngettext_lazy('%d year', '%d years'), 'month': ngettext_lazy('%d month', '%d months'), 'week': ngettext_lazy('%d week', '%d weeks'), 'day': ngettext_lazy('%d day', '%d days'), 'hour': ngettext_lazy('%d hour', '%d hours'), 'minute': ngettext_lazy('%d minute', '%d minutes'), } TIMESINCE_CHUNKS = ( (60 * 60 * 24 * 365, 'year'), (60 * 60 * 24 * 30, 'month'), (60 * 60 * 24 * 7, 'week'), (60 * 60 * 24, 'day'), (60 * 60, 'hour'), (60, 'minute'), ) def timesince(d, now=None, reversed=False, time_strings=None): """ Take two datetime objects and return the time between d and now as a nicely formatted string, e.g. "10 minutes". If d occurs after now, return "0 minutes". Units used are years, months, weeks, days, hours, and minutes. Seconds and microseconds are ignored. Up to two adjacent units will be displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not. `time_strings` is an optional dict of strings to replace the default TIME_STRINGS dict. Adapted from https://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since """ if time_strings is None: time_strings = TIME_STRINGS # Convert datetime.date to datetime.datetime for comparison. if not isinstance(d, datetime.datetime): d = datetime.datetime(d.year, d.month, d.day) if now and not isinstance(now, datetime.datetime): now = datetime.datetime(now.year, now.month, now.day) now = now or datetime.datetime.now(utc if is_aware(d) else None) if reversed: d, now = now, d delta = now - d # Deal with leap years by subtracting the number of leapdays leapdays = calendar.leapdays(d.year, now.year) if leapdays != 0: if calendar.isleap(d.year): leapdays -= 1 elif calendar.isleap(now.year): leapdays += 1 delta -= datetime.timedelta(leapdays) # ignore microseconds since = delta.days * 24 * 60 * 60 + delta.seconds if since <= 0: # d is in the future compared to now, stop processing. return avoid_wrapping(time_strings['minute'] % 0) for i, (seconds, name) in enumerate(TIMESINCE_CHUNKS): count = since // seconds if count != 0: break result = avoid_wrapping(time_strings[name] % count) if i + 1 < len(TIMESINCE_CHUNKS): # Now get the second item seconds2, name2 = TIMESINCE_CHUNKS[i + 1] count2 = (since - (seconds * count)) // seconds2 if count2 != 0: result += gettext(', ') + avoid_wrapping(time_strings[name2] % count2) return result def timeuntil(d, now=None, time_strings=None): """ Like timesince, but return a string measuring the time until the given time. """ return timesince(d, now, reversed=True, time_strings=time_strings)
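# --- Illustrative usage sketch (added; not part of the module above) ---
# A configured settings module is assumed (the unit names are translated).
# Note that avoid_wrapping() turns the spaces in the result into non-breaking
# spaces (U+00A0):
#
# >>> import datetime
# >>> from django.utils.timesince import timesince
# >>> now = datetime.datetime(2020, 1, 15, 12, 0)
# >>> timesince(datetime.datetime(2020, 1, 1, 9, 0), now=now)
# '2\xa0weeks'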
""" PHP date() style date formatting See http://www.php.net/date for format strings Usage: >>> import datetime >>> d = datetime.datetime.now() >>> df = DateFormat(d) >>> print(df.format('jS F Y H:i')) 7th October 2003 11:39 >>> """ import calendar import datetime import time from django.utils.dates import ( MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR, ) from django.utils.regex_helper import _lazy_re_compile from django.utils.timezone import get_default_timezone, is_aware, is_naive from django.utils.translation import gettext as _ re_formatchars = _lazy_re_compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])') re_escaped = _lazy_re_compile(r'\\(.)') class Formatter: def format(self, formatstr): pieces = [] for i, piece in enumerate(re_formatchars.split(str(formatstr))): if i % 2: if type(self.data) is datetime.date and hasattr(TimeFormat, piece): raise TypeError( "The format for date objects may not contain " "time-related format specifiers (found '%s')." % piece ) pieces.append(str(getattr(self, piece)())) elif piece: pieces.append(re_escaped.sub(r'\1', piece)) return ''.join(pieces) class TimeFormat(Formatter): def __init__(self, obj): self.data = obj self.timezone = None # We only support timezone when formatting datetime objects, # not date objects (timezone information not appropriate), # or time objects (against established django policy). if isinstance(obj, datetime.datetime): if is_naive(obj): self.timezone = get_default_timezone() else: self.timezone = obj.tzinfo def a(self): "'a.m.' or 'p.m.'" if self.data.hour > 11: return _('p.m.') return _('a.m.') def A(self): "'AM' or 'PM'" if self.data.hour > 11: return _('PM') return _('AM') def B(self): "Swatch Internet time" raise NotImplementedError('may be implemented in a future release') def e(self): """ Timezone name. If timezone information is not available, return an empty string. """ if not self.timezone: return "" try: if hasattr(self.data, 'tzinfo') and self.data.tzinfo: return self.data.tzname() or '' except NotImplementedError: pass return "" def f(self): """ Time, in 12-hour hours and minutes, with minutes left off if they're zero. Examples: '1', '1:30', '2:05', '2' Proprietary extension. """ if self.data.minute == 0: return self.g() return '%s:%s' % (self.g(), self.i()) def g(self): "Hour, 12-hour format without leading zeros; i.e. '1' to '12'" if self.data.hour == 0: return 12 if self.data.hour > 12: return self.data.hour - 12 return self.data.hour def G(self): "Hour, 24-hour format without leading zeros; i.e. '0' to '23'" return self.data.hour def h(self): "Hour, 12-hour format; i.e. '01' to '12'" return '%02d' % self.g() def H(self): "Hour, 24-hour format; i.e. '00' to '23'" return '%02d' % self.G() def i(self): "Minutes; i.e. '00' to '59'" return '%02d' % self.data.minute def O(self): # NOQA: E743 """ Difference to Greenwich time in hours; e.g. '+0200', '-0430'. If timezone information is not available, return an empty string. """ if not self.timezone: return "" seconds = self.Z() if seconds == "": return "" sign = '-' if seconds < 0 else '+' seconds = abs(seconds) return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60) def P(self): """ Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off if they're zero and the strings 'midnight' and 'noon' if appropriate. Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.' Proprietary extension. 
""" if self.data.minute == 0 and self.data.hour == 0: return _('midnight') if self.data.minute == 0 and self.data.hour == 12: return _('noon') return '%s %s' % (self.f(), self.a()) def s(self): "Seconds; i.e. '00' to '59'" return '%02d' % self.data.second def T(self): """ Time zone of this machine; e.g. 'EST' or 'MDT'. If timezone information is not available, return an empty string. """ if not self.timezone: return "" name = None try: name = self.timezone.tzname(self.data) except Exception: # pytz raises AmbiguousTimeError during the autumn DST change. # This happens mainly when __init__ receives a naive datetime # and sets self.timezone = get_default_timezone(). pass if name is None: name = self.format('O') return str(name) def u(self): "Microseconds; i.e. '000000' to '999999'" return '%06d' % self.data.microsecond def Z(self): """ Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for timezones west of UTC is always negative, and for those east of UTC is always positive. If timezone information is not available, return an empty string. """ if not self.timezone: return "" try: offset = self.timezone.utcoffset(self.data) except Exception: # pytz raises AmbiguousTimeError during the autumn DST change. # This happens mainly when __init__ receives a naive datetime # and sets self.timezone = get_default_timezone(). return "" # `offset` is a datetime.timedelta. For negative values (to the west of # UTC) only days can be negative (days=-1) and seconds are always # positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0) # Positive offsets have days=0 return offset.days * 86400 + offset.seconds class DateFormat(TimeFormat): year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334] def b(self): "Month, textual, 3 letters, lowercase; e.g. 'jan'" return MONTHS_3[self.data.month] def c(self): """ ISO 8601 Format Example : '2008-01-02T10:30:00.000123' """ return self.data.isoformat() def d(self): "Day of the month, 2 digits with leading zeros; i.e. '01' to '31'" return '%02d' % self.data.day def D(self): "Day of the week, textual, 3 letters; e.g. 'Fri'" return WEEKDAYS_ABBR[self.data.weekday()] def E(self): "Alternative month names as required by some locales. Proprietary extension." return MONTHS_ALT[self.data.month] def F(self): "Month, textual, long; e.g. 'January'" return MONTHS[self.data.month] def I(self): # NOQA: E743 "'1' if Daylight Savings Time, '0' otherwise." try: if self.timezone and self.timezone.dst(self.data): return '1' else: return '0' except Exception: # pytz raises AmbiguousTimeError during the autumn DST change. # This happens mainly when __init__ receives a naive datetime # and sets self.timezone = get_default_timezone(). return '' def j(self): "Day of the month without leading zeros; i.e. '1' to '31'" return self.data.day def l(self): # NOQA: E743 "Day of the week, textual, long; e.g. 'Friday'" return WEEKDAYS[self.data.weekday()] def L(self): "Boolean for whether it is a leap year; i.e. True or False" return calendar.isleap(self.data.year) def m(self): "Month; i.e. '01' to '12'" return '%02d' % self.data.month def M(self): "Month, textual, 3 letters; e.g. 'Jan'" return MONTHS_3[self.data.month].title() def n(self): "Month without leading zeros; i.e. '1' to '12'" return self.data.month def N(self): "Month abbreviation in Associated Press style. Proprietary extension." 
return MONTHS_AP[self.data.month] def o(self): "ISO 8601 year number matching the ISO week number (W)" return self.data.isocalendar()[0] def r(self): "RFC 5322 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'" return self.format('D, j M Y H:i:s O') def S(self): "English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'" if self.data.day in (11, 12, 13): # Special case return 'th' last = self.data.day % 10 if last == 1: return 'st' if last == 2: return 'nd' if last == 3: return 'rd' return 'th' def t(self): "Number of days in the given month; i.e. '28' to '31'" return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1] def U(self): "Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)" if isinstance(self.data, datetime.datetime) and is_aware(self.data): return int(calendar.timegm(self.data.utctimetuple())) else: return int(time.mktime(self.data.timetuple())) def w(self): "Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)" return (self.data.weekday() + 1) % 7 def W(self): "ISO-8601 week number of year, weeks starting on Monday" # Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1 weekday = self.data.weekday() + 1 day_of_year = self.z() if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4: if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)): week_number = 53 else: week_number = 52 else: if calendar.isleap(self.data.year): i = 366 else: i = 365 if (i - day_of_year) < (4 - weekday): week_number = 1 else: j = day_of_year + (7 - weekday) + (jan1_weekday - 1) week_number = j // 7 if jan1_weekday > 4: week_number -= 1 return week_number def y(self): "Year, 2 digits; e.g. '99'" return str(self.data.year)[2:] def Y(self): "Year, 4 digits; e.g. '1999'" return self.data.year def z(self): "Day of the year; i.e. '0' to '365'" doy = self.year_days[self.data.month] + self.data.day if self.L() and self.data.month > 2: doy += 1 return doy def format(value, format_string): "Convenience function" df = DateFormat(value) return df.format(format_string) def time_format(value, format_string): "Convenience function" tf = TimeFormat(value) return tf.format(format_string)
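# --- Illustrative usage sketch (added; not part of the module above) ---
# A configured settings module is assumed (month and meridiem names are
# translated). Mixing time specifiers into a plain date is a TypeError:
#
# >>> import datetime
# >>> from django.utils.dateformat import format
# >>> format(datetime.datetime(2003, 10, 7, 11, 39), 'jS F Y H:i')
# '7th October 2003 11:39'
# >>> format(datetime.date(2003, 10, 7), 'H:i')
# Traceback (most recent call last):
#     ...
# TypeError: The format for date objects may not contain time-related format specifiers (found 'H').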
"""HTML utilities suitable for global use.""" import html import json import re from html.parser import HTMLParser from urllib.parse import ( parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit, ) from django.utils.encoding import punycode from django.utils.functional import Promise, keep_lazy, keep_lazy_text from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import SafeData, SafeString, mark_safe from django.utils.text import normalize_newlines # Configuration for urlize() function. TRAILING_PUNCTUATION_CHARS = '.,:;!' WRAPPING_PUNCTUATION = [('(', ')'), ('[', ']')] # List of possible strings used for bullets in bulleted lists. DOTS = ['&middot;', '*', '\u2022', '&#149;', '&bull;', '&#8226;'] unencoded_ampersands_re = _lazy_re_compile(r'&(?!(\w+|#\d+);)') word_split_re = _lazy_re_compile(r'''([\s<>"']+)''') simple_url_re = _lazy_re_compile(r'^https?://\[?\w', re.IGNORECASE) simple_url_2_re = _lazy_re_compile( r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$', re.IGNORECASE ) @keep_lazy(str, SafeString) def escape(text): """ Return the given text with ampersands, quotes and angle brackets encoded for use in HTML. Always escape input, even if it's already escaped and marked as such. This may result in double-escaping. If this is a concern, use conditional_escape() instead. """ return mark_safe(html.escape(str(text))) _js_escapes = { ord('\\'): '\\u005C', ord('\''): '\\u0027', ord('"'): '\\u0022', ord('>'): '\\u003E', ord('<'): '\\u003C', ord('&'): '\\u0026', ord('='): '\\u003D', ord('-'): '\\u002D', ord(';'): '\\u003B', ord('`'): '\\u0060', ord('\u2028'): '\\u2028', ord('\u2029'): '\\u2029' } # Escape every ASCII character with a value less than 32. _js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32)) @keep_lazy(str, SafeString) def escapejs(value): """Hex encode characters for use in JavaScript strings.""" return mark_safe(str(value).translate(_js_escapes)) _json_script_escapes = { ord('>'): '\\u003E', ord('<'): '\\u003C', ord('&'): '\\u0026', } def json_script(value, element_id): """ Escape all the HTML/XML special characters with their unicode escapes, so value is safe to be output anywhere except for inside a tag attribute. Wrap the escaped JSON in a script tag. """ from django.core.serializers.json import DjangoJSONEncoder json_str = json.dumps(value, cls=DjangoJSONEncoder).translate(_json_script_escapes) return format_html( '<script id="{}" type="application/json">{}</script>', element_id, mark_safe(json_str) ) def conditional_escape(text): """ Similar to escape(), except that it doesn't operate on pre-escaped strings. This function relies on the __html__ convention used both by Django's SafeData class and by third-party libraries like markupsafe. """ if isinstance(text, Promise): text = str(text) if hasattr(text, '__html__'): return text.__html__() else: return escape(text) def format_html(format_string, *args, **kwargs): """ Similar to str.format, but pass all arguments through conditional_escape(), and call mark_safe() on the result. This function should be used instead of str.format or % interpolation to build up small HTML fragments. 
""" args_safe = map(conditional_escape, args) kwargs_safe = {k: conditional_escape(v) for (k, v) in kwargs.items()} return mark_safe(format_string.format(*args_safe, **kwargs_safe)) def format_html_join(sep, format_string, args_generator): """ A wrapper of format_html, for the common case of a group of arguments that need to be formatted using the same format string, and then joined using 'sep'. 'sep' is also passed through conditional_escape. 'args_generator' should be an iterator that returns the sequence of 'args' that will be passed to format_html. Example: format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name) for u in users)) """ return mark_safe(conditional_escape(sep).join( format_html(format_string, *args) for args in args_generator )) @keep_lazy_text def linebreaks(value, autoescape=False): """Convert newlines into <p> and <br>s.""" value = normalize_newlines(value) paras = re.split('\n{2,}', str(value)) if autoescape: paras = ['<p>%s</p>' % escape(p).replace('\n', '<br>') for p in paras] else: paras = ['<p>%s</p>' % p.replace('\n', '<br>') for p in paras] return '\n\n'.join(paras) class MLStripper(HTMLParser): def __init__(self): super().__init__(convert_charrefs=False) self.reset() self.fed = [] def handle_data(self, d): self.fed.append(d) def handle_entityref(self, name): self.fed.append('&%s;' % name) def handle_charref(self, name): self.fed.append('&#%s;' % name) def get_data(self): return ''.join(self.fed) def _strip_once(value): """ Internal tag stripping utility used by strip_tags. """ s = MLStripper() s.feed(value) s.close() return s.get_data() @keep_lazy_text def strip_tags(value): """Return the given HTML with all tags stripped.""" # Note: in typical case this loop executes _strip_once once. Loop condition # is redundant, but helps to reduce number of executions of _strip_once. value = str(value) while '<' in value and '>' in value: new_value = _strip_once(value) if value.count('<') == new_value.count('<'): # _strip_once wasn't able to detect more tags. break value = new_value return value @keep_lazy_text def strip_spaces_between_tags(value): """Return the given HTML with spaces between tags removed.""" return re.sub(r'>\s+<', '><', str(value)) def smart_urlquote(url): """Quote a URL if it isn't already quoted.""" def unquote_quote(segment): segment = unquote(segment) # Tilde is part of RFC3986 Unreserved Characters # https://tools.ietf.org/html/rfc3986#section-2.3 # See also https://bugs.python.org/issue16285 return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + '~') # Handle IDN before quoting. try: scheme, netloc, path, query, fragment = urlsplit(url) except ValueError: # invalid IPv6 URL (normally square brackets in hostname part). return unquote_quote(url) try: netloc = punycode(netloc) # IDN -> ACE except UnicodeError: # invalid domain part return unquote_quote(url) if query: # Separately unquoting key/value, so as to not mix querystring separators # included in query values. See #22267. query_parts = [(unquote(q[0]), unquote(q[1])) for q in parse_qsl(query, keep_blank_values=True)] # urlencode will take care of quoting query = urlencode(query_parts) path = unquote_quote(path) fragment = unquote_quote(fragment) return urlunsplit((scheme, netloc, path, query, fragment)) @keep_lazy_text def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): """ Convert any URLs in text into clickable links. Works on http://, https://, www. 
links, and also on links ending in one of the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org). Links can have trailing punctuation (periods, commas, close-parens) and leading punctuation (opening parens) and it'll still do the right thing. If trim_url_limit is not None, truncate the URLs in the link text longer than this limit to trim_url_limit - 1 characters and append an ellipsis. If nofollow is True, give the links a rel="nofollow" attribute. If autoescape is True, autoescape the link text and URLs. """ safe_input = isinstance(text, SafeData) def trim_url(x, limit=trim_url_limit): if limit is None or len(x) <= limit: return x return '%s…' % x[:max(0, limit - 1)] def trim_punctuation(lead, middle, trail): """ Trim trailing and wrapping punctuation from `middle`. Return the items of the new state. """ # Continue trimming until middle remains unchanged. trimmed_something = True while trimmed_something: trimmed_something = False # Trim wrapping punctuation. for opening, closing in WRAPPING_PUNCTUATION: if middle.startswith(opening): middle = middle[len(opening):] lead += opening trimmed_something = True # Keep parentheses at the end only if they're balanced. if (middle.endswith(closing) and middle.count(closing) == middle.count(opening) + 1): middle = middle[:-len(closing)] trail = closing + trail trimmed_something = True # Trim trailing punctuation (after trimming wrapping punctuation, # as encoded entities contain ';'). Unescape entities to avoid # breaking them by removing ';'. middle_unescaped = html.unescape(middle) stripped = middle_unescaped.rstrip(TRAILING_PUNCTUATION_CHARS) if middle_unescaped != stripped: trail = middle[len(stripped):] + trail middle = middle[:len(stripped) - len(middle_unescaped)] trimmed_something = True return lead, middle, trail def is_email_simple(value): """Return True if value looks like an email address.""" # An @ must be in the middle of the value. if '@' not in value or value.startswith('@') or value.endswith('@'): return False try: p1, p2 = value.split('@') except ValueError: # value contains more than one @. return False # Dot must be in p2 (e.g. example.com) if '.' not in p2 or p2.startswith('.'): return False return True words = word_split_re.split(str(text)) for i, word in enumerate(words): if '.' in word or '@' in word or ':' in word: # lead: Current punctuation trimmed from the beginning of the word. # middle: Current state of the word. # trail: Current punctuation trimmed from the end of the word. lead, middle, trail = '', word, '' # Deal with punctuation. lead, middle, trail = trim_punctuation(lead, middle, trail) # Make URL we want to point to. url = None nofollow_attr = ' rel="nofollow"' if nofollow else '' if simple_url_re.match(middle): url = smart_urlquote(html.unescape(middle)) elif simple_url_2_re.match(middle): url = smart_urlquote('http://%s' % html.unescape(middle)) elif ':' not in middle and is_email_simple(middle): local, domain = middle.rsplit('@', 1) try: domain = punycode(domain) except UnicodeError: continue url = 'mailto:%s@%s' % (local, domain) nofollow_attr = '' # Make link. 
            if url:
                trimmed = trim_url(middle)
                if autoescape and not safe_input:
                    lead, trail = escape(lead), escape(trail)
                    trimmed = escape(trimmed)
                middle = '<a href="%s"%s>%s</a>' % (escape(url), nofollow_attr, trimmed)
                words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
            else:
                if safe_input:
                    words[i] = mark_safe(word)
                elif autoescape:
                    words[i] = escape(word)
        # Words without '.', '@', or ':' can't be URLs or email addresses;
        # pass them through, escaping or marking safe as appropriate.
        elif safe_input:
            words[i] = mark_safe(word)
        elif autoescape:
            words[i] = escape(word)
    return ''.join(words)


def avoid_wrapping(value):
    """
    Avoid text wrapping in the middle of a phrase by adding non-breaking
    spaces where there previously were normal spaces.
    """
    return value.replace(" ", "\xa0")


def html_safe(klass):
    """
    A decorator that defines the __html__ method. This helps non-Django
    templates to detect classes whose __str__ methods return SafeString.
    """
    if '__html__' in klass.__dict__:
        raise ValueError(
            "can't apply @html_safe to %s because it defines "
            "__html__()." % klass.__name__
        )
    if '__str__' not in klass.__dict__:
        raise ValueError(
            "can't apply @html_safe to %s because it doesn't "
            "define __str__()." % klass.__name__
        )
    klass_str = klass.__str__
    klass.__str__ = lambda self: mark_safe(klass_str(self))
    klass.__html__ = lambda self: str(self)
    return klass
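

# --- Editor's addition: a small usage sketch for the helpers above. It is
# not part of the original module; the function name is invented for
# illustration. None of these helpers require configured Django settings.
def _demo_html_utils():  # hypothetical helper, added as an example
    # format_html() escapes the interpolated arguments but trusts the
    # format string itself.
    bold = format_html('<b>{}</b>', '<i>unsafe</i>')
    assert str(bold) == '<b>&lt;i&gt;unsafe&lt;/i&gt;</b>'
    # strip_tags() removes markup while keeping the text content.
    assert strip_tags('<p>Hello <b>world</b></p>') == 'Hello world'
    # urlize() links up bare "www." domains, assuming an http:// scheme.
    assert urlize('visit www.example.com') == (
        'visit <a href="http://www.example.com">www.example.com</a>'
    )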
122b668950101d859e2467882ac4ed4ab2684e2dc58fb91dba18f75cfa3eb7aa
import base64
import calendar
import datetime
import re
import unicodedata
import warnings
from binascii import Error as BinasciiError
from email.utils import formatdate
from urllib.parse import (
    ParseResult, SplitResult, _coerce_args, _splitnetloc, _splitparams,
    quote, quote_plus, scheme_chars, unquote, unquote_plus,
    urlencode as original_urlencode, uses_params,
)

from django.core.exceptions import TooManyFieldsSent
from django.utils.datastructures import MultiValueDict
from django.utils.deprecation import RemovedInDjango40Warning
from django.utils.functional import keep_lazy_text
from django.utils.regex_helper import _lazy_re_compile

# based on RFC 7232, Appendix C
ETAG_MATCH = _lazy_re_compile(r'''
    \A(      # start of string and capture group
    (?:W/)?  # optional weak indicator
    "        # opening quote
    [^"]*    # any sequence of non-quote characters
    "        # end quote
    )\Z      # end of string and capture group
''', re.X)

MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = _lazy_re_compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = _lazy_re_compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = _lazy_re_compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))

RFC3986_GENDELIMS = ":/?#[]@"
RFC3986_SUBDELIMS = "!$&'()*+,;="

FIELDS_MATCH = _lazy_re_compile('[&;]')


@keep_lazy_text
def urlquote(url, safe='/'):
    """
    A legacy compatibility wrapper to Python's urllib.parse.quote() function.
    (was used for unicode handling on Python 2)
    """
    warnings.warn(
        'django.utils.http.urlquote() is deprecated in favor of '
        'urllib.parse.quote().',
        RemovedInDjango40Warning, stacklevel=2,
    )
    return quote(url, safe)


@keep_lazy_text
def urlquote_plus(url, safe=''):
    """
    A legacy compatibility wrapper to Python's urllib.parse.quote_plus()
    function. (was used for unicode handling on Python 2)
    """
    warnings.warn(
        'django.utils.http.urlquote_plus() is deprecated in favor of '
        'urllib.parse.quote_plus().',
        RemovedInDjango40Warning, stacklevel=2,
    )
    return quote_plus(url, safe)


@keep_lazy_text
def urlunquote(quoted_url):
    """
    A legacy compatibility wrapper to Python's urllib.parse.unquote() function.
    (was used for unicode handling on Python 2)
    """
    warnings.warn(
        'django.utils.http.urlunquote() is deprecated in favor of '
        'urllib.parse.unquote().',
        RemovedInDjango40Warning, stacklevel=2,
    )
    return unquote(quoted_url)


@keep_lazy_text
def urlunquote_plus(quoted_url):
    """
    A legacy compatibility wrapper to Python's urllib.parse.unquote_plus()
    function. (was used for unicode handling on Python 2)
    """
    warnings.warn(
        'django.utils.http.urlunquote_plus() is deprecated in favor of '
        'urllib.parse.unquote_plus().',
        RemovedInDjango40Warning, stacklevel=2,
    )
    return unquote_plus(quoted_url)


def urlencode(query, doseq=False):
    """
    A version of Python's urllib.parse.urlencode() function that can operate
    on MultiValueDict and non-string values.
    """
    if isinstance(query, MultiValueDict):
        query = query.lists()
    elif hasattr(query, 'items'):
        query = query.items()
    query_params = []
    for key, value in query:
        if value is None:
            raise TypeError(
                "Cannot encode None for key '%s' in a query string. Did you "
                "mean to pass an empty string or omit the value?"
% key ) elif not doseq or isinstance(value, (str, bytes)): query_val = value else: try: itr = iter(value) except TypeError: query_val = value else: # Consume generators and iterators, when doseq=True, to # work around https://bugs.python.org/issue31706. query_val = [] for item in itr: if item is None: raise TypeError( "Cannot encode None for key '%s' in a query " "string. Did you mean to pass an empty string or " "omit the value?" % key ) elif not isinstance(item, bytes): item = str(item) query_val.append(item) query_params.append((key, query_val)) return original_urlencode(query_params, doseq) def http_date(epoch_seconds=None): """ Format the time to match the RFC1123 date format as specified by HTTP RFC7231 section 7.1.1.1. `epoch_seconds` is a floating point number expressed in seconds since the epoch, in UTC - such as that outputted by time.time(). If set to None, it defaults to the current time. Output a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'. """ return formatdate(epoch_seconds, usegmt=True) def parse_http_date(date): """ Parse a date format as specified by HTTP RFC7231 section 7.1.1.1. The three formats allowed by the RFC are accepted, even if only the first one is still in widespread use. Return an integer expressed in seconds since the epoch, in UTC. """ # email.utils.parsedate() does the job for RFC1123 dates; unfortunately # RFC7231 makes it mandatory to support RFC850 dates too. So we roll # our own RFC-compliant parsing. for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE: m = regex.match(date) if m is not None: break else: raise ValueError("%r is not in a valid HTTP date format" % date) try: year = int(m.group('year')) if year < 100: current_year = datetime.datetime.utcnow().year current_century = current_year - (current_year % 100) if year - (current_year % 100) > 50: # year that appears to be more than 50 years in the future are # interpreted as representing the past. year += current_century - 100 else: year += current_century month = MONTHS.index(m.group('mon').lower()) + 1 day = int(m.group('day')) hour = int(m.group('hour')) min = int(m.group('min')) sec = int(m.group('sec')) result = datetime.datetime(year, month, day, hour, min, sec) return calendar.timegm(result.utctimetuple()) except Exception as exc: raise ValueError("%r is not a valid date" % date) from exc def parse_http_date_safe(date): """ Same as parse_http_date, but return None if the input is invalid. """ try: return parse_http_date(date) except Exception: pass # Base 36 functions: useful for generating compact URLs def base36_to_int(s): """ Convert a base 36 string to an int. Raise ValueError if the input won't fit into an int. """ # To prevent overconsumption of server resources, reject any # base36 string that is longer than 13 base36 digits (13 digits # is sufficient to base36-encode any 64-bit integer) if len(s) > 13: raise ValueError("Base36 input too large") return int(s, 36) def int_to_base36(i): """Convert an integer to a base36 string.""" char_set = '0123456789abcdefghijklmnopqrstuvwxyz' if i < 0: raise ValueError("Negative base36 conversion input.") if i < 36: return char_set[i] b36 = '' while i != 0: i, n = divmod(i, 36) b36 = char_set[n] + b36 return b36 def urlsafe_base64_encode(s): """ Encode a bytestring to a base64 string for use in URLs. Strip any trailing equal signs. """ return base64.urlsafe_b64encode(s).rstrip(b'\n=').decode('ascii') def urlsafe_base64_decode(s): """ Decode a base64 encoded string. Add back any trailing equal signs that might have been stripped. 
""" s = s.encode() try: return base64.urlsafe_b64decode(s.ljust(len(s) + len(s) % 4, b'=')) except (LookupError, BinasciiError) as e: raise ValueError(e) def parse_etags(etag_str): """ Parse a string of ETags given in an If-None-Match or If-Match header as defined by RFC 7232. Return a list of quoted ETags, or ['*'] if all ETags should be matched. """ if etag_str.strip() == '*': return ['*'] else: # Parse each ETag individually, and return any that are valid. etag_matches = (ETAG_MATCH.match(etag.strip()) for etag in etag_str.split(',')) return [match.group(1) for match in etag_matches if match] def quote_etag(etag_str): """ If the provided string is already a quoted ETag, return it. Otherwise, wrap the string in quotes, making it a strong ETag. """ if ETAG_MATCH.match(etag_str): return etag_str else: return '"%s"' % etag_str def is_same_domain(host, pattern): """ Return ``True`` if the host is either an exact match or a match to the wildcard pattern. Any pattern beginning with a period matches a domain and all of its subdomains. (e.g. ``.example.com`` matches ``example.com`` and ``foo.example.com``). Anything else is an exact string match. """ if not pattern: return False pattern = pattern.lower() return ( pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]) or pattern == host ) def url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False): """ Return ``True`` if the url uses an allowed host and a safe scheme. Always return ``False`` on an empty url. If ``require_https`` is ``True``, only 'https' will be considered a valid scheme, as opposed to 'http' and 'https' with the default, ``False``. Note: "True" doesn't entail that a URL is "safe". It may still be e.g. quoted incorrectly. Ensure to also use django.utils.encoding.iri_to_uri() on the path component of untrusted URLs. """ if url is not None: url = url.strip() if not url: return False if allowed_hosts is None: allowed_hosts = set() elif isinstance(allowed_hosts, str): allowed_hosts = {allowed_hosts} # Chrome treats \ completely as / in paths but it could be part of some # basic auth credentials so we need to check both URLs. return ( _url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=require_https) and _url_has_allowed_host_and_scheme(url.replace('\\', '/'), allowed_hosts, require_https=require_https) ) def is_safe_url(url, allowed_hosts, require_https=False): warnings.warn( 'django.utils.http.is_safe_url() is deprecated in favor of ' 'url_has_allowed_host_and_scheme().', RemovedInDjango40Warning, stacklevel=2, ) return url_has_allowed_host_and_scheme(url, allowed_hosts, require_https) # Copied from urllib.parse.urlparse() but uses fixed urlsplit() function. def _urlparse(url, scheme='', allow_fragments=True): """Parse a URL into 6 components: <scheme>://<netloc>/<path>;<params>?<query>#<fragment> Return a 6-tuple: (scheme, netloc, path, params, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) splitresult = _urlsplit(url, scheme, allow_fragments) scheme, netloc, url, query, fragment = splitresult if scheme in uses_params and ';' in url: url, params = _splitparams(url) else: params = '' result = ParseResult(scheme, netloc, url, params, query, fragment) return _coerce_result(result) # Copied from urllib.parse.urlsplit() with # https://github.com/python/cpython/pull/661 applied. 
def _urlsplit(url, scheme='', allow_fragments=True): """Parse a URL into 5 components: <scheme>://<netloc>/<path>?<query>#<fragment> Return a 5-tuple: (scheme, netloc, path, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) netloc = query = fragment = '' i = url.find(':') if i > 0: for c in url[:i]: if c not in scheme_chars: break else: scheme, url = url[:i].lower(), url[i + 1:] if url[:2] == '//': netloc, url = _splitnetloc(url, 2) if (('[' in netloc and ']' not in netloc) or (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if allow_fragments and '#' in url: url, fragment = url.split('#', 1) if '?' in url: url, query = url.split('?', 1) v = SplitResult(scheme, netloc, url, query, fragment) return _coerce_result(v) def _url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False): # Chrome considers any URL with more than two slashes to be absolute, but # urlparse is not so flexible. Treat any url with three slashes as unsafe. if url.startswith('///'): return False try: url_info = _urlparse(url) except ValueError: # e.g. invalid IPv6 addresses return False # Forbid URLs like http:///example.com - with a scheme, but without a hostname. # In that URL, example.com is not the hostname but, a path component. However, # Chrome will still consider example.com to be the hostname, so we must not # allow this syntax. if not url_info.netloc and url_info.scheme: return False # Forbid URLs that start with control characters. Some browsers (like # Chrome) ignore quite a few control characters at the start of a # URL and might consider the URL as scheme relative. if unicodedata.category(url[0])[0] == 'C': return False scheme = url_info.scheme # Consider URLs without a scheme (e.g. //example.com/p) to be http. if not url_info.scheme and url_info.netloc: scheme = 'http' valid_schemes = ['https'] if require_https else ['http', 'https'] return ((not url_info.netloc or url_info.netloc in allowed_hosts) and (not scheme or scheme in valid_schemes)) def limited_parse_qsl(qs, keep_blank_values=False, encoding='utf-8', errors='replace', fields_limit=None): """ Return a list of key/value tuples parsed from query string. Copied from urlparse with an additional "fields_limit" argument. Copyright (C) 2013 Python Software Foundation (see LICENSE.python). Arguments: qs: percent-encoded query string to be parsed keep_blank_values: flag indicating whether blank values in percent-encoded queries should be treated as blank strings. A true value indicates that blanks should be retained as blank strings. The default false value indicates that blank values are to be ignored and treated as if they were not included. encoding and errors: specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. fields_limit: maximum number of fields parsed or an exception is raised. None means no limit and is the default. """ if fields_limit: pairs = FIELDS_MATCH.split(qs, fields_limit) if len(pairs) > fields_limit: raise TooManyFieldsSent( 'The number of GET/POST parameters exceeded ' 'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.' 
) else: pairs = FIELDS_MATCH.split(qs) r = [] for name_value in pairs: if not name_value: continue nv = name_value.split('=', 1) if len(nv) != 2: # Handle case of a control-name with no equal sign if keep_blank_values: nv.append('') else: continue if nv[1] or keep_blank_values: name = nv[0].replace('+', ' ') name = unquote(name, encoding=encoding, errors=errors) value = nv[1].replace('+', ' ') value = unquote(value, encoding=encoding, errors=errors) r.append((name, value)) return r def escape_leading_slashes(url): """ If redirecting to an absolute path (two leading slashes), a slash must be escaped to prevent browsers from handling the path as schemaless and redirecting to another host. """ if url.startswith('//'): url = '/%2F{}'.format(url[2:]) return url
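

# --- Editor's addition: a usage sketch for a few of the helpers above; not
# part of the original module, and the function name is invented for
# illustration. None of these calls require configured Django settings.
def _demo_http_utils():  # hypothetical helper, added as an example
    # base36 round-trip, as used for compact tokens in URLs.
    assert int_to_base36(36) == '10'
    assert base36_to_int(int_to_base36(2 ** 20)) == 2 ** 20
    # URL-safe base64 with trailing '=' padding stripped on encode and
    # restored on decode.
    assert urlsafe_base64_encode(b'hello') == 'aGVsbG8'
    assert urlsafe_base64_decode('aGVsbG8') == b'hello'
    # Host/scheme validation for redirect targets.
    assert url_has_allowed_host_and_scheme(
        'https://example.com/next', {'example.com'})
    assert not url_has_allowed_host_and_scheme(
        'https://evil.com/next', {'example.com'})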
4bb44586d48387c75e7226905c20f50d77507fee448f68dae6f7d271d9c153f9
""" This module contains helper functions for controlling caching. It does so by managing the "Vary" header of responses. It includes functions to patch the header of response objects directly and decorators that change functions to do that header-patching themselves. For information on the Vary header, see: https://tools.ietf.org/html/rfc7231#section-7.1.4 Essentially, the "Vary" HTTP header defines which headers a cache should take into account when building its cache key. Requests with the same path but different header content for headers named in "Vary" need to get different cache keys to prevent delivery of wrong content. An example: i18n middleware would need to distinguish caches by the "Accept-language" header. """ import hashlib import time from collections import defaultdict from django.conf import settings from django.core.cache import caches from django.http import HttpResponse, HttpResponseNotModified from django.utils.encoding import iri_to_uri from django.utils.http import ( http_date, parse_etags, parse_http_date_safe, quote_etag, ) from django.utils.log import log_response from django.utils.regex_helper import _lazy_re_compile from django.utils.timezone import get_current_timezone_name from django.utils.translation import get_language cc_delim_re = _lazy_re_compile(r'\s*,\s*') def patch_cache_control(response, **kwargs): """ Patch the Cache-Control header by adding all keyword arguments to it. The transformation is as follows: * All keyword parameter names are turned to lowercase, and underscores are converted to hyphens. * If the value of a parameter is True (exactly True, not just a true value), only the parameter name is added to the header. * All other parameters are added with their value, after applying str() to it. """ def dictitem(s): t = s.split('=', 1) if len(t) > 1: return (t[0].lower(), t[1]) else: return (t[0].lower(), True) def dictvalue(*t): if t[1] is True: return t[0] else: return '%s=%s' % (t[0], t[1]) cc = defaultdict(set) if response.get('Cache-Control'): for field in cc_delim_re.split(response['Cache-Control']): directive, value = dictitem(field) if directive == 'no-cache': # no-cache supports multiple field names. cc[directive].add(value) else: cc[directive] = value # If there's already a max-age header but we're being asked to set a new # max-age, use the minimum of the two ages. In practice this happens when # a decorator and a piece of middleware both operate on a given view. if 'max-age' in cc and 'max_age' in kwargs: kwargs['max_age'] = min(int(cc['max-age']), kwargs['max_age']) # Allow overriding private caching and vice versa if 'private' in cc and 'public' in kwargs: del cc['private'] elif 'public' in cc and 'private' in kwargs: del cc['public'] for (k, v) in kwargs.items(): directive = k.replace('_', '-') if directive == 'no-cache': # no-cache supports multiple field names. cc[directive].add(v) else: cc[directive] = v directives = [] for directive, values in cc.items(): if isinstance(values, set): if True in values: # True takes precedence. values = {True} directives.extend([dictvalue(directive, value) for value in values]) else: directives.append(dictvalue(directive, values)) cc = ', '.join(directives) response['Cache-Control'] = cc def get_max_age(response): """ Return the max-age from the response Cache-Control header as an integer, or None if it wasn't found or wasn't an integer. 
""" if not response.has_header('Cache-Control'): return cc = dict(_to_tuple(el) for el in cc_delim_re.split(response['Cache-Control'])) try: return int(cc['max-age']) except (ValueError, TypeError, KeyError): pass def set_response_etag(response): if not response.streaming and response.content: response['ETag'] = quote_etag(hashlib.md5(response.content).hexdigest()) return response def _precondition_failed(request): response = HttpResponse(status=412) log_response( 'Precondition Failed: %s', request.path, response=response, request=request, ) return response def _not_modified(request, response=None): new_response = HttpResponseNotModified() if response: # Preserve the headers required by Section 4.1 of RFC 7232, as well as # Last-Modified. for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'): if header in response: new_response[header] = response[header] # Preserve cookies as per the cookie specification: "If a proxy server # receives a response which contains a Set-cookie header, it should # propagate the Set-cookie header to the client, regardless of whether # the response was 304 (Not Modified) or 200 (OK). # https://curl.haxx.se/rfc/cookie_spec.html new_response.cookies = response.cookies return new_response def get_conditional_response(request, etag=None, last_modified=None, response=None): # Only return conditional responses on successful requests. if response and not (200 <= response.status_code < 300): return response # Get HTTP request headers. if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', '')) if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE') if_unmodified_since = if_unmodified_since and parse_http_date_safe(if_unmodified_since) if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', '')) if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE') if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since) # Step 1 of section 6 of RFC 7232: Test the If-Match precondition. if if_match_etags and not _if_match_passes(etag, if_match_etags): return _precondition_failed(request) # Step 2: Test the If-Unmodified-Since precondition. if (not if_match_etags and if_unmodified_since and not _if_unmodified_since_passes(last_modified, if_unmodified_since)): return _precondition_failed(request) # Step 3: Test the If-None-Match precondition. if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags): if request.method in ('GET', 'HEAD'): return _not_modified(request, response) else: return _precondition_failed(request) # Step 4: Test the If-Modified-Since precondition. if (not if_none_match_etags and if_modified_since and not _if_modified_since_passes(last_modified, if_modified_since)): if request.method in ('GET', 'HEAD'): return _not_modified(request, response) # Step 5: Test the If-Range precondition (not supported). # Step 6: Return original response since there isn't a conditional response. return response def _if_match_passes(target_etag, etags): """ Test the If-Match comparison as defined in section 3.1 of RFC 7232. """ if not target_etag: # If there isn't an ETag, then there can't be a match. return False elif etags == ['*']: # The existence of an ETag means that there is "a current # representation for the target resource", even if the ETag is weak, # so there is a match to '*'. return True elif target_etag.startswith('W/'): # A weak ETag can never strongly match another ETag. 
return False else: # Since the ETag is strong, this will only return True if there's a # strong match. return target_etag in etags def _if_unmodified_since_passes(last_modified, if_unmodified_since): """ Test the If-Unmodified-Since comparison as defined in section 3.4 of RFC 7232. """ return last_modified and last_modified <= if_unmodified_since def _if_none_match_passes(target_etag, etags): """ Test the If-None-Match comparison as defined in section 3.2 of RFC 7232. """ if not target_etag: # If there isn't an ETag, then there isn't a match. return True elif etags == ['*']: # The existence of an ETag means that there is "a current # representation for the target resource", so there is a match to '*'. return False else: # The comparison should be weak, so look for a match after stripping # off any weak indicators. target_etag = target_etag.strip('W/') etags = (etag.strip('W/') for etag in etags) return target_etag not in etags def _if_modified_since_passes(last_modified, if_modified_since): """ Test the If-Modified-Since comparison as defined in section 3.3 of RFC 7232. """ return not last_modified or last_modified > if_modified_since def patch_response_headers(response, cache_timeout=None): """ Add HTTP caching headers to the given HttpResponse: Expires and Cache-Control. Each header is only added if it isn't already set. cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used by default. """ if cache_timeout is None: cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS if cache_timeout < 0: cache_timeout = 0 # Can't have max-age negative if not response.has_header('Expires'): response['Expires'] = http_date(time.time() + cache_timeout) patch_cache_control(response, max_age=cache_timeout) def add_never_cache_headers(response): """ Add headers to a response to indicate that a page should never be cached. """ patch_response_headers(response, cache_timeout=-1) patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True, private=True) def patch_vary_headers(response, newheaders): """ Add (or update) the "Vary" header in the given HttpResponse object. newheaders is a list of header names that should be in "Vary". If headers contains an asterisk, then "Vary" header will consist of a single asterisk '*'. Otherwise, existing headers in "Vary" aren't removed. """ # Note that we need to keep the original order intact, because cache # implementations may rely on the order of the Vary contents in, say, # computing an MD5 hash. if response.has_header('Vary'): vary_headers = cc_delim_re.split(response['Vary']) else: vary_headers = [] # Use .lower() here so we treat headers as case-insensitive. existing_headers = {header.lower() for header in vary_headers} additional_headers = [newheader for newheader in newheaders if newheader.lower() not in existing_headers] vary_headers += additional_headers if '*' in vary_headers: response['Vary'] = '*' else: response['Vary'] = ', '.join(vary_headers) def has_vary_header(response, header_query): """ Check to see if the response has a given header name in its Vary header. 
""" if not response.has_header('Vary'): return False vary_headers = cc_delim_re.split(response['Vary']) existing_headers = {header.lower() for header in vary_headers} return header_query.lower() in existing_headers def _i18n_cache_key_suffix(request, cache_key): """If necessary, add the current locale or time zone to the cache key.""" if settings.USE_I18N or settings.USE_L10N: # first check if LocaleMiddleware or another middleware added # LANGUAGE_CODE to request, then fall back to the active language # which in turn can also fall back to settings.LANGUAGE_CODE cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language()) if settings.USE_TZ: cache_key += '.%s' % get_current_timezone_name() return cache_key def _generate_cache_key(request, method, headerlist, key_prefix): """Return a cache key from the headers given in the header list.""" ctx = hashlib.md5() for header in headerlist: value = request.META.get(header) if value is not None: ctx.update(value.encode()) url = hashlib.md5(iri_to_uri(request.build_absolute_uri()).encode('ascii')) cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % ( key_prefix, method, url.hexdigest(), ctx.hexdigest()) return _i18n_cache_key_suffix(request, cache_key) def _generate_cache_header_key(key_prefix, request): """Return a cache key for the header cache.""" url = hashlib.md5(iri_to_uri(request.build_absolute_uri()).encode('ascii')) cache_key = 'views.decorators.cache.cache_header.%s.%s' % ( key_prefix, url.hexdigest()) return _i18n_cache_key_suffix(request, cache_key) def get_cache_key(request, key_prefix=None, method='GET', cache=None): """ Return a cache key based on the request URL and query. It can be used in the request phase because it pulls the list of headers to take into account from the global URL registry and uses those to build a cache key to check against. If there isn't a headerlist stored, return None, indicating that the page needs to be rebuilt. """ if key_prefix is None: key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX cache_key = _generate_cache_header_key(key_prefix, request) if cache is None: cache = caches[settings.CACHE_MIDDLEWARE_ALIAS] headerlist = cache.get(cache_key) if headerlist is not None: return _generate_cache_key(request, method, headerlist, key_prefix) else: return None def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None): """ Learn what headers to take into account for some request URL from the response object. Store those headers in a global URL registry so that later access to that URL will know what headers to take into account without building the response object itself. The headers are named in the Vary header of the response, but we want to prevent response generation. The list of headers to use for cache key generation is stored in the same cache as the pages themselves. If the cache ages some data out of the cache, this just means that we have to build the response once to get at the Vary header and so at the list of headers to use for the cache key. """ if key_prefix is None: key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX if cache_timeout is None: cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS cache_key = _generate_cache_header_key(key_prefix, request) if cache is None: cache = caches[settings.CACHE_MIDDLEWARE_ALIAS] if response.has_header('Vary'): is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N # If i18n or l10n are used, the generated cache key will be suffixed # with the current locale. 
Adding the raw value of Accept-Language is # redundant in that case and would result in storing the same content # under multiple keys in the cache. See #18191 for details. headerlist = [] for header in cc_delim_re.split(response['Vary']): header = header.upper().replace('-', '_') if header != 'ACCEPT_LANGUAGE' or not is_accept_language_redundant: headerlist.append('HTTP_' + header) headerlist.sort() cache.set(cache_key, headerlist, cache_timeout) return _generate_cache_key(request, request.method, headerlist, key_prefix) else: # if there is no Vary header, we still need a cache key # for the request.build_absolute_uri() cache.set(cache_key, [], cache_timeout) return _generate_cache_key(request, request.method, [], key_prefix) def _to_tuple(s): t = s.split('=', 1) if len(t) == 2: return t[0].lower(), t[1] return t[0].lower(), True
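

# --- Editor's addition: a usage sketch for patch_vary_headers() and
# patch_cache_control(); not part of the original module. The function name
# is invented, and settings.configure() appears only so the sketch is
# self-contained when run outside a configured project.
def _demo_cache_headers():  # hypothetical helper, added as an example
    from django.conf import settings
    if not settings.configured:
        settings.configure()  # minimal setup, illustration only
    from django.http import HttpResponse

    response = HttpResponse('ok')
    # Append to (or create) the Vary header without clobbering it.
    patch_vary_headers(response, ['Cookie', 'Accept-Language'])
    # Underscores in keyword names become hyphens; True-valued directives
    # are emitted bare.
    patch_cache_control(response, max_age=600, public=True)
    assert response['Vary'] == 'Cookie, Accept-Language'
    assert response['Cache-Control'] == 'max-age=600, public'
    return response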
cfff24e847351f1e9e530dacb286a995a1e90ed782be4e71d562ea36968197c5
""" This is the Django template system. How it works: The Lexer.tokenize() method converts a template string (i.e., a string containing markup with custom template tags) to tokens, which can be either plain text (TokenType.TEXT), variables (TokenType.VAR), or block statements (TokenType.BLOCK). The Parser() class takes a list of tokens in its constructor, and its parse() method returns a compiled template -- which is, under the hood, a list of Node objects. Each Node is responsible for creating some sort of output -- e.g. simple text (TextNode), variable values in a given context (VariableNode), results of basic logic (IfNode), results of looping (ForNode), or anything else. The core Node types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can define their own custom node types. Each Node has a render() method, which takes a Context and returns a string of the rendered node. For example, the render() method of a Variable Node returns the variable's value as a string. The render() method of a ForNode returns the rendered output of whatever was inside the loop, recursively. The Template class is a convenient wrapper that takes care of template compilation and rendering. Usage: The only thing you should ever use directly in this file is the Template class. Create a compiled template object with a template_string, then call render() with a context. In the compilation stage, the TemplateSyntaxError exception will be raised if the template doesn't have proper syntax. Sample code: >>> from django import template >>> s = '<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>' >>> t = template.Template(s) (t is now a compiled template, and its render() method can be called multiple times with multiple contexts) >>> c = template.Context({'test':True, 'varvalue': 'Hello'}) >>> t.render(c) '<html><h1>Hello</h1></html>' >>> c = template.Context({'test':False, 'varvalue': 'Hello'}) >>> t.render(c) '<html></html>' """ import logging import re from enum import Enum from inspect import getcallargs, getfullargspec, unwrap from django.template.context import BaseContext from django.utils.formats import localize from django.utils.html import conditional_escape, escape from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import SafeData, mark_safe from django.utils.text import ( get_text_list, smart_split, unescape_string_literal, ) from django.utils.timezone import template_localtime from django.utils.translation import gettext_lazy, pgettext_lazy from .exceptions import TemplateSyntaxError # template syntax constants FILTER_SEPARATOR = '|' FILTER_ARGUMENT_SEPARATOR = ':' VARIABLE_ATTRIBUTE_SEPARATOR = '.' BLOCK_TAG_START = '{%' BLOCK_TAG_END = '%}' VARIABLE_TAG_START = '{{' VARIABLE_TAG_END = '}}' COMMENT_TAG_START = '{#' COMMENT_TAG_END = '#}' TRANSLATOR_COMMENT_MARK = 'Translators' SINGLE_BRACE_START = '{' SINGLE_BRACE_END = '}' # what to report as the origin for templates that come from non-loader sources # (e.g. 
strings) UNKNOWN_SOURCE = '<unknown source>' # match a variable or block tag and capture the entire tag, including start/end # delimiters tag_re = (_lazy_re_compile('(%s.*?%s|%s.*?%s|%s.*?%s)' % (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END), re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END), re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END)))) logger = logging.getLogger('django.template') class TokenType(Enum): TEXT = 0 VAR = 1 BLOCK = 2 COMMENT = 3 class VariableDoesNotExist(Exception): def __init__(self, msg, params=()): self.msg = msg self.params = params def __str__(self): return self.msg % self.params class Origin: def __init__(self, name, template_name=None, loader=None): self.name = name self.template_name = template_name self.loader = loader def __str__(self): return self.name def __eq__(self, other): return ( isinstance(other, Origin) and self.name == other.name and self.loader == other.loader ) @property def loader_name(self): if self.loader: return '%s.%s' % ( self.loader.__module__, self.loader.__class__.__name__, ) class Template: def __init__(self, template_string, origin=None, name=None, engine=None): # If Template is instantiated directly rather than from an Engine and # exactly one Django template engine is configured, use that engine. # This is required to preserve backwards-compatibility for direct use # e.g. Template('...').render(Context({...})) if engine is None: from .engine import Engine engine = Engine.get_default() if origin is None: origin = Origin(UNKNOWN_SOURCE) self.name = name self.origin = origin self.engine = engine self.source = str(template_string) # May be lazy. self.nodelist = self.compile_nodelist() def __iter__(self): for node in self.nodelist: yield from node def _render(self, context): return self.nodelist.render(context) def render(self, context): "Display stage -- can be called many times" with context.render_context.push_state(self): if context.template is None: with context.bind_template(self): context.template_name = self.name return self._render(context) else: return self._render(context) def compile_nodelist(self): """ Parse and compile the template source into a nodelist. If debug is True and an exception occurs during parsing, the exception is annotated with contextual line information where it occurred in the template source. """ if self.engine.debug: lexer = DebugLexer(self.source) else: lexer = Lexer(self.source) tokens = lexer.tokenize() parser = Parser( tokens, self.engine.template_libraries, self.engine.template_builtins, self.origin, ) try: return parser.parse() except Exception as e: if self.engine.debug: e.template_debug = self.get_exception_info(e, e.token) raise def get_exception_info(self, exception, token): """ Return a dictionary containing contextual line information of where the exception occurred in the template. The following information is provided: message The message of the exception raised. source_lines The lines before, after, and including the line the exception occurred on. line The line number the exception occurred on. before, during, after The line the exception occurred on split into three parts: 1. The content before the token that raised the error. 2. The token that raised the error. 3. The content after the token that raised the error. total The number of lines in source_lines. top The line number where source_lines starts. bottom The line number where source_lines ends. start The start position of the token in the template source. 
end The end position of the token in the template source. """ start, end = token.position context_lines = 10 line = 0 upto = 0 source_lines = [] before = during = after = "" for num, next in enumerate(linebreak_iter(self.source)): if start >= upto and end <= next: line = num before = escape(self.source[upto:start]) during = escape(self.source[start:end]) after = escape(self.source[end:next]) source_lines.append((num, escape(self.source[upto:next]))) upto = next total = len(source_lines) top = max(1, line - context_lines) bottom = min(total, line + 1 + context_lines) # In some rare cases exc_value.args can be empty or an invalid # string. try: message = str(exception.args[0]) except (IndexError, UnicodeDecodeError): message = '(Could not get exception message)' return { 'message': message, 'source_lines': source_lines[top:bottom], 'before': before, 'during': during, 'after': after, 'top': top, 'bottom': bottom, 'total': total, 'line': line, 'name': self.origin.name, 'start': start, 'end': end, } def linebreak_iter(template_source): yield 0 p = template_source.find('\n') while p >= 0: yield p + 1 p = template_source.find('\n', p + 1) yield len(template_source) + 1 class Token: def __init__(self, token_type, contents, position=None, lineno=None): """ A token representing a string from the template. token_type A TokenType, either .TEXT, .VAR, .BLOCK, or .COMMENT. contents The token source string. position An optional tuple containing the start and end index of the token in the template source. This is used for traceback information when debug is on. lineno The line number the token appears on in the template source. This is used for traceback information and gettext files. """ self.token_type, self.contents = token_type, contents self.lineno = lineno self.position = position def __str__(self): token_name = self.token_type.name.capitalize() return ('<%s token: "%s...">' % (token_name, self.contents[:20].replace('\n', ''))) def split_contents(self): split = [] bits = smart_split(self.contents) for bit in bits: # Handle translation-marked template pieces if bit.startswith(('_("', "_('")): sentinel = bit[2] + ')' trans_bit = [bit] while not bit.endswith(sentinel): bit = next(bits) trans_bit.append(bit) bit = ' '.join(trans_bit) split.append(bit) return split class Lexer: def __init__(self, template_string): self.template_string = template_string self.verbatim = False def tokenize(self): """ Return a list of tokens from a given template_string. """ in_tag = False lineno = 1 result = [] for bit in tag_re.split(self.template_string): if bit: result.append(self.create_token(bit, None, lineno, in_tag)) in_tag = not in_tag lineno += bit.count('\n') return result def create_token(self, token_string, position, lineno, in_tag): """ Convert the given token string into a new Token object and return it. If in_tag is True, we are processing something that matched a tag, otherwise it should be treated as a literal string. """ if in_tag and token_string.startswith(BLOCK_TAG_START): # The [2:-2] ranges below strip off *_TAG_START and *_TAG_END. # We could do len(BLOCK_TAG_START) to be more "correct", but we've # hard-coded the 2s here for performance. And it's not like # the TAG_START values are going to change anytime, anyway. 
block_content = token_string[2:-2].strip() if self.verbatim and block_content == self.verbatim: self.verbatim = False if in_tag and not self.verbatim: if token_string.startswith(VARIABLE_TAG_START): return Token(TokenType.VAR, token_string[2:-2].strip(), position, lineno) elif token_string.startswith(BLOCK_TAG_START): if block_content[:9] in ('verbatim', 'verbatim '): self.verbatim = 'end%s' % block_content return Token(TokenType.BLOCK, block_content, position, lineno) elif token_string.startswith(COMMENT_TAG_START): content = '' if token_string.find(TRANSLATOR_COMMENT_MARK): content = token_string[2:-2].strip() return Token(TokenType.COMMENT, content, position, lineno) else: return Token(TokenType.TEXT, token_string, position, lineno) class DebugLexer(Lexer): def tokenize(self): """ Split a template string into tokens and annotates each token with its start and end position in the source. This is slower than the default lexer so only use it when debug is True. """ lineno = 1 result = [] upto = 0 for match in tag_re.finditer(self.template_string): start, end = match.span() if start > upto: token_string = self.template_string[upto:start] result.append(self.create_token(token_string, (upto, start), lineno, in_tag=False)) lineno += token_string.count('\n') token_string = self.template_string[start:end] result.append(self.create_token(token_string, (start, end), lineno, in_tag=True)) lineno += token_string.count('\n') upto = end last_bit = self.template_string[upto:] if last_bit: result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), lineno, in_tag=False)) return result class Parser: def __init__(self, tokens, libraries=None, builtins=None, origin=None): # Reverse the tokens so delete_first_token(), prepend_token(), and # next_token() can operate at the end of the list in constant time. self.tokens = list(reversed(tokens)) self.tags = {} self.filters = {} self.command_stack = [] if libraries is None: libraries = {} if builtins is None: builtins = [] self.libraries = libraries for builtin in builtins: self.add_library(builtin) self.origin = origin def parse(self, parse_until=None): """ Iterate through the parser tokens and compiles each one into a node. If parse_until is provided, parsing will stop once one of the specified tokens has been reached. This is formatted as a list of tokens, e.g. ['elif', 'else', 'endif']. If no matching token is reached, raise an exception with the unclosed block tag details. """ if parse_until is None: parse_until = [] nodelist = NodeList() while self.tokens: token = self.next_token() # Use the raw values here for TokenType.* for a tiny performance boost. if token.token_type.value == 0: # TokenType.TEXT self.extend_nodelist(nodelist, TextNode(token.contents), token) elif token.token_type.value == 1: # TokenType.VAR if not token.contents: raise self.error(token, 'Empty variable tag on line %d' % token.lineno) try: filter_expression = self.compile_filter(token.contents) except TemplateSyntaxError as e: raise self.error(token, e) var_node = VariableNode(filter_expression) self.extend_nodelist(nodelist, var_node, token) elif token.token_type.value == 2: # TokenType.BLOCK try: command = token.contents.split()[0] except IndexError: raise self.error(token, 'Empty block tag on line %d' % token.lineno) if command in parse_until: # A matching token has been reached. Return control to # the caller. Put the token back on the token list so the # caller knows where it terminated. self.prepend_token(token) return nodelist # Add the token to the command stack. 
This is used for error # messages if further parsing fails due to an unclosed block # tag. self.command_stack.append((command, token)) # Get the tag callback function from the ones registered with # the parser. try: compile_func = self.tags[command] except KeyError: self.invalid_block_tag(token, command, parse_until) # Compile the callback into a node object and add it to # the node list. try: compiled_result = compile_func(self, token) except Exception as e: raise self.error(token, e) self.extend_nodelist(nodelist, compiled_result, token) # Compile success. Remove the token from the command stack. self.command_stack.pop() if parse_until: self.unclosed_block_tag(parse_until) return nodelist def skip_past(self, endtag): while self.tokens: token = self.next_token() if token.token_type == TokenType.BLOCK and token.contents == endtag: return self.unclosed_block_tag([endtag]) def extend_nodelist(self, nodelist, node, token): # Check that non-text nodes don't appear before an extends tag. if node.must_be_first and nodelist.contains_nontext: raise self.error( token, '%r must be the first tag in the template.' % node, ) if isinstance(nodelist, NodeList) and not isinstance(node, TextNode): nodelist.contains_nontext = True # Set origin and token here since we can't modify the node __init__() # method. node.token = token node.origin = self.origin nodelist.append(node) def error(self, token, e): """ Return an exception annotated with the originating token. Since the parser can be called recursively, check if a token is already set. This ensures the innermost token is highlighted if an exception occurs, e.g. a compile error within the body of an if statement. """ if not isinstance(e, Exception): e = TemplateSyntaxError(e) if not hasattr(e, 'token'): e.token = token return e def invalid_block_tag(self, token, command, parse_until=None): if parse_until: raise self.error( token, "Invalid block tag on line %d: '%s', expected %s. Did you " "forget to register or load this tag?" % ( token.lineno, command, get_text_list(["'%s'" % p for p in parse_until], 'or'), ), ) raise self.error( token, "Invalid block tag on line %d: '%s'. Did you forget to register " "or load this tag?" % (token.lineno, command) ) def unclosed_block_tag(self, parse_until): command, token = self.command_stack.pop() msg = "Unclosed tag on line %d: '%s'. Looking for one of: %s." % ( token.lineno, command, ', '.join(parse_until), ) raise self.error(token, msg) def next_token(self): return self.tokens.pop() def prepend_token(self, token): self.tokens.append(token) def delete_first_token(self): del self.tokens[-1] def add_library(self, lib): self.tags.update(lib.tags) self.filters.update(lib.filters) def compile_filter(self, token): """ Convenient wrapper for FilterExpression """ return FilterExpression(token, self) def find_filter(self, filter_name): if filter_name in self.filters: return self.filters[filter_name] else: raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name) # This only matches constant *strings* (things in quotes or marked for # translation). Numbers are treated as variables for implementation reasons # (so that they retain their type when passed to filters). 
constant_string = r""" (?:%(i18n_open)s%(strdq)s%(i18n_close)s| %(i18n_open)s%(strsq)s%(i18n_close)s| %(strdq)s| %(strsq)s) """ % { 'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string 'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string 'i18n_open': re.escape("_("), 'i18n_close': re.escape(")"), } constant_string = constant_string.replace("\n", "") filter_raw_string = r""" ^(?P<constant>%(constant)s)| ^(?P<var>[%(var_chars)s]+|%(num)s)| (?:\s*%(filter_sep)s\s* (?P<filter_name>\w+) (?:%(arg_sep)s (?: (?P<constant_arg>%(constant)s)| (?P<var_arg>[%(var_chars)s]+|%(num)s) ) )? )""" % { 'constant': constant_string, 'num': r'[-+\.]?\d[\d\.e]*', 'var_chars': r'\w\.', 'filter_sep': re.escape(FILTER_SEPARATOR), 'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR), } filter_re = _lazy_re_compile(filter_raw_string, re.VERBOSE) class FilterExpression: """ Parse a variable token and its optional filters (all as a single string), and return a list of tuples of the filter name and arguments. Sample:: >>> token = 'variable|default:"Default value"|date:"Y-m-d"' >>> p = Parser('') >>> fe = FilterExpression(token, p) >>> len(fe.filters) 2 >>> fe.var <Variable: 'variable'> """ def __init__(self, token, parser): self.token = token matches = filter_re.finditer(token) var_obj = None filters = [] upto = 0 for match in matches: start = match.start() if upto != start: raise TemplateSyntaxError("Could not parse some characters: " "%s|%s|%s" % (token[:upto], token[upto:start], token[start:])) if var_obj is None: var, constant = match.group("var", "constant") if constant: try: var_obj = Variable(constant).resolve({}) except VariableDoesNotExist: var_obj = None elif var is None: raise TemplateSyntaxError("Could not find variable at " "start of %s." % token) else: var_obj = Variable(var) else: filter_name = match.group("filter_name") args = [] constant_arg, var_arg = match.group("constant_arg", "var_arg") if constant_arg: args.append((False, Variable(constant_arg).resolve({}))) elif var_arg: args.append((True, Variable(var_arg))) filter_func = parser.find_filter(filter_name) self.args_check(filter_name, filter_func, args) filters.append((filter_func, args)) upto = match.end() if upto != len(token): raise TemplateSyntaxError("Could not parse the remainder: '%s' " "from '%s'" % (token[upto:], token)) self.filters = filters self.var = var_obj def resolve(self, context, ignore_failures=False): if isinstance(self.var, Variable): try: obj = self.var.resolve(context) except VariableDoesNotExist: if ignore_failures: obj = None else: string_if_invalid = context.template.engine.string_if_invalid if string_if_invalid: if '%s' in string_if_invalid: return string_if_invalid % self.var else: return string_if_invalid else: obj = string_if_invalid else: obj = self.var for func, args in self.filters: arg_vals = [] for lookup, arg in args: if not lookup: arg_vals.append(mark_safe(arg)) else: arg_vals.append(arg.resolve(context)) if getattr(func, 'expects_localtime', False): obj = template_localtime(obj, context.use_tz) if getattr(func, 'needs_autoescape', False): new_obj = func(obj, autoescape=context.autoescape, *arg_vals) else: new_obj = func(obj, *arg_vals) if getattr(func, 'is_safe', False) and isinstance(obj, SafeData): obj = mark_safe(new_obj) else: obj = new_obj return obj def args_check(name, func, provided): provided = list(provided) # First argument, filter input, is implied. plen = len(provided) + 1 # Check to see if a decorator is providing the real function. 
func = unwrap(func) args, _, _, defaults, _, _, _ = getfullargspec(func) alen = len(args) dlen = len(defaults or []) # Not enough OR Too many if plen < (alen - dlen) or plen > alen: raise TemplateSyntaxError("%s requires %d arguments, %d provided" % (name, alen - dlen, plen)) return True args_check = staticmethod(args_check) def __str__(self): return self.token class Variable: """ A template variable, resolvable against a given context. The variable may be a hard-coded string (if it begins and ends with single or double quote marks):: >>> c = {'article': {'section':'News'}} >>> Variable('article.section').resolve(c) 'News' >>> Variable('article').resolve(c) {'section': 'News'} >>> class AClass: pass >>> c = AClass() >>> c.article = AClass() >>> c.article.section = 'News' (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.') """ def __init__(self, var): self.var = var self.literal = None self.lookups = None self.translate = False self.message_context = None if not isinstance(var, str): raise TypeError( "Variable must be a string or number, got %s" % type(var)) try: # First try to treat this variable as a number. # # Note that this could cause an OverflowError here that we're not # catching. Since this should only happen at compile time, that's # probably OK. # Try to interpret values containing a period or an 'e'/'E' # (possibly scientific notation) as a float; otherwise, try int. if '.' in var or 'e' in var.lower(): self.literal = float(var) # "2." is invalid if var.endswith('.'): raise ValueError else: self.literal = int(var) except ValueError: # A ValueError means that the variable isn't a number. if var.startswith('_(') and var.endswith(')'): # The result of the lookup should be translated at rendering # time. self.translate = True var = var[2:-1] # If it's wrapped with quotes (single or double), then # we're also dealing with a literal. try: self.literal = mark_safe(unescape_string_literal(var)) except ValueError: # Otherwise we'll set self.lookups so that resolve() knows we're # dealing with a bonafide variable if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_': raise TemplateSyntaxError("Variables and attributes may " "not begin with underscores: '%s'" % var) self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR)) def resolve(self, context): """Resolve this variable against a given context.""" if self.lookups is not None: # We're dealing with a variable that needs to be resolved value = self._resolve_lookup(context) else: # We're dealing with a literal, so it's already been "resolved" value = self.literal if self.translate: is_safe = isinstance(value, SafeData) msgid = value.replace('%', '%%') msgid = mark_safe(msgid) if is_safe else msgid if self.message_context: return pgettext_lazy(self.message_context, msgid) else: return gettext_lazy(msgid) return value def __repr__(self): return "<%s: %r>" % (self.__class__.__name__, self.var) def __str__(self): return self.var def _resolve_lookup(self, context): """ Perform resolution of a real variable (i.e. not a literal) against the given context. As indicated by the method's name, this method is an implementation detail and shouldn't be called by external code. Use Variable.resolve() instead. 
""" current = context try: # catch-all for silent variable failures for bit in self.lookups: try: # dictionary lookup current = current[bit] # ValueError/IndexError are for numpy.array lookup on # numpy < 1.9 and 1.9+ respectively except (TypeError, AttributeError, KeyError, ValueError, IndexError): try: # attribute lookup # Don't return class attributes if the class is the context: if isinstance(current, BaseContext) and getattr(type(current), bit): raise AttributeError current = getattr(current, bit) except (TypeError, AttributeError): # Reraise if the exception was raised by a @property if not isinstance(current, BaseContext) and bit in dir(current): raise try: # list-index lookup current = current[int(bit)] except (IndexError, # list index out of range ValueError, # invalid literal for int() KeyError, # current is a dict without `int(bit)` key TypeError): # unsubscriptable object raise VariableDoesNotExist("Failed lookup for key " "[%s] in %r", (bit, current)) # missing attribute if callable(current): if getattr(current, 'do_not_call_in_templates', False): pass elif getattr(current, 'alters_data', False): current = context.template.engine.string_if_invalid else: try: # method call (assuming no args required) current = current() except TypeError: try: getcallargs(current) except TypeError: # arguments *were* required current = context.template.engine.string_if_invalid # invalid method call else: raise except Exception as e: template_name = getattr(context, 'template_name', None) or 'unknown' logger.debug( "Exception while resolving variable '%s' in template '%s'.", bit, template_name, exc_info=True, ) if getattr(e, 'silent_variable_failure', False): current = context.template.engine.string_if_invalid else: raise return current class Node: # Set this to True for nodes that must be first in the template (although # they can be preceded by text nodes. must_be_first = False child_nodelists = ('nodelist',) token = None def render(self, context): """ Return the node rendered as a string. """ pass def render_annotated(self, context): """ Render the node. If debug is True and an exception occurs during rendering, the exception is annotated with contextual line information where it occurred in the template. For internal usage this method is preferred over using the render method directly. """ try: return self.render(context) except Exception as e: if context.template.engine.debug and not hasattr(e, 'template_debug'): e.template_debug = context.render_context.template.get_exception_info(e, self.token) raise def __iter__(self): yield self def get_nodes_by_type(self, nodetype): """ Return a list of all nodes (within this node and its nodelist) of the given type """ nodes = [] if isinstance(self, nodetype): nodes.append(self) for attr in self.child_nodelists: nodelist = getattr(self, attr, None) if nodelist: nodes.extend(nodelist.get_nodes_by_type(nodetype)) return nodes class NodeList(list): # Set to True the first time a non-TextNode is inserted by # extend_nodelist(). 
contains_nontext = False def render(self, context): bits = [] for node in self: if isinstance(node, Node): bit = node.render_annotated(context) else: bit = node bits.append(str(bit)) return mark_safe(''.join(bits)) def get_nodes_by_type(self, nodetype): "Return a list of all nodes of the given type" nodes = [] for node in self: nodes.extend(node.get_nodes_by_type(nodetype)) return nodes class TextNode(Node): def __init__(self, s): self.s = s def __repr__(self): return "<%s: %r>" % (self.__class__.__name__, self.s[:25]) def render(self, context): return self.s def render_value_in_context(value, context): """ Convert any value to a string to become part of a rendered template. This means escaping, if required, and conversion to a string. If value is a string, it's expected to already be translated. """ value = template_localtime(value, use_tz=context.use_tz) value = localize(value, use_l10n=context.use_l10n) if context.autoescape: if not issubclass(type(value), str): value = str(value) return conditional_escape(value) else: return str(value) class VariableNode(Node): def __init__(self, filter_expression): self.filter_expression = filter_expression def __repr__(self): return "<Variable Node: %s>" % self.filter_expression def render(self, context): try: output = self.filter_expression.resolve(context) except UnicodeDecodeError: # Unicode conversion can fail sometimes for reasons out of our # control (e.g. exception rendering). In that case, we fail # quietly. return '' return render_value_in_context(output, context) # Regex for token keyword arguments kwarg_re = _lazy_re_compile(r"(?:(\w+)=)?(.+)") def token_kwargs(bits, parser, support_legacy=False): """ Parse token keyword arguments and return a dictionary of the arguments retrieved from the ``bits`` token list. `bits` is a list containing the remainder of the token (split by spaces) that is to be checked for arguments. Valid arguments are removed from this list. `support_legacy` - if True, the legacy format ``1 as foo`` is accepted. Otherwise, only the standard ``foo=1`` format is allowed. There is no requirement for all remaining token ``bits`` to be keyword arguments, so return the dictionary as soon as an invalid argument format is reached. """ if not bits: return {} match = kwarg_re.match(bits[0]) kwarg_format = match and match.group(1) if not kwarg_format: if not support_legacy: return {} if len(bits) < 3 or bits[1] != 'as': return {} kwargs = {} while bits: if kwarg_format: match = kwarg_re.match(bits[0]) if not match or not match.group(1): return kwargs key, value = match.groups() del bits[:1] else: if len(bits) < 3 or bits[1] != 'as': return kwargs key, value = bits[2], bits[0] del bits[:3] kwargs[key] = parser.compile_filter(value) if bits and not kwarg_format: if bits[0] != 'and': return kwargs del bits[:1] return kwargs
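# Minimal usage sketch of the Variable lookup rules documented above; the
# context values are illustrative only. Dotted names walk dictionary keys,
# attributes, and list indexes in turn, while quoted strings and numbers are
# parsed as literals rather than lookups.
if __name__ == '__main__':
    c = {'article': {'section': 'News'}}
    assert Variable('article.section').resolve(c) == 'News'  # dict lookup
    assert Variable('"hi"').resolve({}) == 'hi'              # quoted literal
    assert Variable('2.5').resolve({}) == 2.5                # numeric literal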
eea8b0961cd090ee54d79214b1b7ee7113cfe55d5cc5928074f6e3c672499a9f
"""Default variable filters.""" import random as random_module import re import types from decimal import ROUND_HALF_UP, Context, Decimal, InvalidOperation from functools import wraps from operator import itemgetter from pprint import pformat from urllib.parse import quote from django.utils import formats from django.utils.dateformat import format, time_format from django.utils.encoding import iri_to_uri from django.utils.html import ( avoid_wrapping, conditional_escape, escape, escapejs, json_script as _json_script, linebreaks, strip_tags, urlize as _urlize, ) from django.utils.safestring import SafeData, mark_safe from django.utils.text import ( Truncator, normalize_newlines, phone2numeric, slugify as _slugify, wrap, ) from django.utils.timesince import timesince, timeuntil from django.utils.translation import gettext, ngettext from .base import Variable, VariableDoesNotExist from .library import Library register = Library() ####################### # STRING DECORATOR # ####################### def stringfilter(func): """ Decorator for filters which should only receive strings. The object passed as the first positional argument will be converted to a string. """ def _dec(*args, **kwargs): args = list(args) args[0] = str(args[0]) if (isinstance(args[0], SafeData) and getattr(_dec._decorated_function, 'is_safe', False)): return mark_safe(func(*args, **kwargs)) return func(*args, **kwargs) # Include a reference to the real function (used to check original # arguments by the template parser, and to bear the 'is_safe' attribute # when multiple decorators are applied). _dec._decorated_function = getattr(func, '_decorated_function', func) return wraps(func)(_dec) ################### # STRINGS # ################### @register.filter(is_safe=True) @stringfilter def addslashes(value): """ Add slashes before quotes. Useful for escaping strings in CSV, for example. Less useful for escaping JavaScript; use the ``escapejs`` filter instead. """ return value.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'") @register.filter(is_safe=True) @stringfilter def capfirst(value): """Capitalize the first character of the value.""" return value and value[0].upper() + value[1:] @register.filter("escapejs") @stringfilter def escapejs_filter(value): """Hex encode characters for use in JavaScript strings.""" return escapejs(value) @register.filter(is_safe=True) def json_script(value, element_id): """ Output value JSON-encoded, wrapped in a <script type="application/json"> tag. """ return _json_script(value, element_id) @register.filter(is_safe=True) def floatformat(text, arg=-1): """ Display a float to a specified number of decimal places. If called without an argument, display the floating point number with one decimal place -- but only if there's a decimal place to be displayed: * num1 = 34.23234 * num2 = 34.00000 * num3 = 34.26000 * {{ num1|floatformat }} displays "34.2" * {{ num2|floatformat }} displays "34" * {{ num3|floatformat }} displays "34.3" If arg is positive, always display exactly arg number of decimal places: * {{ num1|floatformat:3 }} displays "34.232" * {{ num2|floatformat:3 }} displays "34.000" * {{ num3|floatformat:3 }} displays "34.260" If arg is negative, display arg number of decimal places -- but only if there are places to be displayed: * {{ num1|floatformat:"-3" }} displays "34.232" * {{ num2|floatformat:"-3" }} displays "34" * {{ num3|floatformat:"-3" }} displays "34.260" If the input float is infinity or NaN, display the string representation of that value. 
""" try: input_val = repr(text) d = Decimal(input_val) except InvalidOperation: try: d = Decimal(str(float(text))) except (ValueError, InvalidOperation, TypeError): return '' try: p = int(arg) except ValueError: return input_val try: m = int(d) - d except (ValueError, OverflowError, InvalidOperation): return input_val if not m and p < 0: return mark_safe(formats.number_format('%d' % (int(d)), 0)) exp = Decimal(1).scaleb(-abs(p)) # Set the precision high enough to avoid an exception (#15789). tupl = d.as_tuple() units = len(tupl[1]) units += -tupl[2] if m else tupl[2] prec = abs(p) + units + 1 # Avoid conversion to scientific notation by accessing `sign`, `digits`, # and `exponent` from Decimal.as_tuple() directly. rounded_d = d.quantize(exp, ROUND_HALF_UP, Context(prec=prec)) sign, digits, exponent = rounded_d.as_tuple() digits = [str(digit) for digit in reversed(digits)] while len(digits) <= abs(exponent): digits.append('0') digits.insert(-exponent, '.') if sign and rounded_d: digits.append('-') number = ''.join(reversed(digits)) return mark_safe(formats.number_format(number, abs(p))) @register.filter(is_safe=True) @stringfilter def iriencode(value): """Escape an IRI value for use in a URL.""" return iri_to_uri(value) @register.filter(is_safe=True, needs_autoescape=True) @stringfilter def linenumbers(value, autoescape=True): """Display text with line numbers.""" lines = value.split('\n') # Find the maximum width of the line count, for use with zero padding # string format command width = str(len(str(len(lines)))) if not autoescape or isinstance(value, SafeData): for i, line in enumerate(lines): lines[i] = ("%0" + width + "d. %s") % (i + 1, line) else: for i, line in enumerate(lines): lines[i] = ("%0" + width + "d. %s") % (i + 1, escape(line)) return mark_safe('\n'.join(lines)) @register.filter(is_safe=True) @stringfilter def lower(value): """Convert a string into all lowercase.""" return value.lower() @register.filter(is_safe=False) @stringfilter def make_list(value): """ Return the value turned into a list. For an integer, it's a list of digits. For a string, it's a list of characters. """ return list(value) @register.filter(is_safe=True) @stringfilter def slugify(value): """ Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace. """ return _slugify(value) @register.filter(is_safe=True) def stringformat(value, arg): """ Format the variable according to the arg, a string formatting specifier. This specifier uses Python string formatting syntax, with the exception that the leading "%" is dropped. See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting for documentation of Python string formatting. """ if isinstance(value, tuple): value = str(value) try: return ("%" + str(arg)) % value except (ValueError, TypeError): return "" @register.filter(is_safe=True) @stringfilter def title(value): """Convert a string into titlecase.""" t = re.sub("([a-z])'([A-Z])", lambda m: m.group(0).lower(), value.title()) return re.sub(r"\d([A-Z])", lambda m: m.group(0).lower(), t) @register.filter(is_safe=True) @stringfilter def truncatechars(value, arg): """Truncate a string after `arg` number of characters.""" try: length = int(arg) except ValueError: # Invalid literal for int(). return value # Fail silently. 
return Truncator(value).chars(length) @register.filter(is_safe=True) @stringfilter def truncatechars_html(value, arg): """ Truncate HTML after `arg` number of chars. Preserve newlines in the HTML. """ try: length = int(arg) except ValueError: # invalid literal for int() return value # Fail silently. return Truncator(value).chars(length, html=True) @register.filter(is_safe=True) @stringfilter def truncatewords(value, arg): """ Truncate a string after `arg` number of words. Remove newlines within the string. """ try: length = int(arg) except ValueError: # Invalid literal for int(). return value # Fail silently. return Truncator(value).words(length, truncate=' …') @register.filter(is_safe=True) @stringfilter def truncatewords_html(value, arg): """ Truncate HTML after `arg` number of words. Preserve newlines in the HTML. """ try: length = int(arg) except ValueError: # invalid literal for int() return value # Fail silently. return Truncator(value).words(length, html=True, truncate=' …') @register.filter(is_safe=False) @stringfilter def upper(value): """Convert a string into all uppercase.""" return value.upper() @register.filter(is_safe=False) @stringfilter def urlencode(value, safe=None): """ Escape a value for use in a URL. The ``safe`` parameter determines the characters which should not be escaped by Python's quote() function. If not provided, use the default safe characters (but an empty string can be provided when *all* characters should be escaped). """ kwargs = {} if safe is not None: kwargs['safe'] = safe return quote(value, **kwargs) @register.filter(is_safe=True, needs_autoescape=True) @stringfilter def urlize(value, autoescape=True): """Convert URLs in plain text into clickable links.""" return mark_safe(_urlize(value, nofollow=True, autoescape=autoescape)) @register.filter(is_safe=True, needs_autoescape=True) @stringfilter def urlizetrunc(value, limit, autoescape=True): """ Convert URLs into clickable links, truncating URLs to the given character limit, and adding 'rel=nofollow' attribute to discourage spamming. Argument: Length to truncate URLs to. """ return mark_safe(_urlize(value, trim_url_limit=int(limit), nofollow=True, autoescape=autoescape)) @register.filter(is_safe=False) @stringfilter def wordcount(value): """Return the number of words.""" return len(value.split()) @register.filter(is_safe=True) @stringfilter def wordwrap(value, arg): """Wrap words at `arg` line length.""" return wrap(value, int(arg)) @register.filter(is_safe=True) @stringfilter def ljust(value, arg): """Left-align the value in a field of a given width.""" return value.ljust(int(arg)) @register.filter(is_safe=True) @stringfilter def rjust(value, arg): """Right-align the value in a field of a given width.""" return value.rjust(int(arg)) @register.filter(is_safe=True) @stringfilter def center(value, arg): """Center the value in a field of a given width.""" return value.center(int(arg)) @register.filter @stringfilter def cut(value, arg): """Remove all values of arg from the given string.""" safe = isinstance(value, SafeData) value = value.replace(arg, '') if safe and arg != ';': return mark_safe(value) return value ################### # HTML STRINGS # ################### @register.filter("escape", is_safe=True) @stringfilter def escape_filter(value): """Mark the value as a string that should be auto-escaped.""" return conditional_escape(value) @register.filter(is_safe=True) @stringfilter def force_escape(value): """ Escape a string's HTML. 
Return a new string containing the escaped characters (as opposed to "escape", which marks the content for later possible escaping). """ return escape(value) @register.filter("linebreaks", is_safe=True, needs_autoescape=True) @stringfilter def linebreaks_filter(value, autoescape=True): """ Replace line breaks in plain text with appropriate HTML; a single newline becomes an HTML line break (``<br>``) and a new line followed by a blank line becomes a paragraph break (``</p>``). """ autoescape = autoescape and not isinstance(value, SafeData) return mark_safe(linebreaks(value, autoescape)) @register.filter(is_safe=True, needs_autoescape=True) @stringfilter def linebreaksbr(value, autoescape=True): """ Convert all newlines in a piece of plain text to HTML line breaks (``<br>``). """ autoescape = autoescape and not isinstance(value, SafeData) value = normalize_newlines(value) if autoescape: value = escape(value) return mark_safe(value.replace('\n', '<br>')) @register.filter(is_safe=True) @stringfilter def safe(value): """Mark the value as a string that should not be auto-escaped.""" return mark_safe(value) @register.filter(is_safe=True) def safeseq(value): """ A "safe" filter for sequences. Mark each element in the sequence, individually, as safe, after converting them to strings. Return a list with the results. """ return [mark_safe(obj) for obj in value] @register.filter(is_safe=True) @stringfilter def striptags(value): """Strip all [X]HTML tags.""" return strip_tags(value) ################### # LISTS # ################### def _property_resolver(arg): """ When arg is convertible to float, behave like operator.itemgetter(arg) Otherwise, behave like Variable(arg).resolve >>> _property_resolver(1)('abc') 'b' >>> _property_resolver('1')('abc') Traceback (most recent call last): ... TypeError: string indices must be integers >>> class Foo: ... a = 42 ... b = 3.14 ... c = 'Hey!' >>> _property_resolver('b')(Foo()) 3.14 """ try: float(arg) except ValueError: return Variable(arg).resolve else: return itemgetter(arg) @register.filter(is_safe=False) def dictsort(value, arg): """ Given a list of dicts, return that list sorted by the property given in the argument. """ try: return sorted(value, key=_property_resolver(arg)) except (TypeError, VariableDoesNotExist): return '' @register.filter(is_safe=False) def dictsortreversed(value, arg): """ Given a list of dicts, return that list sorted in reverse order by the property given in the argument. """ try: return sorted(value, key=_property_resolver(arg), reverse=True) except (TypeError, VariableDoesNotExist): return '' @register.filter(is_safe=False) def first(value): """Return the first item in a list.""" try: return value[0] except IndexError: return '' @register.filter(is_safe=True, needs_autoescape=True) def join(value, arg, autoescape=True): """Join a list with a string, like Python's ``str.join(list)``.""" try: if autoescape: value = [conditional_escape(v) for v in value] data = conditional_escape(arg).join(value) except TypeError: # Fail silently if arg isn't iterable. 
return value return mark_safe(data) @register.filter(is_safe=True) def last(value): """Return the last item in a list.""" try: return value[-1] except IndexError: return '' @register.filter(is_safe=False) def length(value): """Return the length of the value - useful for lists.""" try: return len(value) except (ValueError, TypeError): return 0 @register.filter(is_safe=False) def length_is(value, arg): """Return a boolean of whether the value's length is the argument.""" try: return len(value) == int(arg) except (ValueError, TypeError): return '' @register.filter(is_safe=True) def random(value): """Return a random item from the list.""" return random_module.choice(value) @register.filter("slice", is_safe=True) def slice_filter(value, arg): """ Return a slice of the list using the same syntax as Python's list slicing. """ try: bits = [] for x in str(arg).split(':'): if not x: bits.append(None) else: bits.append(int(x)) return value[slice(*bits)] except (ValueError, TypeError): return value # Fail silently. @register.filter(is_safe=True, needs_autoescape=True) def unordered_list(value, autoescape=True): """ Recursively take a self-nested list and return an HTML unordered list -- WITHOUT opening and closing <ul> tags. Assume the list is in the proper format. For example, if ``var`` contains: ``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then ``{{ var|unordered_list }}`` returns:: <li>States <ul> <li>Kansas <ul> <li>Lawrence</li> <li>Topeka</li> </ul> </li> <li>Illinois</li> </ul> </li> """ if autoescape: escaper = conditional_escape else: def escaper(x): return x def walk_items(item_list): item_iterator = iter(item_list) try: item = next(item_iterator) while True: try: next_item = next(item_iterator) except StopIteration: yield item, None break if isinstance(next_item, (list, tuple, types.GeneratorType)): try: iter(next_item) except TypeError: pass else: yield item, next_item item = next(item_iterator) continue yield item, None item = next_item except StopIteration: pass def list_formatter(item_list, tabs=1): indent = '\t' * tabs output = [] for item, children in walk_items(item_list): sublist = '' if children: sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % ( indent, list_formatter(children, tabs + 1), indent, indent) output.append('%s<li>%s%s</li>' % ( indent, escaper(item), sublist)) return '\n'.join(output) return mark_safe(list_formatter(value)) ################### # INTEGERS # ################### @register.filter(is_safe=False) def add(value, arg): """Add the arg to the value.""" try: return int(value) + int(arg) except (ValueError, TypeError): try: return value + arg except Exception: return '' @register.filter(is_safe=False) def get_digit(value, arg): """ Given a whole number, return the requested digit of it, where 1 is the right-most digit, 2 is the second-right-most digit, etc. Return the original value for invalid input (if input or argument is not an integer, or if argument is less than 1). Otherwise, output is always an integer. 
""" try: arg = int(arg) value = int(value) except ValueError: return value # Fail silently for an invalid argument if arg < 1: return value try: return int(str(value)[-arg]) except IndexError: return 0 ################### # DATES # ################### @register.filter(expects_localtime=True, is_safe=False) def date(value, arg=None): """Format a date according to the given format.""" if value in (None, ''): return '' try: return formats.date_format(value, arg) except AttributeError: try: return format(value, arg) except AttributeError: return '' @register.filter(expects_localtime=True, is_safe=False) def time(value, arg=None): """Format a time according to the given format.""" if value in (None, ''): return '' try: return formats.time_format(value, arg) except (AttributeError, TypeError): try: return time_format(value, arg) except (AttributeError, TypeError): return '' @register.filter("timesince", is_safe=False) def timesince_filter(value, arg=None): """Format a date as the time since that date (i.e. "4 days, 6 hours").""" if not value: return '' try: if arg: return timesince(value, arg) return timesince(value) except (ValueError, TypeError): return '' @register.filter("timeuntil", is_safe=False) def timeuntil_filter(value, arg=None): """Format a date as the time until that date (i.e. "4 days, 6 hours").""" if not value: return '' try: return timeuntil(value, arg) except (ValueError, TypeError): return '' ################### # LOGIC # ################### @register.filter(is_safe=False) def default(value, arg): """If value is unavailable, use given default.""" return value or arg @register.filter(is_safe=False) def default_if_none(value, arg): """If value is None, use given default.""" if value is None: return arg return value @register.filter(is_safe=False) def divisibleby(value, arg): """Return True if the value is divisible by the argument.""" return int(value) % int(arg) == 0 @register.filter(is_safe=False) def yesno(value, arg=None): """ Given a string mapping values for true, false, and (optionally) None, return one of those strings according to the value: ========== ====================== ================================== Value Argument Outputs ========== ====================== ================================== ``True`` ``"yeah,no,maybe"`` ``yeah`` ``False`` ``"yeah,no,maybe"`` ``no`` ``None`` ``"yeah,no,maybe"`` ``maybe`` ``None`` ``"yeah,no"`` ``"no"`` (converts None to False if no mapping for None is given. ========== ====================== ================================== """ if arg is None: arg = gettext('yes,no,maybe') bits = arg.split(',') if len(bits) < 2: return value # Invalid arg. try: yes, no, maybe = bits except ValueError: # Unpack list of wrong size (no "maybe" value provided). yes, no, maybe = bits[0], bits[1], bits[1] if value is None: return maybe if value: return yes return no ################### # MISC # ################### @register.filter(is_safe=True) def filesizeformat(bytes_): """ Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc.). """ try: bytes_ = int(bytes_) except (TypeError, ValueError, UnicodeDecodeError): value = ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0} return avoid_wrapping(value) def filesize_number_format(value): return formats.number_format(round(value, 1), 1) KB = 1 << 10 MB = 1 << 20 GB = 1 << 30 TB = 1 << 40 PB = 1 << 50 negative = bytes_ < 0 if negative: bytes_ = -bytes_ # Allow formatting of negative numbers. 
if bytes_ < KB: value = ngettext("%(size)d byte", "%(size)d bytes", bytes_) % {'size': bytes_} elif bytes_ < MB: value = gettext("%s KB") % filesize_number_format(bytes_ / KB) elif bytes_ < GB: value = gettext("%s MB") % filesize_number_format(bytes_ / MB) elif bytes_ < TB: value = gettext("%s GB") % filesize_number_format(bytes_ / GB) elif bytes_ < PB: value = gettext("%s TB") % filesize_number_format(bytes_ / TB) else: value = gettext("%s PB") % filesize_number_format(bytes_ / PB) if negative: value = "-%s" % value return avoid_wrapping(value) @register.filter(is_safe=False) def pluralize(value, arg='s'): """ Return a plural suffix if the value is not 1, '1', or an object of length 1. By default, use 's' as the suffix: * If value is 0, vote{{ value|pluralize }} display "votes". * If value is 1, vote{{ value|pluralize }} display "vote". * If value is 2, vote{{ value|pluralize }} display "votes". If an argument is provided, use that string instead: * If value is 0, class{{ value|pluralize:"es" }} display "classes". * If value is 1, class{{ value|pluralize:"es" }} display "class". * If value is 2, class{{ value|pluralize:"es" }} display "classes". If the provided argument contains a comma, use the text before the comma for the singular case and the text after the comma for the plural case: * If value is 0, cand{{ value|pluralize:"y,ies" }} display "candies". * If value is 1, cand{{ value|pluralize:"y,ies" }} display "candy". * If value is 2, cand{{ value|pluralize:"y,ies" }} display "candies". """ if ',' not in arg: arg = ',' + arg bits = arg.split(',') if len(bits) > 2: return '' singular_suffix, plural_suffix = bits[:2] try: return singular_suffix if float(value) == 1 else plural_suffix except ValueError: # Invalid string that's not a number. pass except TypeError: # Value isn't a string or a number; maybe it's a list? try: return singular_suffix if len(value) == 1 else plural_suffix except TypeError: # len() of unsized object. pass return '' @register.filter("phone2numeric", is_safe=True) def phone2numeric_filter(value): """Take a phone number and converts it in to its numerical equivalent.""" return phone2numeric(value) @register.filter(is_safe=True) def pprint(value): """A wrapper around pprint.pprint -- for debugging, really.""" try: return pformat(value) except Exception as e: return "Error in formatting: %s: %s" % (e.__class__.__name__, e)
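# Minimal usage sketch: template filters are plain functions and can be called
# directly. settings.configure() is only there to make the sketch runnable
# outside a project; the inputs are illustrative and the expected outputs
# follow the filter docstrings above.
if __name__ == '__main__':
    from django.conf import settings
    settings.configure(USE_I18N=False)

    assert capfirst('django') == 'Django'
    assert cut('Hello world', 'l') == 'Heo word'
    assert floatformat(34.23234) == '34.2'   # one decimal place by default
    assert pluralize(2) == 's'               # and pluralize(1) == ''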
458265872172cb3ffb2a45c8e8d3f7e98c27819f6f5cddd231e57bd0a5de7966
from django.utils.cache import patch_vary_headers from django.utils.deprecation import MiddlewareMixin from django.utils.regex_helper import _lazy_re_compile from django.utils.text import compress_sequence, compress_string re_accepts_gzip = _lazy_re_compile(r'\bgzip\b') class GZipMiddleware(MiddlewareMixin): """ Compress content if the browser allows gzip compression. Set the Vary header accordingly, so that caches will base their storage on the Accept-Encoding header. """ def process_response(self, request, response): # It's not worth attempting to compress really short responses. if not response.streaming and len(response.content) < 200: return response # Avoid gzipping if we've already got a content-encoding. if response.has_header('Content-Encoding'): return response patch_vary_headers(response, ('Accept-Encoding',)) ae = request.META.get('HTTP_ACCEPT_ENCODING', '') if not re_accepts_gzip.search(ae): return response if response.streaming: # Delete the `Content-Length` header for streaming content, because # we won't know the compressed size until we stream it. response.streaming_content = compress_sequence(response.streaming_content) del response['Content-Length'] else: # Return the compressed content only if it's actually shorter. compressed_content = compress_string(response.content) if len(compressed_content) >= len(response.content): return response response.content = compressed_content response['Content-Length'] = str(len(response.content)) # If there is a strong ETag, make it weak to fulfill the requirements # of RFC 7232 section-2.1 while also allowing conditional request # matches on ETags. etag = response.get('ETag') if etag and etag.startswith('"'): response['ETag'] = 'W/' + etag response['Content-Encoding'] = 'gzip' return response
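# Minimal usage sketch: the compression helper imported above can be exercised
# directly; the middleware only swaps in the gzipped body when it is actually
# smaller than the original. To enable it in a project, list
# 'django.middleware.gzip.GZipMiddleware' near the top of MIDDLEWARE in
# settings.py. The demo payload below is illustrative.
if __name__ == '__main__':
    payload = b'x' * 1000                # highly compressible body
    squeezed = compress_string(payload)
    assert len(squeezed) < len(payload)  # shorter, so it would be served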
0249c25fa171554d6356e7f5573585bacda397f79d47bd3e2be2e48a39dc669e
""" This module converts requested URLs to callback view functions. URLResolver is the main class here. Its resolve() method takes a URL (as a string) and returns a ResolverMatch object which provides access to all attributes of the resolved URL match. """ import functools import inspect import re import string from importlib import import_module from urllib.parse import quote from asgiref.local import Local from django.conf import settings from django.core.checks import Error, Warning from django.core.checks.urls import check_resolver from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist from django.utils.datastructures import MultiValueDict from django.utils.functional import cached_property from django.utils.http import RFC3986_SUBDELIMS, escape_leading_slashes from django.utils.regex_helper import _lazy_re_compile, normalize from django.utils.translation import get_language from .converters import get_converter from .exceptions import NoReverseMatch, Resolver404 from .utils import get_callable class ResolverMatch: def __init__(self, func, args, kwargs, url_name=None, app_names=None, namespaces=None, route=None): self.func = func self.args = args self.kwargs = kwargs self.url_name = url_name self.route = route # If a URLRegexResolver doesn't have a namespace or app_name, it passes # in an empty value. self.app_names = [x for x in app_names if x] if app_names else [] self.app_name = ':'.join(self.app_names) self.namespaces = [x for x in namespaces if x] if namespaces else [] self.namespace = ':'.join(self.namespaces) if not hasattr(func, '__name__'): # A class-based view self._func_path = func.__class__.__module__ + '.' + func.__class__.__name__ else: # A function-based view self._func_path = func.__module__ + '.' + func.__name__ view_path = url_name or self._func_path self.view_name = ':'.join(self.namespaces + [view_path]) def __getitem__(self, index): return (self.func, self.args, self.kwargs)[index] def __repr__(self): return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_names=%s, namespaces=%s, route=%s)" % ( self._func_path, self.args, self.kwargs, self.url_name, self.app_names, self.namespaces, self.route, ) def get_resolver(urlconf=None): if urlconf is None: urlconf = settings.ROOT_URLCONF return _get_cached_resolver(urlconf) @functools.lru_cache(maxsize=None) def _get_cached_resolver(urlconf=None): return URLResolver(RegexPattern(r'^/'), urlconf) @functools.lru_cache(maxsize=None) def get_ns_resolver(ns_pattern, resolver, converters): # Build a namespaced resolver for the given parent URLconf pattern. # This makes it possible to have captured parameters in the parent # URLconf pattern. pattern = RegexPattern(ns_pattern) pattern.converters = dict(converters) ns_resolver = URLResolver(pattern, resolver.url_patterns) return URLResolver(RegexPattern(r'^/'), [ns_resolver]) class LocaleRegexDescriptor: def __init__(self, attr): self.attr = attr def __get__(self, instance, cls=None): """ Return a compiled regular expression based on the active language. """ if instance is None: return self # As a performance optimization, if the given regex string is a regular # string (not a lazily-translated string proxy), compile it once and # avoid per-language compilation. 
pattern = getattr(instance, self.attr) if isinstance(pattern, str): instance.__dict__['regex'] = instance._compile(pattern) return instance.__dict__['regex'] language_code = get_language() if language_code not in instance._regex_dict: instance._regex_dict[language_code] = instance._compile(str(pattern)) return instance._regex_dict[language_code] class CheckURLMixin: def describe(self): """ Format the URL pattern for display in warning messages. """ description = "'{}'".format(self) if self.name: description += " [name='{}']".format(self.name) return description def _check_pattern_startswith_slash(self): """ Check that the pattern does not begin with a forward slash. """ regex_pattern = self.regex.pattern if not settings.APPEND_SLASH: # Skip check as it can be useful to start a URL pattern with a slash # when APPEND_SLASH=False. return [] if regex_pattern.startswith(('/', '^/', '^\\/')) and not regex_pattern.endswith('/'): warning = Warning( "Your URL pattern {} has a route beginning with a '/'. Remove this " "slash as it is unnecessary. If this pattern is targeted in an " "include(), ensure the include() pattern has a trailing '/'.".format( self.describe() ), id="urls.W002", ) return [warning] else: return [] class RegexPattern(CheckURLMixin): regex = LocaleRegexDescriptor('_regex') def __init__(self, regex, name=None, is_endpoint=False): self._regex = regex self._regex_dict = {} self._is_endpoint = is_endpoint self.name = name self.converters = {} def match(self, path): match = self.regex.search(path) if match: # If there are any named groups, use those as kwargs, ignoring # non-named groups. Otherwise, pass all non-named arguments as # positional arguments. kwargs = {k: v for k, v in match.groupdict().items() if v is not None} args = () if kwargs else match.groups() return path[match.end():], args, kwargs return None def check(self): warnings = [] warnings.extend(self._check_pattern_startswith_slash()) if not self._is_endpoint: warnings.extend(self._check_include_trailing_dollar()) return warnings def _check_include_trailing_dollar(self): regex_pattern = self.regex.pattern if regex_pattern.endswith('$') and not regex_pattern.endswith(r'\$'): return [Warning( "Your URL pattern {} uses include with a route ending with a '$'. " "Remove the dollar from the route to avoid problems including " "URLs.".format(self.describe()), id='urls.W001', )] else: return [] def _compile(self, regex): """Compile and return the given regular expression.""" try: return re.compile(regex) except re.error as e: raise ImproperlyConfigured( '"%s" is not a valid regular expression: %s' % (regex, e) ) def __str__(self): return str(self._regex) _PATH_PARAMETER_COMPONENT_RE = _lazy_re_compile( r'<(?:(?P<converter>[^>:]+):)?(?P<parameter>\w+)>' ) def _route_to_regex(route, is_endpoint=False): """ Convert a path pattern into a regular expression. Return the regular expression and a dictionary mapping the capture names to the converters. For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)' and {'pk': <django.urls.converters.IntConverter>}. """ if not set(route).isdisjoint(string.whitespace): raise ImproperlyConfigured("URL route '%s' cannot contain whitespace." 
% route) original_route = route parts = ['^'] converters = {} while True: match = _PATH_PARAMETER_COMPONENT_RE.search(route) if not match: parts.append(re.escape(route)) break parts.append(re.escape(route[:match.start()])) route = route[match.end():] parameter = match.group('parameter') if not parameter.isidentifier(): raise ImproperlyConfigured( "URL route '%s' uses parameter name %r which isn't a valid " "Python identifier." % (original_route, parameter) ) raw_converter = match.group('converter') if raw_converter is None: # If a converter isn't specified, the default is `str`. raw_converter = 'str' try: converter = get_converter(raw_converter) except KeyError as e: raise ImproperlyConfigured( "URL route '%s' uses invalid converter %s." % (original_route, e) ) converters[parameter] = converter parts.append('(?P<' + parameter + '>' + converter.regex + ')') if is_endpoint: parts.append('$') return ''.join(parts), converters class RoutePattern(CheckURLMixin): regex = LocaleRegexDescriptor('_route') def __init__(self, route, name=None, is_endpoint=False): self._route = route self._regex_dict = {} self._is_endpoint = is_endpoint self.name = name self.converters = _route_to_regex(str(route), is_endpoint)[1] def match(self, path): match = self.regex.search(path) if match: # RoutePattern doesn't allow non-named groups so args are ignored. kwargs = match.groupdict() for key, value in kwargs.items(): converter = self.converters[key] try: kwargs[key] = converter.to_python(value) except ValueError: return None return path[match.end():], (), kwargs return None def check(self): warnings = self._check_pattern_startswith_slash() route = self._route if '(?P<' in route or route.startswith('^') or route.endswith('$'): warnings.append(Warning( "Your URL pattern {} has a route that contains '(?P<', begins " "with a '^', or ends with a '$'. This was likely an oversight " "when migrating to django.urls.path().".format(self.describe()), id='2_0.W001', )) return warnings def _compile(self, route): return re.compile(_route_to_regex(route, self._is_endpoint)[0]) def __str__(self): return str(self._route) class LocalePrefixPattern: def __init__(self, prefix_default_language=True): self.prefix_default_language = prefix_default_language self.converters = {} @property def regex(self): # This is only used by reverse() and cached in _reverse_dict. return re.compile(self.language_prefix) @property def language_prefix(self): language_code = get_language() or settings.LANGUAGE_CODE if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language: return '' else: return '%s/' % language_code def match(self, path): language_prefix = self.language_prefix if path.startswith(language_prefix): return path[len(language_prefix):], (), {} return None def check(self): return [] def describe(self): return "'{}'".format(self) def __str__(self): return self.language_prefix class URLPattern: def __init__(self, pattern, callback, default_args=None, name=None): self.pattern = pattern self.callback = callback # the view self.default_args = default_args or {} self.name = name def __repr__(self): return '<%s %s>' % (self.__class__.__name__, self.pattern.describe()) def check(self): warnings = self._check_pattern_name() warnings.extend(self.pattern.check()) return warnings def _check_pattern_name(self): """ Check that the pattern name does not contain a colon. """ if self.pattern.name is not None and ":" in self.pattern.name: warning = Warning( "Your URL pattern {} has a name including a ':'. 
Remove the colon, to " "avoid ambiguous namespace references.".format(self.pattern.describe()), id="urls.W003", ) return [warning] else: return [] def resolve(self, path): match = self.pattern.match(path) if match: new_path, args, kwargs = match # Pass any extra_kwargs as **kwargs. kwargs.update(self.default_args) return ResolverMatch(self.callback, args, kwargs, self.pattern.name, route=str(self.pattern)) @cached_property def lookup_str(self): """ A string that identifies the view (e.g. 'path.to.view_function' or 'path.to.ClassBasedView'). """ callback = self.callback if isinstance(callback, functools.partial): callback = callback.func if not hasattr(callback, '__name__'): return callback.__module__ + "." + callback.__class__.__name__ return callback.__module__ + "." + callback.__qualname__ class URLResolver: def __init__(self, pattern, urlconf_name, default_kwargs=None, app_name=None, namespace=None): self.pattern = pattern # urlconf_name is the dotted Python path to the module defining # urlpatterns. It may also be an object with an urlpatterns attribute # or urlpatterns itself. self.urlconf_name = urlconf_name self.callback = None self.default_kwargs = default_kwargs or {} self.namespace = namespace self.app_name = app_name self._reverse_dict = {} self._namespace_dict = {} self._app_dict = {} # set of dotted paths to all functions and classes that are used in # urlpatterns self._callback_strs = set() self._populated = False self._local = Local() def __repr__(self): if isinstance(self.urlconf_name, list) and self.urlconf_name: # Don't bother to output the whole list, it can be huge urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__ else: urlconf_repr = repr(self.urlconf_name) return '<%s %s (%s:%s) %s>' % ( self.__class__.__name__, urlconf_repr, self.app_name, self.namespace, self.pattern.describe(), ) def check(self): messages = [] for pattern in self.url_patterns: messages.extend(check_resolver(pattern)) messages.extend(self._check_custom_error_handlers()) return messages or self.pattern.check() def _check_custom_error_handlers(self): messages = [] # All handlers take (request, exception) arguments except handler500 # which takes (request). for status_code, num_parameters in [(400, 2), (403, 2), (404, 2), (500, 1)]: try: handler, param_dict = self.resolve_error_handler(status_code) except (ImportError, ViewDoesNotExist) as e: path = getattr(self.urlconf_module, 'handler%s' % status_code) msg = ( "The custom handler{status_code} view '{path}' could not be imported." ).format(status_code=status_code, path=path) messages.append(Error(msg, hint=str(e), id='urls.E008')) continue signature = inspect.signature(handler) args = [None] * num_parameters try: signature.bind(*args) except TypeError: msg = ( "The custom handler{status_code} view '{path}' does not " "take the correct number of arguments ({args})." ).format( status_code=status_code, path=handler.__module__ + '.' + handler.__qualname__, args='request, exception' if num_parameters == 2 else 'request', ) messages.append(Error(msg, id='urls.E007')) return messages def _populate(self): # Short-circuit if called recursively in this thread to prevent # infinite recursion. Concurrent threads may call this at the same # time and will need to continue, so set 'populating' on a # thread-local variable. 
if getattr(self._local, 'populating', False): return try: self._local.populating = True lookups = MultiValueDict() namespaces = {} apps = {} language_code = get_language() for url_pattern in reversed(self.url_patterns): p_pattern = url_pattern.pattern.regex.pattern if p_pattern.startswith('^'): p_pattern = p_pattern[1:] if isinstance(url_pattern, URLPattern): self._callback_strs.add(url_pattern.lookup_str) bits = normalize(url_pattern.pattern.regex.pattern) lookups.appendlist( url_pattern.callback, (bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters) ) if url_pattern.name is not None: lookups.appendlist( url_pattern.name, (bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters) ) else: # url_pattern is a URLResolver. url_pattern._populate() if url_pattern.app_name: apps.setdefault(url_pattern.app_name, []).append(url_pattern.namespace) namespaces[url_pattern.namespace] = (p_pattern, url_pattern) else: for name in url_pattern.reverse_dict: for matches, pat, defaults, converters in url_pattern.reverse_dict.getlist(name): new_matches = normalize(p_pattern + pat) lookups.appendlist( name, ( new_matches, p_pattern + pat, {**defaults, **url_pattern.default_kwargs}, {**self.pattern.converters, **url_pattern.pattern.converters, **converters} ) ) for namespace, (prefix, sub_pattern) in url_pattern.namespace_dict.items(): current_converters = url_pattern.pattern.converters sub_pattern.pattern.converters.update(current_converters) namespaces[namespace] = (p_pattern + prefix, sub_pattern) for app_name, namespace_list in url_pattern.app_dict.items(): apps.setdefault(app_name, []).extend(namespace_list) self._callback_strs.update(url_pattern._callback_strs) self._namespace_dict[language_code] = namespaces self._app_dict[language_code] = apps self._reverse_dict[language_code] = lookups self._populated = True finally: self._local.populating = False @property def reverse_dict(self): language_code = get_language() if language_code not in self._reverse_dict: self._populate() return self._reverse_dict[language_code] @property def namespace_dict(self): language_code = get_language() if language_code not in self._namespace_dict: self._populate() return self._namespace_dict[language_code] @property def app_dict(self): language_code = get_language() if language_code not in self._app_dict: self._populate() return self._app_dict[language_code] @staticmethod def _join_route(route1, route2): """Join two routes, without the starting ^ in the second route.""" if not route1: return route2 if route2.startswith('^'): route2 = route2[1:] return route1 + route2 def _is_callback(self, name): if not self._populated: self._populate() return name in self._callback_strs def resolve(self, path): path = str(path) # path may be a reverse_lazy object tried = [] match = self.pattern.match(path) if match: new_path, args, kwargs = match for pattern in self.url_patterns: try: sub_match = pattern.resolve(new_path) except Resolver404 as e: sub_tried = e.args[0].get('tried') if sub_tried is not None: tried.extend([pattern] + t for t in sub_tried) else: tried.append([pattern]) else: if sub_match: # Merge captured arguments in match with submatch sub_match_dict = {**kwargs, **self.default_kwargs} # Update the sub_match_dict with the kwargs from the sub_match. sub_match_dict.update(sub_match.kwargs) # If there are *any* named groups, ignore all non-named groups. # Otherwise, pass all non-named arguments as positional arguments. 
sub_match_args = sub_match.args if not sub_match_dict: sub_match_args = args + sub_match.args current_route = '' if isinstance(pattern, URLPattern) else str(pattern.pattern) return ResolverMatch( sub_match.func, sub_match_args, sub_match_dict, sub_match.url_name, [self.app_name] + sub_match.app_names, [self.namespace] + sub_match.namespaces, self._join_route(current_route, sub_match.route), ) tried.append([pattern]) raise Resolver404({'tried': tried, 'path': new_path}) raise Resolver404({'path': path}) @cached_property def urlconf_module(self): if isinstance(self.urlconf_name, str): return import_module(self.urlconf_name) else: return self.urlconf_name @cached_property def url_patterns(self): # urlconf_module might be a valid set of patterns, so we default to it patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module) try: iter(patterns) except TypeError: msg = ( "The included URLconf '{name}' does not appear to have any " "patterns in it. If you see valid patterns in the file then " "the issue is probably caused by a circular import." ) raise ImproperlyConfigured(msg.format(name=self.urlconf_name)) return patterns def resolve_error_handler(self, view_type): callback = getattr(self.urlconf_module, 'handler%s' % view_type, None) if not callback: # No handler specified in file; use lazy import, since # django.conf.urls imports this file. from django.conf import urls callback = getattr(urls, 'handler%s' % view_type) return get_callable(callback), {} def reverse(self, lookup_view, *args, **kwargs): return self._reverse_with_prefix(lookup_view, '', *args, **kwargs) def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs): if args and kwargs: raise ValueError("Don't mix *args and **kwargs in call to reverse()!") if not self._populated: self._populate() possibilities = self.reverse_dict.getlist(lookup_view) for possibility, pattern, defaults, converters in possibilities: for result, params in possibility: if args: if len(args) != len(params): continue candidate_subs = dict(zip(params, args)) else: if set(kwargs).symmetric_difference(params).difference(defaults): continue if any(kwargs.get(k, v) != v for k, v in defaults.items()): continue candidate_subs = kwargs # Convert the candidate subs to text using Converter.to_url(). text_candidate_subs = {} for k, v in candidate_subs.items(): if k in converters: text_candidate_subs[k] = converters[k].to_url(v) else: text_candidate_subs[k] = str(v) # WSGI provides decoded URLs, without %xx escapes, and the URL # resolver operates on such URLs. First substitute arguments # without quoting to build a decoded URL and look for a match. # Then, if we have a match, redo the substitution with quoted # arguments in order to return a properly encoded URL. candidate_pat = _prefix.replace('%', '%%') + result if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs): # safe characters from `pchar` definition of RFC 3986 url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@') # Don't allow construction of scheme relative urls. return escape_leading_slashes(url) # lookup_view can be URL name or callable, but callables are not # friendly in error messages. 
m = getattr(lookup_view, '__module__', None) n = getattr(lookup_view, '__name__', None) if m is not None and n is not None: lookup_view_s = "%s.%s" % (m, n) else: lookup_view_s = lookup_view patterns = [pattern for (_, pattern, _, _) in possibilities] if patterns: if args: arg_msg = "arguments '%s'" % (args,) elif kwargs: arg_msg = "keyword arguments '%s'" % (kwargs,) else: arg_msg = "no arguments" msg = ( "Reverse for '%s' with %s not found. %d pattern(s) tried: %s" % (lookup_view_s, arg_msg, len(patterns), patterns) ) else: msg = ( "Reverse for '%(view)s' not found. '%(view)s' is not " "a valid view function or pattern name." % {'view': lookup_view_s} ) raise NoReverseMatch(msg)
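# Minimal usage sketch: _route_to_regex() is the translation step behind
# django.urls.path(). The route string below is illustrative; the behavior
# mirrors the function's own docstring. Matching against the compiled pattern
# (rather than comparing the regex string) keeps the check independent of how
# re.escape() handles '/' on a given Python version.
if __name__ == '__main__':
    pattern, converters = _route_to_regex('articles/<int:year>/', is_endpoint=True)
    match = re.match(pattern, 'articles/2020/')
    assert match is not None and match.group('year') == '2020'
    assert converters['year'].to_python('2020') == 2020  # IntConverter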
87a5af9f2f1f2f84484b6efac186dc2539ba16fb1a53a32ac1cba08fde12fee7
""" HTML Widget classes """ import copy import datetime import warnings from collections import defaultdict from itertools import chain from django.conf import settings from django.forms.utils import to_current_timezone from django.templatetags.static import static from django.utils import datetime_safe, formats from django.utils.datastructures import OrderedSet from django.utils.dates import MONTHS from django.utils.formats import get_format from django.utils.html import format_html, html_safe from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import mark_safe from django.utils.topological_sort import ( CyclicDependencyError, stable_topological_sort, ) from django.utils.translation import gettext_lazy as _ from .renderers import get_default_renderer __all__ = ( 'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'NumberInput', 'EmailInput', 'URLInput', 'PasswordInput', 'HiddenInput', 'MultipleHiddenInput', 'FileInput', 'ClearableFileInput', 'Textarea', 'DateInput', 'DateTimeInput', 'TimeInput', 'CheckboxInput', 'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect', 'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget', 'SplitHiddenDateTimeWidget', 'SelectDateWidget', ) MEDIA_TYPES = ('css', 'js') class MediaOrderConflictWarning(RuntimeWarning): pass @html_safe class Media: def __init__(self, media=None, css=None, js=None): if media is not None: css = getattr(media, 'css', {}) js = getattr(media, 'js', []) else: if css is None: css = {} if js is None: js = [] self._css_lists = [css] self._js_lists = [js] def __repr__(self): return 'Media(css=%r, js=%r)' % (self._css, self._js) def __str__(self): return self.render() @property def _css(self): css = defaultdict(list) for css_list in self._css_lists: for medium, sublist in css_list.items(): css[medium].append(sublist) return {medium: self.merge(*lists) for medium, lists in css.items()} @property def _js(self): return self.merge(*self._js_lists) def render(self): return mark_safe('\n'.join(chain.from_iterable(getattr(self, 'render_' + name)() for name in MEDIA_TYPES))) def render_js(self): return [ format_html( '<script type="text/javascript" src="{}"></script>', self.absolute_path(path) ) for path in self._js ] def render_css(self): # To keep rendering order consistent, we can't just iterate over items(). # We need to sort the keys, and iterate over the sorted list. media = sorted(self._css) return chain.from_iterable([ format_html( '<link href="{}" type="text/css" media="{}" rel="stylesheet">', self.absolute_path(path), medium ) for path in self._css[medium] ] for medium in media) def absolute_path(self, path): """ Given a relative or absolute path to a static asset, return an absolute path. An absolute path will be returned unchanged while a relative path will be passed to django.templatetags.static.static(). """ if path.startswith(('http://', 'https://', '/')): return path return static(path) def __getitem__(self, name): """Return a Media object that only contains media of the given type.""" if name in MEDIA_TYPES: return Media(**{str(name): getattr(self, '_' + name)}) raise KeyError('Unknown media type "%s"' % name) @staticmethod def merge(*lists): """ Merge lists while trying to keep the relative order of the elements. Warn if the lists have the same elements in a different relative order. For static assets it can be important to have them included in the DOM in a certain order. In JavaScript you may not be able to reference a global or in CSS you might want to override a style. 
""" dependency_graph = defaultdict(set) all_items = OrderedSet() for list_ in filter(None, lists): head = list_[0] # The first items depend on nothing but have to be part of the # dependency graph to be included in the result. dependency_graph.setdefault(head, set()) for item in list_: all_items.add(item) # No self dependencies if head != item: dependency_graph[item].add(head) head = item try: return stable_topological_sort(all_items, dependency_graph) except CyclicDependencyError: warnings.warn( 'Detected duplicate Media files in an opposite order: {}'.format( ', '.join(repr(l) for l in lists) ), MediaOrderConflictWarning, ) return list(all_items) def __add__(self, other): combined = Media() combined._css_lists = self._css_lists + other._css_lists combined._js_lists = self._js_lists + other._js_lists return combined def media_property(cls): def _media(self): # Get the media property of the superclass, if it exists sup_cls = super(cls, self) try: base = sup_cls.media except AttributeError: base = Media() # Get the media definition for this class definition = getattr(cls, 'Media', None) if definition: extend = getattr(definition, 'extend', True) if extend: if extend is True: m = base else: m = Media() for medium in extend: m = m + base[medium] return m + Media(definition) return Media(definition) return base return property(_media) class MediaDefiningClass(type): """ Metaclass for classes that can have media definitions. """ def __new__(mcs, name, bases, attrs): new_class = super(MediaDefiningClass, mcs).__new__(mcs, name, bases, attrs) if 'media' not in attrs: new_class.media = media_property(new_class) return new_class class Widget(metaclass=MediaDefiningClass): needs_multipart_form = False # Determines does this widget need multipart form is_localized = False is_required = False supports_microseconds = True def __init__(self, attrs=None): self.attrs = {} if attrs is None else attrs.copy() def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() memo[id(self)] = obj return obj @property def is_hidden(self): return self.input_type == 'hidden' if hasattr(self, 'input_type') else False def subwidgets(self, name, value, attrs=None): context = self.get_context(name, value, attrs) yield context['widget'] def format_value(self, value): """ Return a value as it should appear when rendered in a template. """ if value == '' or value is None: return None if self.is_localized: return formats.localize_input(value) return str(value) def get_context(self, name, value, attrs): context = {} context['widget'] = { 'name': name, 'is_hidden': self.is_hidden, 'required': self.is_required, 'value': self.format_value(value), 'attrs': self.build_attrs(self.attrs, attrs), 'template_name': self.template_name, } return context def render(self, name, value, attrs=None, renderer=None): """Render the widget as an HTML string.""" context = self.get_context(name, value, attrs) return self._render(self.template_name, context, renderer) def _render(self, template_name, context, renderer=None): if renderer is None: renderer = get_default_renderer() return mark_safe(renderer.render(template_name, context)) def build_attrs(self, base_attrs, extra_attrs=None): """Build an attribute dictionary.""" return {**base_attrs, **(extra_attrs or {})} def value_from_datadict(self, data, files, name): """ Given a dictionary of data and this widget's name, return the value of this widget or None if it's not provided. 
""" return data.get(name) def value_omitted_from_data(self, data, files, name): return name not in data def id_for_label(self, id_): """ Return the HTML ID attribute of this Widget for use by a <label>, given the ID of the field. Return None if no ID is available. This hook is necessary because some widgets have multiple HTML elements and, thus, multiple IDs. In that case, this method should return an ID value that corresponds to the first ID in the widget's tags. """ return id_ def use_required_attribute(self, initial): return not self.is_hidden class Input(Widget): """ Base class for all <input> widgets. """ input_type = None # Subclasses must define this. template_name = 'django/forms/widgets/input.html' def __init__(self, attrs=None): if attrs is not None: attrs = attrs.copy() self.input_type = attrs.pop('type', self.input_type) super().__init__(attrs) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['type'] = self.input_type return context class TextInput(Input): input_type = 'text' template_name = 'django/forms/widgets/text.html' class NumberInput(Input): input_type = 'number' template_name = 'django/forms/widgets/number.html' class EmailInput(Input): input_type = 'email' template_name = 'django/forms/widgets/email.html' class URLInput(Input): input_type = 'url' template_name = 'django/forms/widgets/url.html' class PasswordInput(Input): input_type = 'password' template_name = 'django/forms/widgets/password.html' def __init__(self, attrs=None, render_value=False): super().__init__(attrs) self.render_value = render_value def get_context(self, name, value, attrs): if not self.render_value: value = None return super().get_context(name, value, attrs) class HiddenInput(Input): input_type = 'hidden' template_name = 'django/forms/widgets/hidden.html' class MultipleHiddenInput(HiddenInput): """ Handle <input type="hidden"> for fields that have a list of values. """ template_name = 'django/forms/widgets/multiple_hidden.html' def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) final_attrs = context['widget']['attrs'] id_ = context['widget']['attrs'].get('id') subwidgets = [] for index, value_ in enumerate(context['widget']['value']): widget_attrs = final_attrs.copy() if id_: # An ID attribute was given. Add a numeric index as a suffix # so that the inputs don't all have the same ID attribute. 
widget_attrs['id'] = '%s_%s' % (id_, index) widget = HiddenInput() widget.is_required = self.is_required subwidgets.append(widget.get_context(name, value_, widget_attrs)['widget']) context['widget']['subwidgets'] = subwidgets return context def value_from_datadict(self, data, files, name): try: getter = data.getlist except AttributeError: getter = data.get return getter(name) def format_value(self, value): return [] if value is None else value class FileInput(Input): input_type = 'file' needs_multipart_form = True template_name = 'django/forms/widgets/file.html' def format_value(self, value): """File input never renders a value.""" return def value_from_datadict(self, data, files, name): "File widgets take data from FILES, not POST" return files.get(name) def value_omitted_from_data(self, data, files, name): return name not in files FILE_INPUT_CONTRADICTION = object() class ClearableFileInput(FileInput): clear_checkbox_label = _('Clear') initial_text = _('Currently') input_text = _('Change') template_name = 'django/forms/widgets/clearable_file_input.html' def clear_checkbox_name(self, name): """ Given the name of the file input, return the name of the clear checkbox input. """ return name + '-clear' def clear_checkbox_id(self, name): """ Given the name of the clear checkbox input, return the HTML id for it. """ return name + '_id' def is_initial(self, value): """ Return whether value is considered to be initial value. """ return bool(value and getattr(value, 'url', False)) def format_value(self, value): """ Return the file object if it has a defined url attribute. """ if self.is_initial(value): return value def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) checkbox_name = self.clear_checkbox_name(name) checkbox_id = self.clear_checkbox_id(checkbox_name) context['widget'].update({ 'checkbox_name': checkbox_name, 'checkbox_id': checkbox_id, 'is_initial': self.is_initial(value), 'input_text': self.input_text, 'initial_text': self.initial_text, 'clear_checkbox_label': self.clear_checkbox_label, }) return context def value_from_datadict(self, data, files, name): upload = super().value_from_datadict(data, files, name) if not self.is_required and CheckboxInput().value_from_datadict( data, files, self.clear_checkbox_name(name)): if upload: # If the user contradicts themselves (uploads a new file AND # checks the "clear" checkbox), we return a unique marker # object that FileField will turn into a ValidationError. 
return FILE_INPUT_CONTRADICTION # False signals to clear any existing value, as opposed to just None return False return upload def use_required_attribute(self, initial): return super().use_required_attribute(initial) and not initial def value_omitted_from_data(self, data, files, name): return ( super().value_omitted_from_data(data, files, name) and self.clear_checkbox_name(name) not in data ) class Textarea(Widget): template_name = 'django/forms/widgets/textarea.html' def __init__(self, attrs=None): # Use slightly better defaults than HTML's 20x2 box default_attrs = {'cols': '40', 'rows': '10'} if attrs: default_attrs.update(attrs) super().__init__(default_attrs) class DateTimeBaseInput(TextInput): format_key = '' supports_microseconds = False def __init__(self, attrs=None, format=None): super().__init__(attrs) self.format = format or None def format_value(self, value): return formats.localize_input(value, self.format or formats.get_format(self.format_key)[0]) class DateInput(DateTimeBaseInput): format_key = 'DATE_INPUT_FORMATS' template_name = 'django/forms/widgets/date.html' class DateTimeInput(DateTimeBaseInput): format_key = 'DATETIME_INPUT_FORMATS' template_name = 'django/forms/widgets/datetime.html' class TimeInput(DateTimeBaseInput): format_key = 'TIME_INPUT_FORMATS' template_name = 'django/forms/widgets/time.html' # Defined at module level so that CheckboxInput is picklable (#17976) def boolean_check(v): return not (v is False or v is None or v == '') class CheckboxInput(Input): input_type = 'checkbox' template_name = 'django/forms/widgets/checkbox.html' def __init__(self, attrs=None, check_test=None): super().__init__(attrs) # check_test is a callable that takes a value and returns True # if the checkbox should be checked for that value. self.check_test = boolean_check if check_test is None else check_test def format_value(self, value): """Only return the 'value' attribute if value isn't empty.""" if value is True or value is False or value is None or value == '': return return str(value) def get_context(self, name, value, attrs): if self.check_test(value): if attrs is None: attrs = {} attrs['checked'] = True return super().get_context(name, value, attrs) def value_from_datadict(self, data, files, name): if name not in data: # A missing value means False because HTML form submission does not # send results for unselected checkboxes. return False value = data.get(name) # Translate true and false strings to boolean values. values = {'true': True, 'false': False} if isinstance(value, str): value = values.get(value.lower(), value) return bool(value) def value_omitted_from_data(self, data, files, name): # HTML checkboxes don't appear in POST data if not checked, so it's # never known if the value is actually omitted. return False class ChoiceWidget(Widget): allow_multiple_selected = False input_type = None template_name = None option_template_name = None add_id_index = True checked_attribute = {'checked': True} option_inherits_attrs = True def __init__(self, attrs=None, choices=()): super().__init__(attrs) # choices can be any iterable, but we may need to render this widget # multiple times. Thus, collapse it into a list so it can be consumed # more than once. self.choices = list(choices) def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() obj.choices = copy.copy(self.choices) memo[id(self)] = obj return obj def subwidgets(self, name, value, attrs=None): """ Yield all "subwidgets" of this widget. 
Used to enable iterating options from a BoundField for choice widgets. """ value = self.format_value(value) yield from self.options(name, value, attrs) def options(self, name, value, attrs=None): """Yield a flat list of options for this widgets.""" for group in self.optgroups(name, value, attrs): yield from group[1] def optgroups(self, name, value, attrs=None): """Return a list of optgroups for this widget.""" groups = [] has_selected = False for index, (option_value, option_label) in enumerate(self.choices): if option_value is None: option_value = '' subgroup = [] if isinstance(option_label, (list, tuple)): group_name = option_value subindex = 0 choices = option_label else: group_name = None subindex = None choices = [(option_value, option_label)] groups.append((group_name, subgroup, index)) for subvalue, sublabel in choices: selected = ( str(subvalue) in value and (not has_selected or self.allow_multiple_selected) ) has_selected |= selected subgroup.append(self.create_option( name, subvalue, sublabel, selected, index, subindex=subindex, attrs=attrs, )) if subindex is not None: subindex += 1 return groups def create_option(self, name, value, label, selected, index, subindex=None, attrs=None): index = str(index) if subindex is None else "%s_%s" % (index, subindex) if attrs is None: attrs = {} option_attrs = self.build_attrs(self.attrs, attrs) if self.option_inherits_attrs else {} if selected: option_attrs.update(self.checked_attribute) if 'id' in option_attrs: option_attrs['id'] = self.id_for_label(option_attrs['id'], index) return { 'name': name, 'value': value, 'label': label, 'selected': selected, 'index': index, 'attrs': option_attrs, 'type': self.input_type, 'template_name': self.option_template_name, 'wrap_label': True, } def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) context['widget']['optgroups'] = self.optgroups(name, context['widget']['value'], attrs) return context def id_for_label(self, id_, index='0'): """ Use an incremented id for each option where the main widget references the zero index. """ if id_ and self.add_id_index: id_ = '%s_%s' % (id_, index) return id_ def value_from_datadict(self, data, files, name): getter = data.get if self.allow_multiple_selected: try: getter = data.getlist except AttributeError: pass return getter(name) def format_value(self, value): """Return selected values as a list.""" if value is None and self.allow_multiple_selected: return [] if not isinstance(value, (tuple, list)): value = [value] return [str(v) if v is not None else '' for v in value] class Select(ChoiceWidget): input_type = 'select' template_name = 'django/forms/widgets/select.html' option_template_name = 'django/forms/widgets/select_option.html' add_id_index = False checked_attribute = {'selected': True} option_inherits_attrs = False def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.allow_multiple_selected: context['widget']['attrs']['multiple'] = True return context @staticmethod def _choice_has_empty_value(choice): """Return True if the choice's value is empty string or None.""" value, _ = choice return value is None or value == '' def use_required_attribute(self, initial): """ Don't render 'required' if the first <option> has a value, as that's invalid HTML. """ use_required_attribute = super().use_required_attribute(initial) # 'required' is always okay for <select multiple>. 
if self.allow_multiple_selected: return use_required_attribute first_choice = next(iter(self.choices), None) return use_required_attribute and first_choice is not None and self._choice_has_empty_value(first_choice) class NullBooleanSelect(Select): """ A Select Widget intended to be used with NullBooleanField. """ def __init__(self, attrs=None): choices = ( ('unknown', _('Unknown')), ('true', _('Yes')), ('false', _('No')), ) super().__init__(attrs, choices) def format_value(self, value): try: return { True: 'true', False: 'false', 'true': 'true', 'false': 'false', # For backwards compatibility with Django < 2.2. '2': 'true', '3': 'false', }[value] except KeyError: return 'unknown' def value_from_datadict(self, data, files, name): value = data.get(name) return { True: True, 'True': True, 'False': False, False: False, 'true': True, 'false': False, # For backwards compatibility with Django < 2.2. '2': True, '3': False, }.get(value) class SelectMultiple(Select): allow_multiple_selected = True def value_from_datadict(self, data, files, name): try: getter = data.getlist except AttributeError: getter = data.get return getter(name) def value_omitted_from_data(self, data, files, name): # An unselected <select multiple> doesn't appear in POST data, so it's # never known if the value is actually omitted. return False class RadioSelect(ChoiceWidget): input_type = 'radio' template_name = 'django/forms/widgets/radio.html' option_template_name = 'django/forms/widgets/radio_option.html' class CheckboxSelectMultiple(ChoiceWidget): allow_multiple_selected = True input_type = 'checkbox' template_name = 'django/forms/widgets/checkbox_select.html' option_template_name = 'django/forms/widgets/checkbox_option.html' def use_required_attribute(self, initial): # Don't use the 'required' attribute because browser validation would # require all checkboxes to be checked instead of at least one. return False def value_omitted_from_data(self, data, files, name): # HTML checkboxes don't appear in POST data if not checked, so it's # never known if the value is actually omitted. return False def id_for_label(self, id_, index=None): """" Don't include for="field_0" in <label> because clicking such a label would toggle the first checkbox. """ if index is None: return '' return super().id_for_label(id_, index) class MultiWidget(Widget): """ A widget that is composed of multiple widgets. In addition to the values added by Widget.get_context(), this widget adds a list of subwidgets to the context as widget['subwidgets']. These can be looped over and rendered like normal widgets. You'll probably want to use this class with MultiValueField. """ template_name = 'django/forms/widgets/multiwidget.html' def __init__(self, widgets, attrs=None): self.widgets = [w() if isinstance(w, type) else w for w in widgets] super().__init__(attrs) @property def is_hidden(self): return all(w.is_hidden for w in self.widgets) def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) if self.is_localized: for widget in self.widgets: widget.is_localized = self.is_localized # value is a list of values, each corresponding to a widget # in self.widgets. 
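        # Anything else (e.g. a single datetime for SplitDateTimeWidget)
        # is split into per-widget values via decompress().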
if not isinstance(value, list): value = self.decompress(value) final_attrs = context['widget']['attrs'] input_type = final_attrs.pop('type', None) id_ = final_attrs.get('id') subwidgets = [] for i, widget in enumerate(self.widgets): if input_type is not None: widget.input_type = input_type widget_name = '%s_%s' % (name, i) try: widget_value = value[i] except IndexError: widget_value = None if id_: widget_attrs = final_attrs.copy() widget_attrs['id'] = '%s_%s' % (id_, i) else: widget_attrs = final_attrs subwidgets.append(widget.get_context(widget_name, widget_value, widget_attrs)['widget']) context['widget']['subwidgets'] = subwidgets return context def id_for_label(self, id_): if id_: id_ += '_0' return id_ def value_from_datadict(self, data, files, name): return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)] def value_omitted_from_data(self, data, files, name): return all( widget.value_omitted_from_data(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets) ) def decompress(self, value): """ Return a list of decompressed values for the given compressed value. The given value can be assumed to be valid, but not necessarily non-empty. """ raise NotImplementedError('Subclasses must implement this method.') def _get_media(self): """ Media for a multiwidget is the combination of all media of the subwidgets. """ media = Media() for w in self.widgets: media = media + w.media return media media = property(_get_media) def __deepcopy__(self, memo): obj = super().__deepcopy__(memo) obj.widgets = copy.deepcopy(self.widgets) return obj @property def needs_multipart_form(self): return any(w.needs_multipart_form for w in self.widgets) class SplitDateTimeWidget(MultiWidget): """ A widget that splits datetime input into two <input type="text"> boxes. """ supports_microseconds = False template_name = 'django/forms/widgets/splitdatetime.html' def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None): widgets = ( DateInput( attrs=attrs if date_attrs is None else date_attrs, format=date_format, ), TimeInput( attrs=attrs if time_attrs is None else time_attrs, format=time_format, ), ) super().__init__(widgets) def decompress(self, value): if value: value = to_current_timezone(value) return [value.date(), value.time()] return [None, None] class SplitHiddenDateTimeWidget(SplitDateTimeWidget): """ A widget that splits datetime input into two <input type="hidden"> inputs. """ template_name = 'django/forms/widgets/splithiddendatetime.html' def __init__(self, attrs=None, date_format=None, time_format=None, date_attrs=None, time_attrs=None): super().__init__(attrs, date_format, time_format, date_attrs, time_attrs) for widget in self.widgets: widget.input_type = 'hidden' class SelectDateWidget(Widget): """ A widget that splits date input into three <select> boxes. This also serves as an example of a Widget that has more than one HTML element and hence implements value_from_datadict. """ none_value = ('', '---') month_field = '%s_month' day_field = '%s_day' year_field = '%s_year' template_name = 'django/forms/widgets/select_date.html' input_type = 'select' select_widget = Select date_re = _lazy_re_compile(r'(\d{4}|0)-(\d\d?)-(\d\d?)$') def __init__(self, attrs=None, years=None, months=None, empty_label=None): self.attrs = attrs or {} # Optional list or tuple of years to use in the "year" select box. 
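        # When omitted, a ten-year range starting with the current year is
        # used.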
if years: self.years = years else: this_year = datetime.date.today().year self.years = range(this_year, this_year + 10) # Optional dict of months to use in the "month" select box. if months: self.months = months else: self.months = MONTHS # Optional string, list, or tuple to use as empty_label. if isinstance(empty_label, (list, tuple)): if not len(empty_label) == 3: raise ValueError('empty_label list/tuple must have 3 elements.') self.year_none_value = ('', empty_label[0]) self.month_none_value = ('', empty_label[1]) self.day_none_value = ('', empty_label[2]) else: if empty_label is not None: self.none_value = ('', empty_label) self.year_none_value = self.none_value self.month_none_value = self.none_value self.day_none_value = self.none_value def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) date_context = {} year_choices = [(i, str(i)) for i in self.years] if not self.is_required: year_choices.insert(0, self.year_none_value) year_name = self.year_field % name date_context['year'] = self.select_widget(attrs, choices=year_choices).get_context( name=year_name, value=context['widget']['value']['year'], attrs={ **context['widget']['attrs'], 'id': 'id_%s' % year_name, 'placeholder': _('Year') if self.is_required else False, }, ) month_choices = list(self.months.items()) if not self.is_required: month_choices.insert(0, self.month_none_value) month_name = self.month_field % name date_context['month'] = self.select_widget(attrs, choices=month_choices).get_context( name=month_name, value=context['widget']['value']['month'], attrs={ **context['widget']['attrs'], 'id': 'id_%s' % month_name, 'placeholder': _('Month') if self.is_required else False, }, ) day_choices = [(i, i) for i in range(1, 32)] if not self.is_required: day_choices.insert(0, self.day_none_value) day_name = self.day_field % name date_context['day'] = self.select_widget(attrs, choices=day_choices,).get_context( name=day_name, value=context['widget']['value']['day'], attrs={ **context['widget']['attrs'], 'id': 'id_%s' % day_name, 'placeholder': _('Day') if self.is_required else False, }, ) subwidgets = [] for field in self._parse_date_fmt(): subwidgets.append(date_context[field]['widget']) context['widget']['subwidgets'] = subwidgets return context def format_value(self, value): """ Return a dict containing the year, month, and day of the current value. Use dict instead of a datetime to allow invalid dates such as February 31 to display correctly. """ year, month, day = None, None, None if isinstance(value, (datetime.date, datetime.datetime)): year, month, day = value.year, value.month, value.day elif isinstance(value, str): match = self.date_re.match(value) if match: # Convert any zeros in the date to empty strings to match the # empty option value. 
year, month, day = [int(val) or '' for val in match.groups()] elif settings.USE_L10N: input_format = get_format('DATE_INPUT_FORMATS')[0] try: d = datetime.datetime.strptime(value, input_format) except ValueError: pass else: year, month, day = d.year, d.month, d.day return {'year': year, 'month': month, 'day': day} @staticmethod def _parse_date_fmt(): fmt = get_format('DATE_FORMAT') escaped = False for char in fmt: if escaped: escaped = False elif char == '\\': escaped = True elif char in 'Yy': yield 'year' elif char in 'bEFMmNn': yield 'month' elif char in 'dj': yield 'day' def id_for_label(self, id_): for first_select in self._parse_date_fmt(): return '%s_%s' % (id_, first_select) return '%s_month' % id_ def value_from_datadict(self, data, files, name): y = data.get(self.year_field % name) m = data.get(self.month_field % name) d = data.get(self.day_field % name) if y == m == d == '': return None if y is not None and m is not None and d is not None: input_format = get_format('DATE_INPUT_FORMATS')[0] try: date_value = datetime.date(int(y), int(m), int(d)) except ValueError: # Return pseudo-ISO dates with zeros for any unselected values, # e.g. '2017-0-23'. return '%s-%s-%s' % (y or 0, m or 0, d or 0) date_value = datetime_safe.new_date(date_value) return date_value.strftime(input_format) return data.get(name) def value_omitted_from_data(self, data, files, name): return not any( ('{}_{}'.format(name, interval) in data) for interval in ('year', 'month', 'day') )
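

# Example (illustrative sketch, not part of the original module): the three
# <select> submissions of a SelectDateWidget are reassembled by
# value_from_datadict() into a single date string, formatted with the first
# entry of DATE_INPUT_FORMATS (the output below assumes the default English
# formats):
#
#     >>> widget = SelectDateWidget(years=range(2020, 2025))
#     >>> widget.value_from_datadict(
#     ...     {'born_year': '2021', 'born_month': '2', 'born_day': '14'},
#     ...     {}, 'born',
#     ... )
#     '2021-02-14'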
""" Field classes. """ import copy import datetime import math import operator import os import re import uuid from decimal import Decimal, DecimalException from io import BytesIO from urllib.parse import urlsplit, urlunsplit from django.core import validators from django.core.exceptions import ValidationError from django.forms.boundfield import BoundField from django.forms.utils import from_current_timezone, to_current_timezone from django.forms.widgets import ( FILE_INPUT_CONTRADICTION, CheckboxInput, ClearableFileInput, DateInput, DateTimeInput, EmailInput, FileInput, HiddenInput, MultipleHiddenInput, NullBooleanSelect, NumberInput, Select, SelectMultiple, SplitDateTimeWidget, SplitHiddenDateTimeWidget, TextInput, TimeInput, URLInput, ) from django.utils import formats from django.utils.dateparse import parse_duration from django.utils.duration import duration_string from django.utils.ipv6 import clean_ipv6_address from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext_lazy as _, ngettext_lazy __all__ = ( 'Field', 'CharField', 'IntegerField', 'DateField', 'TimeField', 'DateTimeField', 'DurationField', 'RegexField', 'EmailField', 'FileField', 'ImageField', 'URLField', 'BooleanField', 'NullBooleanField', 'ChoiceField', 'MultipleChoiceField', 'ComboField', 'MultiValueField', 'FloatField', 'DecimalField', 'SplitDateTimeField', 'GenericIPAddressField', 'FilePathField', 'SlugField', 'TypedChoiceField', 'TypedMultipleChoiceField', 'UUIDField', ) class Field: widget = TextInput # Default widget to use when rendering this type of Field. hidden_widget = HiddenInput # Default widget to use when rendering this as "hidden". default_validators = [] # Default set of validators # Add an 'invalid' entry to default_error_message if you want a specific # field error message not raised by the field validators. default_error_messages = { 'required': _('This field is required.'), } empty_values = list(validators.EMPTY_VALUES) def __init__(self, *, required=True, widget=None, label=None, initial=None, help_text='', error_messages=None, show_hidden_initial=False, validators=(), localize=False, disabled=False, label_suffix=None): # required -- Boolean that specifies whether the field is required. # True by default. # widget -- A Widget class, or instance of a Widget class, that should # be used for this Field when displaying it. Each Field has a # default Widget that it'll use if you don't specify this. In # most cases, the default widget is TextInput. # label -- A verbose name for this field, for use in displaying this # field in a form. By default, Django will use a "pretty" # version of the form field name, if the Field is part of a # Form. # initial -- A value to use in this Field's initial display. This value # is *not* used as a fallback if data isn't given. # help_text -- An optional string to use as "help text" for this Field. # error_messages -- An optional dictionary to override the default # messages that the field will raise. # show_hidden_initial -- Boolean that specifies if it is needed to render a # hidden widget with initial value after widget. # validators -- List of additional validators to use # localize -- Boolean that specifies if the field should be localized. # disabled -- Boolean that specifies whether the field is disabled, that # is its widget is shown in the form but not editable. # label_suffix -- Suffix to be added to the label. Overrides # form's label_suffix. 
self.required, self.label, self.initial = required, label, initial self.show_hidden_initial = show_hidden_initial self.help_text = help_text self.disabled = disabled self.label_suffix = label_suffix widget = widget or self.widget if isinstance(widget, type): widget = widget() else: widget = copy.deepcopy(widget) # Trigger the localization machinery if needed. self.localize = localize if self.localize: widget.is_localized = True # Let the widget know whether it should display as required. widget.is_required = self.required # Hook into self.widget_attrs() for any Field-specific HTML attributes. extra_attrs = self.widget_attrs(widget) if extra_attrs: widget.attrs.update(extra_attrs) self.widget = widget messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self.error_messages = messages self.validators = [*self.default_validators, *validators] super().__init__() def prepare_value(self, value): return value def to_python(self, value): return value def validate(self, value): if value in self.empty_values and self.required: raise ValidationError(self.error_messages['required'], code='required') def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise ValidationError(errors) def clean(self, value): """ Validate the given value and return its "cleaned" value as an appropriate Python object. Raise ValidationError for any errors. """ value = self.to_python(value) self.validate(value) self.run_validators(value) return value def bound_data(self, data, initial): """ Return the value that should be shown for this field on render of a bound form, given the submitted POST data for the field and the initial data, if any. For most fields, this will simply be data; FileFields need to handle it a bit differently. """ if self.disabled: return initial return data def widget_attrs(self, widget): """ Given a Widget instance (*not* a Widget class), return a dictionary of any HTML attributes that should be added to the Widget, based on this Field. """ return {} def has_changed(self, initial, data): """Return True if data differs from initial.""" # Always return False if the field is disabled since self.bound_data # always uses the initial value in this case. if self.disabled: return False try: data = self.to_python(data) if hasattr(self, '_coerce'): return self._coerce(data) != self._coerce(initial) except ValidationError: return True # For purposes of seeing whether something has changed, None is # the same as an empty string, if the data or initial value we get # is None, replace it with ''. initial_value = initial if initial is not None else '' data_value = data if data is not None else '' return initial_value != data_value def get_bound_field(self, form, field_name): """ Return a BoundField instance that will be used when accessing the form field in a template. 
""" return BoundField(form, self, field_name) def __deepcopy__(self, memo): result = copy.copy(self) memo[id(self)] = result result.widget = copy.deepcopy(self.widget, memo) result.error_messages = self.error_messages.copy() result.validators = self.validators[:] return result class CharField(Field): def __init__(self, *, max_length=None, min_length=None, strip=True, empty_value='', **kwargs): self.max_length = max_length self.min_length = min_length self.strip = strip self.empty_value = empty_value super().__init__(**kwargs) if min_length is not None: self.validators.append(validators.MinLengthValidator(int(min_length))) if max_length is not None: self.validators.append(validators.MaxLengthValidator(int(max_length))) self.validators.append(validators.ProhibitNullCharactersValidator()) def to_python(self, value): """Return a string.""" if value not in self.empty_values: value = str(value) if self.strip: value = value.strip() if value in self.empty_values: return self.empty_value return value def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if self.max_length is not None and not widget.is_hidden: # The HTML attribute is maxlength, not max_length. attrs['maxlength'] = str(self.max_length) if self.min_length is not None and not widget.is_hidden: # The HTML attribute is minlength, not min_length. attrs['minlength'] = str(self.min_length) return attrs class IntegerField(Field): widget = NumberInput default_error_messages = { 'invalid': _('Enter a whole number.'), } re_decimal = _lazy_re_compile(r'\.0*\s*$') def __init__(self, *, max_value=None, min_value=None, **kwargs): self.max_value, self.min_value = max_value, min_value if kwargs.get('localize') and self.widget == NumberInput: # Localized number input is not well supported on most browsers kwargs.setdefault('widget', super().widget) super().__init__(**kwargs) if max_value is not None: self.validators.append(validators.MaxValueValidator(max_value)) if min_value is not None: self.validators.append(validators.MinValueValidator(min_value)) def to_python(self, value): """ Validate that int() can be called on the input. Return the result of int() or None for empty values. """ value = super().to_python(value) if value in self.empty_values: return None if self.localize: value = formats.sanitize_separators(value) # Strip trailing decimal and zeros. try: value = int(self.re_decimal.sub('', str(value))) except (ValueError, TypeError): raise ValidationError(self.error_messages['invalid'], code='invalid') return value def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if isinstance(widget, NumberInput): if self.min_value is not None: attrs['min'] = self.min_value if self.max_value is not None: attrs['max'] = self.max_value return attrs class FloatField(IntegerField): default_error_messages = { 'invalid': _('Enter a number.'), } def to_python(self, value): """ Validate that float() can be called on the input. Return the result of float() or None for empty values. 
""" value = super(IntegerField, self).to_python(value) if value in self.empty_values: return None if self.localize: value = formats.sanitize_separators(value) try: value = float(value) except (ValueError, TypeError): raise ValidationError(self.error_messages['invalid'], code='invalid') return value def validate(self, value): super().validate(value) if value in self.empty_values: return if not math.isfinite(value): raise ValidationError(self.error_messages['invalid'], code='invalid') def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if isinstance(widget, NumberInput) and 'step' not in widget.attrs: attrs.setdefault('step', 'any') return attrs class DecimalField(IntegerField): default_error_messages = { 'invalid': _('Enter a number.'), } def __init__(self, *, max_value=None, min_value=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super().__init__(max_value=max_value, min_value=min_value, **kwargs) self.validators.append(validators.DecimalValidator(max_digits, decimal_places)) def to_python(self, value): """ Validate that the input is a decimal number. Return a Decimal instance or None for empty values. Ensure that there are no more than max_digits in the number and no more than decimal_places digits after the decimal point. """ if value in self.empty_values: return None if self.localize: value = formats.sanitize_separators(value) value = str(value).strip() try: value = Decimal(value) except DecimalException: raise ValidationError(self.error_messages['invalid'], code='invalid') return value def validate(self, value): super().validate(value) if value in self.empty_values: return if not value.is_finite(): raise ValidationError(self.error_messages['invalid'], code='invalid') def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if isinstance(widget, NumberInput) and 'step' not in widget.attrs: if self.decimal_places is not None: # Use exponential notation for small values since they might # be parsed as 0 otherwise. ref #20765 step = str(Decimal(1).scaleb(-self.decimal_places)).lower() else: step = 'any' attrs.setdefault('step', step) return attrs class BaseTemporalField(Field): def __init__(self, *, input_formats=None, **kwargs): super().__init__(**kwargs) if input_formats is not None: self.input_formats = input_formats def to_python(self, value): value = value.strip() # Try to strptime against each input format. for format in self.input_formats: try: return self.strptime(value, format) except (ValueError, TypeError): continue raise ValidationError(self.error_messages['invalid'], code='invalid') def strptime(self, value, format): raise NotImplementedError('Subclasses must define this method.') class DateField(BaseTemporalField): widget = DateInput input_formats = formats.get_format_lazy('DATE_INPUT_FORMATS') default_error_messages = { 'invalid': _('Enter a valid date.'), } def to_python(self, value): """ Validate that the input can be converted to a date. Return a Python datetime.date object. 
""" if value in self.empty_values: return None if isinstance(value, datetime.datetime): return value.date() if isinstance(value, datetime.date): return value return super().to_python(value) def strptime(self, value, format): return datetime.datetime.strptime(value, format).date() class TimeField(BaseTemporalField): widget = TimeInput input_formats = formats.get_format_lazy('TIME_INPUT_FORMATS') default_error_messages = { 'invalid': _('Enter a valid time.') } def to_python(self, value): """ Validate that the input can be converted to a time. Return a Python datetime.time object. """ if value in self.empty_values: return None if isinstance(value, datetime.time): return value return super().to_python(value) def strptime(self, value, format): return datetime.datetime.strptime(value, format).time() class DateTimeField(BaseTemporalField): widget = DateTimeInput input_formats = formats.get_format_lazy('DATETIME_INPUT_FORMATS') default_error_messages = { 'invalid': _('Enter a valid date/time.'), } def prepare_value(self, value): if isinstance(value, datetime.datetime): value = to_current_timezone(value) return value def to_python(self, value): """ Validate that the input can be converted to a datetime. Return a Python datetime.datetime object. """ if value in self.empty_values: return None if isinstance(value, datetime.datetime): return from_current_timezone(value) if isinstance(value, datetime.date): result = datetime.datetime(value.year, value.month, value.day) return from_current_timezone(result) result = super().to_python(value) return from_current_timezone(result) def strptime(self, value, format): return datetime.datetime.strptime(value, format) class DurationField(Field): default_error_messages = { 'invalid': _('Enter a valid duration.'), 'overflow': _('The number of days must be between {min_days} and {max_days}.') } def prepare_value(self, value): if isinstance(value, datetime.timedelta): return duration_string(value) return value def to_python(self, value): if value in self.empty_values: return None if isinstance(value, datetime.timedelta): return value try: value = parse_duration(str(value)) except OverflowError: raise ValidationError(self.error_messages['overflow'].format( min_days=datetime.timedelta.min.days, max_days=datetime.timedelta.max.days, ), code='overflow') if value is None: raise ValidationError(self.error_messages['invalid'], code='invalid') return value class RegexField(CharField): def __init__(self, regex, **kwargs): """ regex can be either a string or a compiled regular expression object. """ kwargs.setdefault('strip', False) super().__init__(**kwargs) self._set_regex(regex) def _get_regex(self): return self._regex def _set_regex(self, regex): if isinstance(regex, str): regex = re.compile(regex) self._regex = regex if hasattr(self, '_regex_validator') and self._regex_validator in self.validators: self.validators.remove(self._regex_validator) self._regex_validator = validators.RegexValidator(regex=regex) self.validators.append(self._regex_validator) regex = property(_get_regex, _set_regex) class EmailField(CharField): widget = EmailInput default_validators = [validators.validate_email] def __init__(self, **kwargs): super().__init__(strip=True, **kwargs) class FileField(Field): widget = ClearableFileInput default_error_messages = { 'invalid': _("No file was submitted. 
Check the encoding type on the form."), 'missing': _("No file was submitted."), 'empty': _("The submitted file is empty."), 'max_length': ngettext_lazy( 'Ensure this filename has at most %(max)d character (it has %(length)d).', 'Ensure this filename has at most %(max)d characters (it has %(length)d).', 'max'), 'contradiction': _('Please either submit a file or check the clear checkbox, not both.') } def __init__(self, *, max_length=None, allow_empty_file=False, **kwargs): self.max_length = max_length self.allow_empty_file = allow_empty_file super().__init__(**kwargs) def to_python(self, data): if data in self.empty_values: return None # UploadedFile objects should have name and size attributes. try: file_name = data.name file_size = data.size except AttributeError: raise ValidationError(self.error_messages['invalid'], code='invalid') if self.max_length is not None and len(file_name) > self.max_length: params = {'max': self.max_length, 'length': len(file_name)} raise ValidationError(self.error_messages['max_length'], code='max_length', params=params) if not file_name: raise ValidationError(self.error_messages['invalid'], code='invalid') if not self.allow_empty_file and not file_size: raise ValidationError(self.error_messages['empty'], code='empty') return data def clean(self, data, initial=None): # If the widget got contradictory inputs, we raise a validation error if data is FILE_INPUT_CONTRADICTION: raise ValidationError(self.error_messages['contradiction'], code='contradiction') # False means the field value should be cleared; further validation is # not needed. if data is False: if not self.required: return False # If the field is required, clearing is not possible (the widget # shouldn't return False data in that case anyway). False is not # in self.empty_value; if a False value makes it this far # it should be validated from here on out as None (so it will be # caught by the required check). data = None if not data and initial: return initial return super().clean(data) def bound_data(self, data, initial): if data in (None, FILE_INPUT_CONTRADICTION): return initial return data def has_changed(self, initial, data): return not self.disabled and data is not None class ImageField(FileField): default_validators = [validators.validate_image_file_extension] default_error_messages = { 'invalid_image': _( "Upload a valid image. The file you uploaded was either not an " "image or a corrupted image." ), } def to_python(self, data): """ Check that the file-upload field data contains a valid image (GIF, JPG, PNG, etc. -- whatever Pillow supports). """ f = super().to_python(data) if f is None: return None from PIL import Image # We need to get a file object for Pillow. We might have a path or we might # have to read the data into memory. if hasattr(data, 'temporary_file_path'): file = data.temporary_file_path() else: if hasattr(data, 'read'): file = BytesIO(data.read()) else: file = BytesIO(data['content']) try: # load() could spot a truncated JPEG, but it loads the entire # image in memory, which is a DoS vector. See #3848 and #18520. image = Image.open(file) # verify() must be called immediately after the constructor. image.verify() # Annotating so subclasses can reuse it for their own validation f.image = image # Pillow doesn't detect the MIME type of all formats. In those # cases, content_type will be None. f.content_type = Image.MIME.get(image.format) except Exception as exc: # Pillow doesn't recognize it as an image. 
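            # The original exception is chained ('from exc') so the
            # underlying Pillow error stays visible in tracebacks.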
raise ValidationError( self.error_messages['invalid_image'], code='invalid_image', ) from exc if hasattr(f, 'seek') and callable(f.seek): f.seek(0) return f def widget_attrs(self, widget): attrs = super().widget_attrs(widget) if isinstance(widget, FileInput) and 'accept' not in widget.attrs: attrs.setdefault('accept', 'image/*') return attrs class URLField(CharField): widget = URLInput default_error_messages = { 'invalid': _('Enter a valid URL.'), } default_validators = [validators.URLValidator()] def __init__(self, **kwargs): super().__init__(strip=True, **kwargs) def to_python(self, value): def split_url(url): """ Return a list of url parts via urlparse.urlsplit(), or raise ValidationError for some malformed URLs. """ try: return list(urlsplit(url)) except ValueError: # urlparse.urlsplit can raise a ValueError with some # misformatted URLs. raise ValidationError(self.error_messages['invalid'], code='invalid') value = super().to_python(value) if value: url_fields = split_url(value) if not url_fields[0]: # If no URL scheme given, assume http:// url_fields[0] = 'http' if not url_fields[1]: # Assume that if no domain is provided, that the path segment # contains the domain. url_fields[1] = url_fields[2] url_fields[2] = '' # Rebuild the url_fields list, since the domain segment may now # contain the path too. url_fields = split_url(urlunsplit(url_fields)) value = urlunsplit(url_fields) return value class BooleanField(Field): widget = CheckboxInput def to_python(self, value): """Return a Python boolean object.""" # Explicitly check for the string 'False', which is what a hidden field # will submit for False. Also check for '0', since this is what # RadioSelect will provide. Because bool("True") == bool('1') == True, # we don't need to handle that explicitly. if isinstance(value, str) and value.lower() in ('false', '0'): value = False else: value = bool(value) return super().to_python(value) def validate(self, value): if not value and self.required: raise ValidationError(self.error_messages['required'], code='required') def has_changed(self, initial, data): if self.disabled: return False # Sometimes data or initial may be a string equivalent of a boolean # so we should run it through to_python first to get a boolean value return self.to_python(initial) != self.to_python(data) class NullBooleanField(BooleanField): """ A field whose valid values are None, True, and False. Clean invalid values to None. """ widget = NullBooleanSelect def to_python(self, value): """ Explicitly check for the string 'True' and 'False', which is what a hidden field will submit for True and False, for 'true' and 'false', which are likely to be returned by JavaScript serializations of forms, and for '1' and '0', which is what a RadioField will submit. Unlike the Booleanfield, this field must check for True because it doesn't use the bool() function. """ if value in (True, 'True', 'true', '1'): return True elif value in (False, 'False', 'false', '0'): return False else: return None def validate(self, value): pass class CallableChoiceIterator: def __init__(self, choices_func): self.choices_func = choices_func def __iter__(self): yield from self.choices_func() class ChoiceField(Field): widget = Select default_error_messages = { 'invalid_choice': _('Select a valid choice. 
%(value)s is not one of the available choices.'), } def __init__(self, *, choices=(), **kwargs): super().__init__(**kwargs) self.choices = choices def __deepcopy__(self, memo): result = super().__deepcopy__(memo) result._choices = copy.deepcopy(self._choices, memo) return result def _get_choices(self): return self._choices def _set_choices(self, value): # Setting choices also sets the choices on the widget. # choices can be any iterable, but we call list() on it because # it will be consumed more than once. if callable(value): value = CallableChoiceIterator(value) else: value = list(value) self._choices = self.widget.choices = value choices = property(_get_choices, _set_choices) def to_python(self, value): """Return a string.""" if value in self.empty_values: return '' return str(value) def validate(self, value): """Validate that the input is in self.choices.""" super().validate(value) if value and not self.valid_value(value): raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) def valid_value(self, value): """Check to see if the provided value is a valid choice.""" text_value = str(value) for k, v in self.choices: if isinstance(v, (list, tuple)): # This is an optgroup, so look inside the group for options for k2, v2 in v: if value == k2 or text_value == str(k2): return True else: if value == k or text_value == str(k): return True return False class TypedChoiceField(ChoiceField): def __init__(self, *, coerce=lambda val: val, empty_value='', **kwargs): self.coerce = coerce self.empty_value = empty_value super().__init__(**kwargs) def _coerce(self, value): """ Validate that the value can be coerced to the right type (if not empty). """ if value == self.empty_value or value in self.empty_values: return self.empty_value try: value = self.coerce(value) except (ValueError, TypeError, ValidationError): raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) return value def clean(self, value): value = super().clean(value) return self._coerce(value) class MultipleChoiceField(ChoiceField): hidden_widget = MultipleHiddenInput widget = SelectMultiple default_error_messages = { 'invalid_choice': _('Select a valid choice. %(value)s is not one of the available choices.'), 'invalid_list': _('Enter a list of values.'), } def to_python(self, value): if not value: return [] elif not isinstance(value, (list, tuple)): raise ValidationError(self.error_messages['invalid_list'], code='invalid_list') return [str(val) for val in value] def validate(self, value): """Validate that the input is a list or tuple.""" if self.required and not value: raise ValidationError(self.error_messages['required'], code='required') # Validate that each value in the value list is in self.choices. 
for val in value: if not self.valid_value(val): raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': val}, ) def has_changed(self, initial, data): if self.disabled: return False if initial is None: initial = [] if data is None: data = [] if len(initial) != len(data): return True initial_set = {str(value) for value in initial} data_set = {str(value) for value in data} return data_set != initial_set class TypedMultipleChoiceField(MultipleChoiceField): def __init__(self, *, coerce=lambda val: val, **kwargs): self.coerce = coerce self.empty_value = kwargs.pop('empty_value', []) super().__init__(**kwargs) def _coerce(self, value): """ Validate that the values are in self.choices and can be coerced to the right type. """ if value == self.empty_value or value in self.empty_values: return self.empty_value new_value = [] for choice in value: try: new_value.append(self.coerce(choice)) except (ValueError, TypeError, ValidationError): raise ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': choice}, ) return new_value def clean(self, value): value = super().clean(value) return self._coerce(value) def validate(self, value): if value != self.empty_value: super().validate(value) elif self.required: raise ValidationError(self.error_messages['required'], code='required') class ComboField(Field): """ A Field whose clean() method calls multiple Field clean() methods. """ def __init__(self, fields, **kwargs): super().__init__(**kwargs) # Set 'required' to False on the individual fields, because the # required validation will be handled by ComboField, not by those # individual fields. for f in fields: f.required = False self.fields = fields def clean(self, value): """ Validate the given value against all of self.fields, which is a list of Field instances. """ super().clean(value) for field in self.fields: value = field.clean(value) return value class MultiValueField(Field): """ Aggregate the logic of multiple Fields. Its clean() method takes a "decompressed" list of values, which are then cleaned into a single value according to self.fields. Each value in this list is cleaned by the corresponding field -- the first value is cleaned by the first field, the second value is cleaned by the second field, etc. Once all fields are cleaned, the list of clean values is "compressed" into a single value. Subclasses should not have to implement clean(). Instead, they must implement compress(), which takes a list of valid values and returns a "compressed" version of those values -- a single value. You'll probably want to use this with MultiWidget. """ default_error_messages = { 'invalid': _('Enter a list of values.'), 'incomplete': _('Enter a complete value.'), } def __init__(self, fields, *, require_all_fields=True, **kwargs): self.require_all_fields = require_all_fields super().__init__(**kwargs) for f in fields: f.error_messages.setdefault('incomplete', self.error_messages['incomplete']) if self.disabled: f.disabled = True if self.require_all_fields: # Set 'required' to False on the individual fields, because the # required validation will be handled by MultiValueField, not # by those individual fields. f.required = False self.fields = fields def __deepcopy__(self, memo): result = super().__deepcopy__(memo) result.fields = tuple(x.__deepcopy__(memo) for x in self.fields) return result def validate(self, value): pass def clean(self, value): """ Validate every value in the given list. 
A value is validated against the corresponding Field in self.fields. For example, if this MultiValueField was instantiated with fields=(DateField(), TimeField()), clean() would call DateField.clean(value[0]) and TimeField.clean(value[1]). """ clean_data = [] errors = [] if self.disabled and not isinstance(value, list): value = self.widget.decompress(value) if not value or isinstance(value, (list, tuple)): if not value or not [v for v in value if v not in self.empty_values]: if self.required: raise ValidationError(self.error_messages['required'], code='required') else: return self.compress([]) else: raise ValidationError(self.error_messages['invalid'], code='invalid') for i, field in enumerate(self.fields): try: field_value = value[i] except IndexError: field_value = None if field_value in self.empty_values: if self.require_all_fields: # Raise a 'required' error if the MultiValueField is # required and any field is empty. if self.required: raise ValidationError(self.error_messages['required'], code='required') elif field.required: # Otherwise, add an 'incomplete' error to the list of # collected errors and skip field cleaning, if a required # field is empty. if field.error_messages['incomplete'] not in errors: errors.append(field.error_messages['incomplete']) continue try: clean_data.append(field.clean(field_value)) except ValidationError as e: # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. Skip duplicates. errors.extend(m for m in e.error_list if m not in errors) if errors: raise ValidationError(errors) out = self.compress(clean_data) self.validate(out) self.run_validators(out) return out def compress(self, data_list): """ Return a single value for the given list of values. The values can be assumed to be valid. For example, if this MultiValueField was instantiated with fields=(DateField(), TimeField()), this might return a datetime object created by combining the date and time in data_list. 
""" raise NotImplementedError('Subclasses must implement this method.') def has_changed(self, initial, data): if self.disabled: return False if initial is None: initial = ['' for x in range(0, len(data))] else: if not isinstance(initial, list): initial = self.widget.decompress(initial) for field, initial, data in zip(self.fields, initial, data): try: initial = field.to_python(initial) except ValidationError: return True if field.has_changed(initial, data): return True return False class FilePathField(ChoiceField): def __init__(self, path, *, match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders super().__init__(choices=(), **kwargs) if self.required: self.choices = [] else: self.choices = [("", "---------")] if self.match is not None: self.match_re = re.compile(self.match) if recursive: for root, dirs, files in sorted(os.walk(self.path)): if self.allow_files: for f in sorted(files): if self.match is None or self.match_re.search(f): f = os.path.join(root, f) self.choices.append((f, f.replace(path, "", 1))) if self.allow_folders: for f in sorted(dirs): if f == '__pycache__': continue if self.match is None or self.match_re.search(f): f = os.path.join(root, f) self.choices.append((f, f.replace(path, "", 1))) else: choices = [] for f in os.scandir(self.path): if f.name == '__pycache__': continue if (((self.allow_files and f.is_file()) or (self.allow_folders and f.is_dir())) and (self.match is None or self.match_re.search(f.name))): choices.append((f.path, f.name)) choices.sort(key=operator.itemgetter(1)) self.choices.extend(choices) self.widget.choices = self.choices class SplitDateTimeField(MultiValueField): widget = SplitDateTimeWidget hidden_widget = SplitHiddenDateTimeWidget default_error_messages = { 'invalid_date': _('Enter a valid date.'), 'invalid_time': _('Enter a valid time.'), } def __init__(self, *, input_date_formats=None, input_time_formats=None, **kwargs): errors = self.default_error_messages.copy() if 'error_messages' in kwargs: errors.update(kwargs['error_messages']) localize = kwargs.get('localize', False) fields = ( DateField(input_formats=input_date_formats, error_messages={'invalid': errors['invalid_date']}, localize=localize), TimeField(input_formats=input_time_formats, error_messages={'invalid': errors['invalid_time']}, localize=localize), ) super().__init__(fields, **kwargs) def compress(self, data_list): if data_list: # Raise a validation error if time or date is empty # (possible if SplitDateTimeField has required=False). 
if data_list[0] in self.empty_values: raise ValidationError(self.error_messages['invalid_date'], code='invalid_date') if data_list[1] in self.empty_values: raise ValidationError(self.error_messages['invalid_time'], code='invalid_time') result = datetime.datetime.combine(*data_list) return from_current_timezone(result) return None class GenericIPAddressField(CharField): def __init__(self, *, protocol='both', unpack_ipv4=False, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.default_validators = validators.ip_address_validators(protocol, unpack_ipv4)[0] super().__init__(**kwargs) def to_python(self, value): if value in self.empty_values: return '' value = value.strip() if value and ':' in value: return clean_ipv6_address(value, self.unpack_ipv4) return value class SlugField(CharField): default_validators = [validators.validate_slug] def __init__(self, *, allow_unicode=False, **kwargs): self.allow_unicode = allow_unicode if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super().__init__(**kwargs) class UUIDField(CharField): default_error_messages = { 'invalid': _('Enter a valid UUID.'), } def prepare_value(self, value): if isinstance(value, uuid.UUID): return str(value) return value def to_python(self, value): value = super().to_python(value) if value in self.empty_values: return None if not isinstance(value, uuid.UUID): try: value = uuid.UUID(value) except ValueError: raise ValidationError(self.error_messages['invalid'], code='invalid') return value
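

# Example (illustrative sketch, not part of the original module): clean()
# chains to_python(), validate() and run_validators(), so one call both
# converts and validates:
#
#     >>> f = IntegerField(min_value=1)
#     >>> f.clean('5.0')  # trailing '.0' is stripped by re_decimal
#     5
#     >>> f.clean('0')    # raises ValidationError via MinValueValidator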
""" Functions for creating and restoring url-safe signed JSON objects. The format used looks like this: >>> signing.dumps("hello") 'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk' There are two components here, separated by a ':'. The first component is a URLsafe base64 encoded JSON of the object passed to dumps(). The second component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret" signing.loads(s) checks the signature and returns the deserialized object. If the signature fails, a BadSignature exception is raised. >>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk") 'hello' >>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified") ... BadSignature: Signature failed: ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk-modified You can optionally compress the JSON prior to base64 encoding it to save space, using the compress=True argument. This checks if compression actually helps and only applies compression if the result is a shorter string: >>> signing.dumps(list(range(1, 20)), compress=True) '.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ' The fact that the string is compressed is signalled by the prefixed '.' at the start of the base64 JSON. There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'. These functions make use of all of them. """ import base64 import datetime import json import time import zlib from django.conf import settings from django.utils import baseconv from django.utils.crypto import constant_time_compare, salted_hmac from django.utils.encoding import force_bytes from django.utils.module_loading import import_string from django.utils.regex_helper import _lazy_re_compile _SEP_UNSAFE = _lazy_re_compile(r'^[A-z0-9-_=]*$') class BadSignature(Exception): """Signature does not match.""" pass class SignatureExpired(BadSignature): """Signature timestamp is older than required max_age.""" pass def b64_encode(s): return base64.urlsafe_b64encode(s).strip(b'=') def b64_decode(s): pad = b'=' * (-len(s) % 4) return base64.urlsafe_b64decode(s + pad) def base64_hmac(salt, value, key): return b64_encode(salted_hmac(salt, value, key).digest()).decode() def get_cookie_signer(salt='django.core.signing.get_cookie_signer'): Signer = import_string(settings.SIGNING_BACKEND) key = force_bytes(settings.SECRET_KEY) # SECRET_KEY may be str or bytes. return Signer(b'django.http.cookies' + key, salt=salt) class JSONSerializer: """ Simple wrapper around json to be used in signing.dumps and signing.loads. """ def dumps(self, obj): return json.dumps(obj, separators=(',', ':')).encode('latin-1') def loads(self, data): return json.loads(data.decode('latin-1')) def dumps(obj, key=None, salt='django.core.signing', serializer=JSONSerializer, compress=False): """ Return URL-safe, hmac/SHA1 signed base64 compressed JSON string. If key is None, use settings.SECRET_KEY instead. If compress is True (not the default), check if compressing using zlib can save some space. Prepend a '.' to signify compression. This is included in the signature, to protect against zip bombs. Salt can be used to namespace the hash, so that a signed string is only valid for a given namespace. Leaving this at the default value or re-using a salt value across different parts of your application without good cause is a security risk. The serializer is expected to return a bytestring. 
""" data = serializer().dumps(obj) # Flag for if it's been compressed or not is_compressed = False if compress: # Avoid zlib dependency unless compress is being used compressed = zlib.compress(data) if len(compressed) < (len(data) - 1): data = compressed is_compressed = True base64d = b64_encode(data).decode() if is_compressed: base64d = '.' + base64d return TimestampSigner(key, salt=salt).sign(base64d) def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, max_age=None): """ Reverse of dumps(), raise BadSignature if signature fails. The serializer is expected to accept a bytestring. """ # TimestampSigner.unsign() returns str but base64 and zlib compression # operate on bytes. base64d = TimestampSigner(key, salt=salt).unsign(s, max_age=max_age).encode() decompress = base64d[:1] == b'.' if decompress: # It's compressed; uncompress it first base64d = base64d[1:] data = b64_decode(base64d) if decompress: data = zlib.decompress(data) return serializer().loads(data) class Signer: def __init__(self, key=None, sep=':', salt=None): # Use of native strings in all versions of Python self.key = key or settings.SECRET_KEY self.sep = sep if _SEP_UNSAFE.match(self.sep): raise ValueError( 'Unsafe Signer separator: %r (cannot be empty or consist of ' 'only A-z0-9-_=)' % sep, ) self.salt = salt or '%s.%s' % (self.__class__.__module__, self.__class__.__name__) def signature(self, value): return base64_hmac(self.salt + 'signer', value, self.key) def sign(self, value): return '%s%s%s' % (value, self.sep, self.signature(value)) def unsign(self, signed_value): if self.sep not in signed_value: raise BadSignature('No "%s" found in value' % self.sep) value, sig = signed_value.rsplit(self.sep, 1) if constant_time_compare(sig, self.signature(value)): return value raise BadSignature('Signature "%s" does not match' % sig) class TimestampSigner(Signer): def timestamp(self): return baseconv.base62.encode(int(time.time())) def sign(self, value): value = '%s%s%s' % (value, self.sep, self.timestamp()) return super().sign(value) def unsign(self, value, max_age=None): """ Retrieve original value and check it wasn't signed more than max_age seconds ago. """ result = super().unsign(value) value, timestamp = result.rsplit(self.sep, 1) timestamp = baseconv.base62.decode(timestamp) if max_age is not None: if isinstance(max_age, datetime.timedelta): max_age = max_age.total_seconds() # Check timestamp is not older than max_age age = time.time() - timestamp if age > max_age: raise SignatureExpired( 'Signature age %s > %s seconds' % (age, max_age)) return value
import ipaddress import re from pathlib import Path from urllib.parse import urlsplit, urlunsplit from django.core.exceptions import ValidationError from django.utils.deconstruct import deconstructible from django.utils.encoding import punycode from django.utils.ipv6 import is_valid_ipv6_address from django.utils.regex_helper import _lazy_re_compile from django.utils.translation import gettext_lazy as _, ngettext_lazy # These values, if given to validate(), will trigger the self.required check. EMPTY_VALUES = (None, '', [], (), {}) @deconstructible class RegexValidator: regex = '' message = _('Enter a valid value.') code = 'invalid' inverse_match = False flags = 0 def __init__(self, regex=None, message=None, code=None, inverse_match=None, flags=None): if regex is not None: self.regex = regex if message is not None: self.message = message if code is not None: self.code = code if inverse_match is not None: self.inverse_match = inverse_match if flags is not None: self.flags = flags if self.flags and not isinstance(self.regex, str): raise TypeError("If the flags are set, regex must be a regular expression string.") self.regex = _lazy_re_compile(self.regex, self.flags) def __call__(self, value): """ Validate that the input contains (or does *not* contain, if inverse_match is True) a match for the regular expression. """ regex_matches = self.regex.search(str(value)) invalid_input = regex_matches if self.inverse_match else not regex_matches if invalid_input: raise ValidationError(self.message, code=self.code) def __eq__(self, other): return ( isinstance(other, RegexValidator) and self.regex.pattern == other.regex.pattern and self.regex.flags == other.regex.flags and (self.message == other.message) and (self.code == other.code) and (self.inverse_match == other.inverse_match) ) @deconstructible class URLValidator(RegexValidator): ul = '\u00a1-\uffff' # unicode letters range (must not be a raw string) # IP patterns ipv4_re = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)(?:\.(?:25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}' ipv6_re = r'\[[0-9a-f:\.]+\]' # (simple regex, validated later) # Host patterns hostname_re = r'[a-z' + ul + r'0-9](?:[a-z' + ul + r'0-9-]{0,61}[a-z' + ul + r'0-9])?' # Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1 domain_re = r'(?:\.(?!-)[a-z' + ul + r'0-9-]{1,63}(?<!-))*' tld_re = ( r'\.' # dot r'(?!-)' # can't start with a dash r'(?:[a-z' + ul + '-]{2,63}' # domain label r'|xn--[a-z0-9]{1,59})' # or punycode label r'(?<!-)' # can't end with a dash r'\.?' # may have a trailing dot ) host_re = '(' + hostname_re + domain_re + tld_re + '|localhost)' regex = _lazy_re_compile( r'^(?:[a-z0-9\.\-\+]*)://' # scheme is validated separately r'(?:[^\s:@/]+(?::[^\s:@/]*)?@)?' # user:pass authentication r'(?:' + ipv4_re + '|' + ipv6_re + '|' + host_re + ')' r'(?::\d{2,5})?' # port r'(?:[/?#][^\s]*)?' # resource path r'\Z', re.IGNORECASE) message = _('Enter a valid URL.') schemes = ['http', 'https', 'ftp', 'ftps'] def __init__(self, schemes=None, **kwargs): super().__init__(**kwargs) if schemes is not None: self.schemes = schemes def __call__(self, value): # Check first if the scheme is valid scheme = value.split('://')[0].lower() if scheme not in self.schemes: raise ValidationError(self.message, code=self.code) # Then check full URL try: super().__call__(value) except ValidationError as e: # Trivial case failed. 
Try for possible IDN domain if value: try: scheme, netloc, path, query, fragment = urlsplit(value) except ValueError: # for example, "Invalid IPv6 URL" raise ValidationError(self.message, code=self.code) try: netloc = punycode(netloc) # IDN -> ACE except UnicodeError: # invalid domain part raise e url = urlunsplit((scheme, netloc, path, query, fragment)) super().__call__(url) else: raise else: # Now verify IPv6 in the netloc part host_match = re.search(r'^\[(.+)\](?::\d{2,5})?$', urlsplit(value).netloc) if host_match: potential_ip = host_match.groups()[0] try: validate_ipv6_address(potential_ip) except ValidationError: raise ValidationError(self.message, code=self.code) # The maximum length of a full host name is 253 characters per RFC 1034 # section 3.1. It's defined to be 255 bytes or less, but this includes # one byte for the length of the name and one byte for the trailing dot # that's used to indicate absolute names in DNS. if len(urlsplit(value).netloc) > 253: raise ValidationError(self.message, code=self.code) integer_validator = RegexValidator( _lazy_re_compile(r'^-?\d+\Z'), message=_('Enter a valid integer.'), code='invalid', ) def validate_integer(value): return integer_validator(value) @deconstructible class EmailValidator: message = _('Enter a valid email address.') code = 'invalid' user_regex = _lazy_re_compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)', # quoted-string re.IGNORECASE) domain_regex = _lazy_re_compile( # max length for domain name labels is 63 characters per RFC 1034 r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z', re.IGNORECASE) literal_regex = _lazy_re_compile( # literal form, ipv4 or ipv6 address (SMTP 4.1.3) r'\[([A-f0-9:\.]+)\]\Z', re.IGNORECASE) domain_whitelist = ['localhost'] def __init__(self, message=None, code=None, whitelist=None): if message is not None: self.message = message if code is not None: self.code = code if whitelist is not None: self.domain_whitelist = whitelist def __call__(self, value): if not value or '@' not in value: raise ValidationError(self.message, code=self.code) user_part, domain_part = value.rsplit('@', 1) if not self.user_regex.match(user_part): raise ValidationError(self.message, code=self.code) if (domain_part not in self.domain_whitelist and not self.validate_domain_part(domain_part)): # Try for possible IDN domain-part try: domain_part = punycode(domain_part) except UnicodeError: pass else: if self.validate_domain_part(domain_part): return raise ValidationError(self.message, code=self.code) def validate_domain_part(self, domain_part): if self.domain_regex.match(domain_part): return True literal_match = self.literal_regex.match(domain_part) if literal_match: ip_address = literal_match.group(1) try: validate_ipv46_address(ip_address) return True except ValidationError: pass return False def __eq__(self, other): return ( isinstance(other, EmailValidator) and (self.domain_whitelist == other.domain_whitelist) and (self.message == other.message) and (self.code == other.code) ) validate_email = EmailValidator() slug_re = _lazy_re_compile(r'^[-a-zA-Z0-9_]+\Z') validate_slug = RegexValidator( slug_re, # Translators: "letters" means latin letters: a-z and A-Z. 
_('Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.'), 'invalid' ) slug_unicode_re = _lazy_re_compile(r'^[-\w]+\Z') validate_unicode_slug = RegexValidator( slug_unicode_re, _('Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.'), 'invalid' ) def validate_ipv4_address(value): try: ipaddress.IPv4Address(value) except ValueError: raise ValidationError(_('Enter a valid IPv4 address.'), code='invalid') def validate_ipv6_address(value): if not is_valid_ipv6_address(value): raise ValidationError(_('Enter a valid IPv6 address.'), code='invalid') def validate_ipv46_address(value): try: validate_ipv4_address(value) except ValidationError: try: validate_ipv6_address(value) except ValidationError: raise ValidationError(_('Enter a valid IPv4 or IPv6 address.'), code='invalid') ip_address_validator_map = { 'both': ([validate_ipv46_address], _('Enter a valid IPv4 or IPv6 address.')), 'ipv4': ([validate_ipv4_address], _('Enter a valid IPv4 address.')), 'ipv6': ([validate_ipv6_address], _('Enter a valid IPv6 address.')), } def ip_address_validators(protocol, unpack_ipv4): """ Depending on the given parameters, return the appropriate validators for the GenericIPAddressField. """ if protocol != 'both' and unpack_ipv4: raise ValueError( "You can only use `unpack_ipv4` if `protocol` is set to 'both'") try: return ip_address_validator_map[protocol.lower()] except KeyError: raise ValueError("The protocol '%s' is unknown. Supported: %s" % (protocol, list(ip_address_validator_map))) def int_list_validator(sep=',', message=None, code='invalid', allow_negative=False): regexp = _lazy_re_compile(r'^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z' % { 'neg': '(-)?' if allow_negative else '', 'sep': re.escape(sep), }) return RegexValidator(regexp, message=message, code=code) validate_comma_separated_integer_list = int_list_validator( message=_('Enter only digits separated by commas.'), ) @deconstructible class BaseValidator: message = _('Ensure this value is %(limit_value)s (it is %(show_value)s).') code = 'limit_value' def __init__(self, limit_value, message=None): self.limit_value = limit_value if message: self.message = message def __call__(self, value): cleaned = self.clean(value) limit_value = self.limit_value() if callable(self.limit_value) else self.limit_value params = {'limit_value': limit_value, 'show_value': cleaned, 'value': value} if self.compare(cleaned, limit_value): raise ValidationError(self.message, code=self.code, params=params) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return ( self.limit_value == other.limit_value and self.message == other.message and self.code == other.code ) def compare(self, a, b): return a is not b def clean(self, x): return x @deconstructible class MaxValueValidator(BaseValidator): message = _('Ensure this value is less than or equal to %(limit_value)s.') code = 'max_value' def compare(self, a, b): return a > b @deconstructible class MinValueValidator(BaseValidator): message = _('Ensure this value is greater than or equal to %(limit_value)s.') code = 'min_value' def compare(self, a, b): return a < b @deconstructible class MinLengthValidator(BaseValidator): message = ngettext_lazy( 'Ensure this value has at least %(limit_value)d character (it has %(show_value)d).', 'Ensure this value has at least %(limit_value)d characters (it has %(show_value)d).', 'limit_value') code = 'min_length' def compare(self, a, b): return a < b def clean(self, x): return len(x) @deconstructible class 
MaxLengthValidator(BaseValidator): message = ngettext_lazy( 'Ensure this value has at most %(limit_value)d character (it has %(show_value)d).', 'Ensure this value has at most %(limit_value)d characters (it has %(show_value)d).', 'limit_value') code = 'max_length' def compare(self, a, b): return a > b def clean(self, x): return len(x) @deconstructible class DecimalValidator: """ Validate that the input does not exceed the maximum number of digits expected, otherwise raise ValidationError. """ messages = { 'invalid': _('Enter a number.'), 'max_digits': ngettext_lazy( 'Ensure that there are no more than %(max)s digit in total.', 'Ensure that there are no more than %(max)s digits in total.', 'max' ), 'max_decimal_places': ngettext_lazy( 'Ensure that there are no more than %(max)s decimal place.', 'Ensure that there are no more than %(max)s decimal places.', 'max' ), 'max_whole_digits': ngettext_lazy( 'Ensure that there are no more than %(max)s digit before the decimal point.', 'Ensure that there are no more than %(max)s digits before the decimal point.', 'max' ), } def __init__(self, max_digits, decimal_places): self.max_digits = max_digits self.decimal_places = decimal_places def __call__(self, value): digit_tuple, exponent = value.as_tuple()[1:] if exponent in {'F', 'n', 'N'}: raise ValidationError(self.messages['invalid']) if exponent >= 0: # A positive exponent adds that many trailing zeros. digits = len(digit_tuple) + exponent decimals = 0 else: # If the absolute value of the negative exponent is larger than the # number of digits, then it's the same as the number of digits, # because it'll consume all of the digits in digit_tuple and then # add abs(exponent) - len(digit_tuple) leading zeros after the # decimal point. if abs(exponent) > len(digit_tuple): digits = decimals = abs(exponent) else: digits = len(digit_tuple) decimals = abs(exponent) whole_digits = digits - decimals if self.max_digits is not None and digits > self.max_digits: raise ValidationError( self.messages['max_digits'], code='max_digits', params={'max': self.max_digits}, ) if self.decimal_places is not None and decimals > self.decimal_places: raise ValidationError( self.messages['max_decimal_places'], code='max_decimal_places', params={'max': self.decimal_places}, ) if (self.max_digits is not None and self.decimal_places is not None and whole_digits > (self.max_digits - self.decimal_places)): raise ValidationError( self.messages['max_whole_digits'], code='max_whole_digits', params={'max': (self.max_digits - self.decimal_places)}, ) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.max_digits == other.max_digits and self.decimal_places == other.decimal_places ) @deconstructible class FileExtensionValidator: message = _( 'File extension “%(extension)s” is not allowed. ' 'Allowed extensions are: %(allowed_extensions)s.' 
) code = 'invalid_extension' def __init__(self, allowed_extensions=None, message=None, code=None): if allowed_extensions is not None: allowed_extensions = [allowed_extension.lower() for allowed_extension in allowed_extensions] self.allowed_extensions = allowed_extensions if message is not None: self.message = message if code is not None: self.code = code def __call__(self, value): extension = Path(value.name).suffix[1:].lower() if self.allowed_extensions is not None and extension not in self.allowed_extensions: raise ValidationError( self.message, code=self.code, params={ 'extension': extension, 'allowed_extensions': ', '.join(self.allowed_extensions) } ) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.allowed_extensions == other.allowed_extensions and self.message == other.message and self.code == other.code ) def get_available_image_extensions(): try: from PIL import Image except ImportError: return [] else: Image.init() return [ext.lower()[1:] for ext in Image.EXTENSION] def validate_image_file_extension(value): return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(value) @deconstructible class ProhibitNullCharactersValidator: """Validate that the string doesn't contain the null character.""" message = _('Null characters are not allowed.') code = 'null_characters_not_allowed' def __init__(self, message=None, code=None): if message is not None: self.message = message if code is not None: self.code = code def __call__(self, value): if '\x00' in str(value): raise ValidationError(self.message, code=self.code) def __eq__(self, other): return ( isinstance(other, self.__class__) and self.message == other.message and self.code == other.code )
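# Illustrative usage of the validators above -- a minimal sketch, assuming
# configured settings. Validators are plain callables that raise
# ValidationError on bad input and return None on success, so they compose
# with both model and form fields.
from django.conf import settings

if not settings.configured:
    settings.configure(USE_I18N=False)

from django.core import validators
from django.core.exceptions import ValidationError

validators.validate_email('user@example.com')     # passes silently
validators.validate_ipv46_address('2001:db8::1')  # accepts IPv4 or IPv6

url_validator = validators.URLValidator(schemes=['https'])
try:
    url_validator('http://example.com/')          # scheme not in allowed list
except ValidationError as exc:
    print(exc.code)  # 'invalid'

# int_list_validator() builds a RegexValidator for separator-delimited ints.
csv_ints = validators.int_list_validator(sep=',', allow_negative=True)
csv_ints('1,-2,3')                                # passes silently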
import datetime import json import mimetypes import os import re import sys import time from email.header import Header from http.client import responses from urllib.parse import quote, urlparse from django.conf import settings from django.core import signals, signing from django.core.exceptions import DisallowedRedirect from django.core.serializers.json import DjangoJSONEncoder from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.encoding import iri_to_uri from django.utils.http import http_date from django.utils.regex_helper import _lazy_re_compile _charset_from_content_type_re = _lazy_re_compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I) class BadHeaderError(ValueError): pass class HttpResponseBase: """ An HTTP response base class with dictionary-accessed headers. This class doesn't handle content. It should not be used directly. Use the HttpResponse and StreamingHttpResponse subclasses instead. """ status_code = 200 def __init__(self, content_type=None, status=None, reason=None, charset=None): # _headers is a mapping of the lowercase name to the original case of # the header (required for working with legacy systems) and the header # value. Both the name of the header and its value are ASCII strings. self._headers = {} self._closable_objects = [] # This parameter is set by the handler. It's necessary to preserve the # historical behavior of request_finished. self._handler_class = None self.cookies = SimpleCookie() self.closed = False if status is not None: try: self.status_code = int(status) except (ValueError, TypeError): raise TypeError('HTTP status code must be an integer.') if not 100 <= self.status_code <= 599: raise ValueError('HTTP status code must be an integer from 100 to 599.') self._reason_phrase = reason self._charset = charset if content_type is None: content_type = 'text/html; charset=%s' % self.charset self['Content-Type'] = content_type @property def reason_phrase(self): if self._reason_phrase is not None: return self._reason_phrase # Leave self._reason_phrase unset in order to use the default # reason phrase for status code. return responses.get(self.status_code, 'Unknown Status Code') @reason_phrase.setter def reason_phrase(self, value): self._reason_phrase = value @property def charset(self): if self._charset is not None: return self._charset content_type = self.get('Content-Type', '') matched = _charset_from_content_type_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched.group('charset').replace('"', '') return settings.DEFAULT_CHARSET @charset.setter def charset(self, value): self._charset = value def serialize_headers(self): """HTTP headers as a bytestring.""" def to_bytes(val, encoding): return val if isinstance(val, bytes) else val.encode(encoding) headers = [ (to_bytes(key, 'ascii') + b': ' + to_bytes(value, 'latin-1')) for key, value in self._headers.values() ] return b'\r\n'.join(headers) __bytes__ = serialize_headers @property def _content_type_for_repr(self): return ', "%s"' % self['Content-Type'] if 'Content-Type' in self else '' def _convert_to_charset(self, value, charset, mime_encode=False): """ Convert headers key/value to ascii/latin-1 native strings. `charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and `value` can't be represented in the given charset, apply MIME-encoding. 
""" if not isinstance(value, (bytes, str)): value = str(value) if ((isinstance(value, bytes) and (b'\n' in value or b'\r' in value)) or isinstance(value, str) and ('\n' in value or '\r' in value)): raise BadHeaderError("Header values can't contain newlines (got %r)" % value) try: if isinstance(value, str): # Ensure string is valid in given charset value.encode(charset) else: # Convert bytestring using given charset value = value.decode(charset) except UnicodeError as e: if mime_encode: value = Header(value, 'utf-8', maxlinelen=sys.maxsize).encode() else: e.reason += ', HTTP response headers must be in %s format' % charset raise return value def __setitem__(self, header, value): header = self._convert_to_charset(header, 'ascii') value = self._convert_to_charset(value, 'latin-1', mime_encode=True) self._headers[header.lower()] = (header, value) def __delitem__(self, header): self._headers.pop(header.lower(), False) def __getitem__(self, header): return self._headers[header.lower()][1] def has_header(self, header): """Case-insensitive check for a header.""" return header.lower() in self._headers __contains__ = has_header def items(self): return self._headers.values() def get(self, header, alternate=None): return self._headers.get(header.lower(), (None, alternate))[1] def set_cookie(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None): """ Set a cookie. ``expires`` can be: - a string in the correct format, - a naive ``datetime.datetime`` object in UTC, - an aware ``datetime.datetime`` object in any time zone. If it is a ``datetime.datetime`` object then calculate ``max_age``. """ self.cookies[key] = value if expires is not None: if isinstance(expires, datetime.datetime): if timezone.is_aware(expires): expires = timezone.make_naive(expires, timezone.utc) delta = expires - expires.utcnow() # Add one second so the date matches exactly (a fraction of # time gets lost between converting to a timedelta and # then the date string). delta = delta + datetime.timedelta(seconds=1) # Just set max_age - the max_age logic will set expires. expires = None max_age = max(0, delta.days * 86400 + delta.seconds) else: self.cookies[key]['expires'] = expires else: self.cookies[key]['expires'] = '' if max_age is not None: self.cookies[key]['max-age'] = max_age # IE requires expires, so set it if hasn't been already. if not expires: self.cookies[key]['expires'] = http_date(time.time() + max_age) if path is not None: self.cookies[key]['path'] = path if domain is not None: self.cookies[key]['domain'] = domain if secure: self.cookies[key]['secure'] = True if httponly: self.cookies[key]['httponly'] = True if samesite: if samesite.lower() not in ('lax', 'strict'): raise ValueError('samesite must be "lax" or "strict".') self.cookies[key]['samesite'] = samesite def setdefault(self, key, value): """Set a header unless it has already been set.""" if key not in self: self[key] = value def set_signed_cookie(self, key, value, salt='', **kwargs): value = signing.get_cookie_signer(salt=key + salt).sign(value) return self.set_cookie(key, value, **kwargs) def delete_cookie(self, key, path='/', domain=None): # Most browsers ignore the Set-Cookie header if the cookie name starts # with __Host- or __Secure- and the cookie doesn't use the secure flag. 
        secure = key.startswith(('__Secure-', '__Host-'))
        self.set_cookie(
            key, max_age=0, path=path, domain=domain, secure=secure,
            expires='Thu, 01 Jan 1970 00:00:00 GMT',
        )

    # Common methods used by subclasses

    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        # Per PEP 3333, this response body must be bytes. To avoid returning
        # an instance of a subclass, this function returns `bytes(value)`.
        # This doesn't make a copy when `value` already contains bytes.

        # Handle string types -- we can't rely on force_bytes here because:
        # - Python attempts str conversion first
        # - when self._charset != 'utf-8' it re-encodes the content
        if isinstance(value, (bytes, memoryview)):
            return bytes(value)
        if isinstance(value, str):
            return bytes(value.encode(self.charset))
        # Handle non-string types.
        return str(value).encode(self.charset)

    # These methods partially implement the file-like object interface.
    # See https://docs.python.org/library/io.html#io.IOBase

    # The WSGI server must call this method upon completion of the request.
    # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
    # When wsgi.file_wrapper is used, the WSGI server instead calls close()
    # on the file-like object. Django ensures this method is called in this
    # case by replacing self.file_to_stream.close() with a wrapped version.
    def close(self):
        for closable in self._closable_objects:
            try:
                closable.close()
            except Exception:
                pass
        self.closed = True
        signals.request_finished.send(sender=self._handler_class)

    def write(self, content):
        raise OSError('This %s instance is not writable' % self.__class__.__name__)

    def flush(self):
        pass

    def tell(self):
        raise OSError('This %s instance cannot tell its position' % self.__class__.__name__)

    # These methods partially implement a stream-like object interface.
    # See https://docs.python.org/library/io.html#io.IOBase

    def readable(self):
        return False

    def seekable(self):
        return False

    def writable(self):
        return False

    def writelines(self, lines):
        raise OSError('This %s instance is not writable' % self.__class__.__name__)


class HttpResponse(HttpResponseBase):
    """
    An HTTP response class with a string as content.

    This content can be read, appended to, or replaced.
    """

    streaming = False

    def __init__(self, content=b'', *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Content is a bytestring. See the `content` property methods.
        self.content = content

    def __repr__(self):
        return '<%(cls)s status_code=%(status_code)d%(content_type)s>' % {
            'cls': self.__class__.__name__,
            'status_code': self.status_code,
            'content_type': self._content_type_for_repr,
        }

    def serialize(self):
        """Full HTTP message, including headers, as a bytestring."""
        return self.serialize_headers() + b'\r\n\r\n' + self.content

    __bytes__ = serialize

    @property
    def content(self):
        return b''.join(self._container)

    @content.setter
    def content(self, value):
        # Consume iterators upon assignment to allow repeated iteration.
        if hasattr(value, '__iter__') and not isinstance(value, (bytes, str)):
            content = b''.join(self.make_bytes(chunk) for chunk in value)
            if hasattr(value, 'close'):
                try:
                    value.close()
                except Exception:
                    pass
        else:
            content = self.make_bytes(value)
        # Create a list of properly encoded bytestrings to support write().
self._container = [content] def __iter__(self): return iter(self._container) def write(self, content): self._container.append(self.make_bytes(content)) def tell(self): return len(self.content) def getvalue(self): return self.content def writable(self): return True def writelines(self, lines): for line in lines: self.write(line) class StreamingHttpResponse(HttpResponseBase): """ A streaming HTTP response class with an iterator as content. This should only be iterated once, when the response is streamed to the client. However, it can be appended to or replaced with a new iterator that wraps the original content (or yields entirely new content). """ streaming = True def __init__(self, streaming_content=(), *args, **kwargs): super().__init__(*args, **kwargs) # `streaming_content` should be an iterable of bytestrings. # See the `streaming_content` property methods. self.streaming_content = streaming_content @property def content(self): raise AttributeError( "This %s instance has no `content` attribute. Use " "`streaming_content` instead." % self.__class__.__name__ ) @property def streaming_content(self): return map(self.make_bytes, self._iterator) @streaming_content.setter def streaming_content(self, value): self._set_streaming_content(value) def _set_streaming_content(self, value): # Ensure we can never iterate on "value" more than once. self._iterator = iter(value) if hasattr(value, 'close'): self._closable_objects.append(value) def __iter__(self): return self.streaming_content def getvalue(self): return b''.join(self.streaming_content) class FileResponse(StreamingHttpResponse): """ A streaming HTTP response class optimized for files. """ block_size = 4096 def __init__(self, *args, as_attachment=False, filename='', **kwargs): self.as_attachment = as_attachment self.filename = filename super().__init__(*args, **kwargs) def _wrap_file_to_stream_close(self, filelike): """ Wrap the file-like close() with a version that calls FileResponse.close(). """ closing = False filelike_close = getattr(filelike, 'close', lambda: None) def file_wrapper_close(): nonlocal closing # Prevent an infinite loop since FileResponse.close() tries to # close the objects in self._closable_objects. if closing: return closing = True try: filelike_close() finally: self.close() filelike.close = file_wrapper_close def _set_streaming_content(self, value): if not hasattr(value, 'read'): self.file_to_stream = None return super()._set_streaming_content(value) self.file_to_stream = filelike = value # Add to closable objects before wrapping close(), since the filelike # might not have close(). if hasattr(filelike, 'close'): self._closable_objects.append(filelike) self._wrap_file_to_stream_close(filelike) value = iter(lambda: filelike.read(self.block_size), b'') self.set_headers(filelike) super()._set_streaming_content(value) def set_headers(self, filelike): """ Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content. 
""" encoding_map = { 'bzip2': 'application/x-bzip', 'gzip': 'application/gzip', 'xz': 'application/x-xz', } filename = getattr(filelike, 'name', None) filename = filename if (isinstance(filename, str) and filename) else self.filename if os.path.isabs(filename): self['Content-Length'] = os.path.getsize(filelike.name) elif hasattr(filelike, 'getbuffer'): self['Content-Length'] = filelike.getbuffer().nbytes if self.get('Content-Type', '').startswith('text/html'): if filename: content_type, encoding = mimetypes.guess_type(filename) # Encoding isn't set to prevent browsers from automatically # uncompressing files. content_type = encoding_map.get(encoding, content_type) self['Content-Type'] = content_type or 'application/octet-stream' else: self['Content-Type'] = 'application/octet-stream' filename = self.filename or os.path.basename(filename) if filename: disposition = 'attachment' if self.as_attachment else 'inline' try: filename.encode('ascii') file_expr = 'filename="{}"'.format(filename) except UnicodeEncodeError: file_expr = "filename*=utf-8''{}".format(quote(filename)) self['Content-Disposition'] = '{}; {}'.format(disposition, file_expr) elif self.as_attachment: self['Content-Disposition'] = 'attachment' class HttpResponseRedirectBase(HttpResponse): allowed_schemes = ['http', 'https', 'ftp'] def __init__(self, redirect_to, *args, **kwargs): super().__init__(*args, **kwargs) self['Location'] = iri_to_uri(redirect_to) parsed = urlparse(str(redirect_to)) if parsed.scheme and parsed.scheme not in self.allowed_schemes: raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme) url = property(lambda self: self['Location']) def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'url': self.url, } class HttpResponseRedirect(HttpResponseRedirectBase): status_code = 302 class HttpResponsePermanentRedirect(HttpResponseRedirectBase): status_code = 301 class HttpResponseNotModified(HttpResponse): status_code = 304 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) del self['content-type'] @HttpResponse.content.setter def content(self, value): if value: raise AttributeError("You cannot set content to a 304 (Not Modified) response") self._container = [] class HttpResponseBadRequest(HttpResponse): status_code = 400 class HttpResponseNotFound(HttpResponse): status_code = 404 class HttpResponseForbidden(HttpResponse): status_code = 403 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, permitted_methods, *args, **kwargs): super().__init__(*args, **kwargs) self['Allow'] = ', '.join(permitted_methods) def __repr__(self): return '<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'methods': self['Allow'], } class HttpResponseGone(HttpResponse): status_code = 410 class HttpResponseServerError(HttpResponse): status_code = 500 class Http404(Exception): pass class JsonResponse(HttpResponse): """ An HTTP response class that consumes data to be serialized to JSON. :param data: Data to be dumped into json. By default only ``dict`` objects are allowed to be passed due to a security flaw before EcmaScript 5. See the ``safe`` parameter for more information. :param encoder: Should be a json encoder class. 
Defaults to ``django.core.serializers.json.DjangoJSONEncoder``. :param safe: Controls if only ``dict`` objects may be serialized. Defaults to ``True``. :param json_dumps_params: A dictionary of kwargs passed to json.dumps(). """ def __init__(self, data, encoder=DjangoJSONEncoder, safe=True, json_dumps_params=None, **kwargs): if safe and not isinstance(data, dict): raise TypeError( 'In order to allow non-dict objects to be serialized set the ' 'safe parameter to False.' ) if json_dumps_params is None: json_dumps_params = {} kwargs.setdefault('content_type', 'application/json') data = json.dumps(data, cls=encoder, **json_dumps_params) super().__init__(content=data, **kwargs)
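# Illustrative usage of the response classes above -- a minimal sketch,
# assuming settings are configured (DEFAULT_CHARSET is consulted when content
# is encoded); the header and cookie names are made up.
from django.conf import settings

if not settings.configured:
    settings.configure(USE_I18N=False)

from django.http import HttpResponse, JsonResponse

response = HttpResponse('hello', content_type='text/plain')
response['X-Example'] = 'demo'            # dict-style header access
response.set_cookie('session_demo', 'abc123', max_age=3600, httponly=True)
response.write(' world')                  # HttpResponse supports write()
assert response.content == b'hello world'

# JsonResponse serializes dicts by default; pass safe=False for other types.
json_response = JsonResponse([1, 2, 3], safe=False)
assert json_response['Content-Type'] == 'application/json'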
import cgi import codecs import copy from io import BytesIO from itertools import chain from urllib.parse import quote, urlencode, urljoin, urlsplit from django.conf import settings from django.core import signing from django.core.exceptions import ( DisallowedHost, ImproperlyConfigured, RequestDataTooBig, ) from django.core.files import uploadhandler from django.http.multipartparser import MultiPartParser, MultiPartParserError from django.utils.datastructures import ( CaseInsensitiveMapping, ImmutableList, MultiValueDict, ) from django.utils.encoding import escape_uri_path, iri_to_uri from django.utils.functional import cached_property from django.utils.http import is_same_domain, limited_parse_qsl from django.utils.regex_helper import _lazy_re_compile RAISE_ERROR = object() host_validation_re = _lazy_re_compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9\.:]+\])(:\d+)?$") class UnreadablePostError(OSError): pass class RawPostDataException(Exception): """ You cannot access raw_post_data from a request that has multipart/* POST data if it has been accessed via POST, FILES, etc.. """ pass class HttpRequest: """A basic HTTP request.""" # The encoding used in GET/POST dicts. None means use default setting. _encoding = None _upload_handlers = [] def __init__(self): # WARNING: The `WSGIRequest` subclass doesn't call `super`. # Any variable assignment made here should also happen in # `WSGIRequest.__init__()`. self.GET = QueryDict(mutable=True) self.POST = QueryDict(mutable=True) self.COOKIES = {} self.META = {} self.FILES = MultiValueDict() self.path = '' self.path_info = '' self.method = None self.resolver_match = None self.content_type = None self.content_params = None def __repr__(self): if self.method is None or not self.get_full_path(): return '<%s>' % self.__class__.__name__ return '<%s: %s %r>' % (self.__class__.__name__, self.method, self.get_full_path()) @cached_property def headers(self): return HttpHeaders(self.META) def _set_content_type_params(self, meta): """Set content_type, content_params, and encoding.""" self.content_type, self.content_params = cgi.parse_header(meta.get('CONTENT_TYPE', '')) if 'charset' in self.content_params: try: codecs.lookup(self.content_params['charset']) except LookupError: pass else: self.encoding = self.content_params['charset'] def _get_raw_host(self): """ Return the HTTP host using the environment or request headers. Skip allowed hosts protection, so may return an insecure host. """ # We try three options, in order of decreasing preference. if settings.USE_X_FORWARDED_HOST and ( 'HTTP_X_FORWARDED_HOST' in self.META): host = self.META['HTTP_X_FORWARDED_HOST'] elif 'HTTP_HOST' in self.META: host = self.META['HTTP_HOST'] else: # Reconstruct the host using the algorithm from PEP 333. host = self.META['SERVER_NAME'] server_port = self.get_port() if server_port != ('443' if self.is_secure() else '80'): host = '%s:%s' % (host, server_port) return host def get_host(self): """Return the HTTP host using the environment or request headers.""" host = self._get_raw_host() # Allow variants of localhost if ALLOWED_HOSTS is empty and DEBUG=True. allowed_hosts = settings.ALLOWED_HOSTS if settings.DEBUG and not allowed_hosts: allowed_hosts = ['localhost', '127.0.0.1', '[::1]'] domain, port = split_domain_port(host) if domain and validate_host(domain, allowed_hosts): return host else: msg = "Invalid HTTP_HOST header: %r." % host if domain: msg += " You may need to add %r to ALLOWED_HOSTS." 
% domain else: msg += " The domain name provided is not valid according to RFC 1034/1035." raise DisallowedHost(msg) def get_port(self): """Return the port number for the request as a string.""" if settings.USE_X_FORWARDED_PORT and 'HTTP_X_FORWARDED_PORT' in self.META: port = self.META['HTTP_X_FORWARDED_PORT'] else: port = self.META['SERVER_PORT'] return str(port) def get_full_path(self, force_append_slash=False): return self._get_full_path(self.path, force_append_slash) def get_full_path_info(self, force_append_slash=False): return self._get_full_path(self.path_info, force_append_slash) def _get_full_path(self, path, force_append_slash): # RFC 3986 requires query string arguments to be in the ASCII range. # Rather than crash if this doesn't happen, we encode defensively. return '%s%s%s' % ( escape_uri_path(path), '/' if force_append_slash and not path.endswith('/') else '', ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) if self.META.get('QUERY_STRING', '') else '' ) def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None): """ Attempt to return a signed cookie. If the signature fails or the cookie has expired, raise an exception, unless the `default` argument is provided, in which case return that value. """ try: cookie_value = self.COOKIES[key] except KeyError: if default is not RAISE_ERROR: return default else: raise try: value = signing.get_cookie_signer(salt=key + salt).unsign( cookie_value, max_age=max_age) except signing.BadSignature: if default is not RAISE_ERROR: return default else: raise return value def get_raw_uri(self): """ Return an absolute URI from variables available in this request. Skip allowed hosts protection, so may return insecure URI. """ return '{scheme}://{host}{path}'.format( scheme=self.scheme, host=self._get_raw_host(), path=self.get_full_path(), ) def build_absolute_uri(self, location=None): """ Build an absolute URI from the location and the variables available in this request. If no ``location`` is specified, build the absolute URI using request.get_full_path(). If the location is absolute, convert it to an RFC 3987 compliant URI and return it. If location is relative or is scheme-relative (i.e., ``//example.com/``), urljoin() it to a base URL constructed from the request variables. """ if location is None: # Make it an absolute url (but schemeless and domainless) for the # edge case that the path starts with '//'. location = '//%s' % self.get_full_path() bits = urlsplit(location) if not (bits.scheme and bits.netloc): # Handle the simple, most common case. If the location is absolute # and a scheme or host (netloc) isn't provided, skip an expensive # urljoin() as long as no path segments are '.' or '..'. if (bits.path.startswith('/') and not bits.scheme and not bits.netloc and '/./' not in bits.path and '/../' not in bits.path): # If location starts with '//' but has no netloc, reuse the # schema and netloc from the current request. Strip the double # slashes and continue as if it wasn't specified. if location.startswith('//'): location = location[2:] location = self._current_scheme_host + location else: # Join the constructed URL with the provided location, which # allows the provided location to apply query strings to the # base path. location = urljoin(self._current_scheme_host + self.path, location) return iri_to_uri(location) @cached_property def _current_scheme_host(self): return '{}://{}'.format(self.scheme, self.get_host()) def _get_scheme(self): """ Hook for subclasses like WSGIRequest to implement. 
Return 'http' by default. """ return 'http' @property def scheme(self): if settings.SECURE_PROXY_SSL_HEADER: try: header, secure_value = settings.SECURE_PROXY_SSL_HEADER except ValueError: raise ImproperlyConfigured( 'The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.' ) header_value = self.META.get(header) if header_value is not None: return 'https' if header_value == secure_value else 'http' return self._get_scheme() def is_secure(self): return self.scheme == 'https' def is_ajax(self): return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest' @property def encoding(self): return self._encoding @encoding.setter def encoding(self, val): """ Set the encoding used for GET/POST accesses. If the GET or POST dictionary has already been created, remove and recreate it on the next access (so that it is decoded correctly). """ self._encoding = val if hasattr(self, 'GET'): del self.GET if hasattr(self, '_post'): del self._post def _initialize_handlers(self): self._upload_handlers = [uploadhandler.load_handler(handler, self) for handler in settings.FILE_UPLOAD_HANDLERS] @property def upload_handlers(self): if not self._upload_handlers: # If there are no upload handlers defined, initialize them from settings. self._initialize_handlers() return self._upload_handlers @upload_handlers.setter def upload_handlers(self, upload_handlers): if hasattr(self, '_files'): raise AttributeError("You cannot set the upload handlers after the upload has been processed.") self._upload_handlers = upload_handlers def parse_file_upload(self, META, post_data): """Return a tuple of (POST QueryDict, FILES MultiValueDict).""" self.upload_handlers = ImmutableList( self.upload_handlers, warning="You cannot alter upload handlers after the upload has been processed." ) parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding) return parser.parse() @property def body(self): if not hasattr(self, '_body'): if self._read_started: raise RawPostDataException("You cannot access body after reading from request's data stream") # Limit the maximum request data size that will be handled in-memory. if (settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and int(self.META.get('CONTENT_LENGTH') or 0) > settings.DATA_UPLOAD_MAX_MEMORY_SIZE): raise RequestDataTooBig('Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.') try: self._body = self.read() except OSError as e: raise UnreadablePostError(*e.args) from e self._stream = BytesIO(self._body) return self._body def _mark_post_parse_error(self): self._post = QueryDict() self._files = MultiValueDict() def _load_post_and_files(self): """Populate self._post and self._files if the content-type is a form type""" if self.method != 'POST': self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict() return if self._read_started and not hasattr(self, '_body'): self._mark_post_parse_error() return if self.content_type == 'multipart/form-data': if hasattr(self, '_body'): # Use already read data data = BytesIO(self._body) else: data = self try: self._post, self._files = self.parse_file_upload(self.META, data) except MultiPartParserError: # An error occurred while parsing POST data. Since when # formatting the error the request handler might access # self.POST, set self._post and self._file to prevent # attempts to parse POST data again. 
self._mark_post_parse_error() raise elif self.content_type == 'application/x-www-form-urlencoded': self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict() else: self._post, self._files = QueryDict(encoding=self._encoding), MultiValueDict() def close(self): if hasattr(self, '_files'): for f in chain.from_iterable(l[1] for l in self._files.lists()): f.close() # File-like and iterator interface. # # Expects self._stream to be set to an appropriate source of bytes by # a corresponding request subclass (e.g. WSGIRequest). # Also when request data has already been read by request.POST or # request.body, self._stream points to a BytesIO instance # containing that data. def read(self, *args, **kwargs): self._read_started = True try: return self._stream.read(*args, **kwargs) except OSError as e: raise UnreadablePostError(*e.args) from e def readline(self, *args, **kwargs): self._read_started = True try: return self._stream.readline(*args, **kwargs) except OSError as e: raise UnreadablePostError(*e.args) from e def __iter__(self): return iter(self.readline, b'') def readlines(self): return list(self) class HttpHeaders(CaseInsensitiveMapping): HTTP_PREFIX = 'HTTP_' # PEP 333 gives two headers which aren't prepended with HTTP_. UNPREFIXED_HEADERS = {'CONTENT_TYPE', 'CONTENT_LENGTH'} def __init__(self, environ): headers = {} for header, value in environ.items(): name = self.parse_header_name(header) if name: headers[name] = value super().__init__(headers) def __getitem__(self, key): """Allow header lookup using underscores in place of hyphens.""" return super().__getitem__(key.replace('_', '-')) @classmethod def parse_header_name(cls, header): if header.startswith(cls.HTTP_PREFIX): header = header[len(cls.HTTP_PREFIX):] elif header not in cls.UNPREFIXED_HEADERS: return None return header.replace('_', '-').title() class QueryDict(MultiValueDict): """ A specialized MultiValueDict which represents a query string. A QueryDict can be used to represent GET or POST data. It subclasses MultiValueDict since keys in such data can be repeated, for instance in the data from a form with a <select multiple> field. By default QueryDicts are immutable, though the copy() method will always return a mutable copy. Both keys and values set on this class are converted from the given encoding (DEFAULT_CHARSET by default) to str. """ # These are both reset in __init__, but is specified here at the class # level so that unpickling will have valid values _mutable = True _encoding = None def __init__(self, query_string=None, mutable=False, encoding=None): super().__init__() self.encoding = encoding or settings.DEFAULT_CHARSET query_string = query_string or '' parse_qsl_kwargs = { 'keep_blank_values': True, 'fields_limit': settings.DATA_UPLOAD_MAX_NUMBER_FIELDS, 'encoding': self.encoding, } if isinstance(query_string, bytes): # query_string normally contains URL-encoded data, a subset of ASCII. try: query_string = query_string.decode(self.encoding) except UnicodeDecodeError: # ... but some user agents are misbehaving :-( query_string = query_string.decode('iso-8859-1') for key, value in limited_parse_qsl(query_string, **parse_qsl_kwargs): self.appendlist(key, value) self._mutable = mutable @classmethod def fromkeys(cls, iterable, value='', mutable=False, encoding=None): """ Return a new QueryDict with keys (may be repeated) from an iterable and values from value. 
""" q = cls('', mutable=True, encoding=encoding) for key in iterable: q.appendlist(key, value) if not mutable: q._mutable = False return q @property def encoding(self): if self._encoding is None: self._encoding = settings.DEFAULT_CHARSET return self._encoding @encoding.setter def encoding(self, value): self._encoding = value def _assert_mutable(self): if not self._mutable: raise AttributeError("This QueryDict instance is immutable") def __setitem__(self, key, value): self._assert_mutable() key = bytes_to_text(key, self.encoding) value = bytes_to_text(value, self.encoding) super().__setitem__(key, value) def __delitem__(self, key): self._assert_mutable() super().__delitem__(key) def __copy__(self): result = self.__class__('', mutable=True, encoding=self.encoding) for key, value in self.lists(): result.setlist(key, value) return result def __deepcopy__(self, memo): result = self.__class__('', mutable=True, encoding=self.encoding) memo[id(self)] = result for key, value in self.lists(): result.setlist(copy.deepcopy(key, memo), copy.deepcopy(value, memo)) return result def setlist(self, key, list_): self._assert_mutable() key = bytes_to_text(key, self.encoding) list_ = [bytes_to_text(elt, self.encoding) for elt in list_] super().setlist(key, list_) def setlistdefault(self, key, default_list=None): self._assert_mutable() return super().setlistdefault(key, default_list) def appendlist(self, key, value): self._assert_mutable() key = bytes_to_text(key, self.encoding) value = bytes_to_text(value, self.encoding) super().appendlist(key, value) def pop(self, key, *args): self._assert_mutable() return super().pop(key, *args) def popitem(self): self._assert_mutable() return super().popitem() def clear(self): self._assert_mutable() super().clear() def setdefault(self, key, default=None): self._assert_mutable() key = bytes_to_text(key, self.encoding) default = bytes_to_text(default, self.encoding) return super().setdefault(key, default) def copy(self): """Return a mutable copy of this object.""" return self.__deepcopy__({}) def urlencode(self, safe=None): """ Return an encoded string of all query string arguments. `safe` specifies characters which don't require quoting, for example:: >>> q = QueryDict(mutable=True) >>> q['next'] = '/a&b/' >>> q.urlencode() 'next=%2Fa%26b%2F' >>> q.urlencode(safe='/') 'next=/a%26b/' """ output = [] if safe: safe = safe.encode(self.encoding) def encode(k, v): return '%s=%s' % ((quote(k, safe), quote(v, safe))) else: def encode(k, v): return urlencode({k: v}) for k, list_ in self.lists(): output.extend( encode(k.encode(self.encoding), str(v).encode(self.encoding)) for v in list_ ) return '&'.join(output) # It's neither necessary nor appropriate to use # django.utils.encoding.force_str() for parsing URLs and form inputs. Thus, # this slightly more restricted function, used by QueryDict. def bytes_to_text(s, encoding): """ Convert bytes objects to strings, using the given encoding. Illegally encoded input characters are replaced with Unicode "unknown" codepoint (\ufffd). Return any non-bytes objects without change. """ if isinstance(s, bytes): return str(s, encoding, 'replace') else: return s def split_domain_port(host): """ Return a (domain, port) tuple from a given host. Returned domain is lowercased. If the host is invalid, the domain will be empty. """ host = host.lower() if not host_validation_re.match(host): return '', '' if host[-1] == ']': # It's an IPv6 address without a port. 
return host, '' bits = host.rsplit(':', 1) domain, port = bits if len(bits) == 2 else (bits[0], '') # Remove a trailing dot (if present) from the domain. domain = domain[:-1] if domain.endswith('.') else domain return domain, port def validate_host(host, allowed_hosts): """ Validate the given host for this site. Check that the host looks valid and matches a host or host pattern in the given list of ``allowed_hosts``. Any pattern beginning with a period matches a domain and all its subdomains (e.g. ``.example.com`` matches ``example.com`` and any subdomain), ``*`` matches anything, and anything else must match exactly. Note: This function assumes that the given host is lowercased and has already had the port, if any, stripped off. Return ``True`` for a valid host, ``False`` otherwise. """ return any(pattern == '*' or is_same_domain(host, pattern) for pattern in allowed_hosts)
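# Illustrative usage of QueryDict and the host helpers above -- a minimal
# sketch, assuming configured settings (DEFAULT_CHARSET and
# DATA_UPLOAD_MAX_NUMBER_FIELDS are read during parsing); the host and
# pattern strings are made up.
from django.conf import settings

if not settings.configured:
    settings.configure(USE_I18N=False)

from django.http import QueryDict

q = QueryDict('a=1&a=2&b=3')
assert q['a'] == '2'                 # __getitem__ returns the last value
assert q.getlist('a') == ['1', '2']  # all values are preserved

try:
    q['c'] = '4'                     # QueryDicts are immutable by default
except AttributeError:
    pass

mutable = q.copy()                   # copy() always returns a mutable copy
mutable['c'] = '4'
print(mutable.urlencode())           # 'a=1&a=2&b=3&c=4'

# split_domain_port()/validate_host() back get_host()'s ALLOWED_HOSTS check.
from django.http.request import split_domain_port, validate_host

domain, port = split_domain_port('example.com:8000')
assert validate_host(domain, ['.example.com'])  # leading dot matches subdomains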
import logging from functools import update_wrapper from django.core.exceptions import ImproperlyConfigured from django.http import ( HttpResponse, HttpResponseGone, HttpResponseNotAllowed, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.template.response import TemplateResponse from django.urls import reverse from django.utils.decorators import classonlymethod logger = logging.getLogger('django.request') class ContextMixin: """ A default context mixin that passes the keyword arguments received by get_context_data() as the template context. """ extra_context = None def get_context_data(self, **kwargs): kwargs.setdefault('view', self) if self.extra_context is not None: kwargs.update(self.extra_context) return kwargs class View: """ Intentionally simple parent class for all views. Only implements dispatch-by-method and simple sanity checking. """ http_method_names = ['get', 'post', 'put', 'patch', 'delete', 'head', 'options', 'trace'] def __init__(self, **kwargs): """ Constructor. Called in the URLconf; can contain helpful extra keyword arguments, and other things. """ # Go through keyword arguments, and either save their values to our # instance, or raise an error. for key, value in kwargs.items(): setattr(self, key, value) @classonlymethod def as_view(cls, **initkwargs): """Main entry point for a request-response process.""" for key in initkwargs: if key in cls.http_method_names: raise TypeError("You tried to pass in the %s method name as a " "keyword argument to %s(). Don't do that." % (key, cls.__name__)) if not hasattr(cls, key): raise TypeError("%s() received an invalid keyword %r. as_view " "only accepts arguments that are already " "attributes of the class." % (cls.__name__, key)) def view(request, *args, **kwargs): self = cls(**initkwargs) self.setup(request, *args, **kwargs) if not hasattr(self, 'request'): raise AttributeError( "%s instance has no 'request' attribute. Did you override " "setup() and forget to call super()?" % cls.__name__ ) return self.dispatch(request, *args, **kwargs) view.view_class = cls view.view_initkwargs = initkwargs # take name and docstring from class update_wrapper(view, cls, updated=()) # and possible attributes set by decorators # like csrf_exempt from dispatch update_wrapper(view, cls.dispatch, assigned=()) return view def setup(self, request, *args, **kwargs): """Initialize attributes shared by all view methods.""" if hasattr(self, 'get') and not hasattr(self, 'head'): self.head = self.get self.request = request self.args = args self.kwargs = kwargs def dispatch(self, request, *args, **kwargs): # Try to dispatch to the right method; if a method doesn't exist, # defer to the error handler. Also defer to the error handler if the # request method isn't on the approved list. 
if request.method.lower() in self.http_method_names: handler = getattr(self, request.method.lower(), self.http_method_not_allowed) else: handler = self.http_method_not_allowed return handler(request, *args, **kwargs) def http_method_not_allowed(self, request, *args, **kwargs): logger.warning( 'Method Not Allowed (%s): %s', request.method, request.path, extra={'status_code': 405, 'request': request} ) return HttpResponseNotAllowed(self._allowed_methods()) def options(self, request, *args, **kwargs): """Handle responding to requests for the OPTIONS HTTP verb.""" response = HttpResponse() response['Allow'] = ', '.join(self._allowed_methods()) response['Content-Length'] = '0' return response def _allowed_methods(self): return [m.upper() for m in self.http_method_names if hasattr(self, m)] class TemplateResponseMixin: """A mixin that can be used to render a template.""" template_name = None template_engine = None response_class = TemplateResponse content_type = None def render_to_response(self, context, **response_kwargs): """ Return a response, using the `response_class` for this view, with a template rendered with the given context. Pass response_kwargs to the constructor of the response class. """ response_kwargs.setdefault('content_type', self.content_type) return self.response_class( request=self.request, template=self.get_template_names(), context=context, using=self.template_engine, **response_kwargs ) def get_template_names(self): """ Return a list of template names to be used for the request. Must return a list. May not be called if render_to_response() is overridden. """ if self.template_name is None: raise ImproperlyConfigured( "TemplateResponseMixin requires either a definition of " "'template_name' or an implementation of 'get_template_names()'") else: return [self.template_name] class TemplateView(TemplateResponseMixin, ContextMixin, View): """ Render a template. Pass keyword arguments from the URLconf to the context. """ def get(self, request, *args, **kwargs): context = self.get_context_data(**kwargs) return self.render_to_response(context) class RedirectView(View): """Provide a redirect on any GET request.""" permanent = False url = None pattern_name = None query_string = False def get_redirect_url(self, *args, **kwargs): """ Return the URL redirect to. Keyword arguments from the URL pattern match generating the redirect request are provided as kwargs to this method. """ if self.url: url = self.url % kwargs elif self.pattern_name: url = reverse(self.pattern_name, args=args, kwargs=kwargs) else: return None args = self.request.META.get('QUERY_STRING', '') if args and self.query_string: url = "%s?%s" % (url, args) return url def get(self, request, *args, **kwargs): url = self.get_redirect_url(*args, **kwargs) if url: if self.permanent: return HttpResponsePermanentRedirect(url) else: return HttpResponseRedirect(url) else: logger.warning( 'Gone: %s', request.path, extra={'status_code': 410, 'request': request} ) return HttpResponseGone() def head(self, request, *args, **kwargs): return self.get(request, *args, **kwargs) def post(self, request, *args, **kwargs): return self.get(request, *args, **kwargs) def options(self, request, *args, **kwargs): return self.get(request, *args, **kwargs) def delete(self, request, *args, **kwargs): return self.get(request, *args, **kwargs) def put(self, request, *args, **kwargs): return self.get(request, *args, **kwargs) def patch(self, request, *args, **kwargs): return self.get(request, *args, **kwargs)
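# Illustrative usage of the class-based views above -- a minimal sketch of a
# URLconf wiring; 'about.html', 'legal.html', the pattern names, and the
# extra_context value are placeholders.
from django.urls import path
from django.views.generic import RedirectView, TemplateView


class AboutView(TemplateView):
    template_name = 'about.html'
    extra_context = {'company': 'Example Inc.'}  # merged in by get_context_data()


urlpatterns = [
    path('about/', AboutView.as_view(), name='about'),
    # as_view() also accepts initkwargs for existing class attributes:
    path('legal/', TemplateView.as_view(template_name='legal.html')),
    # pattern_name is reversed per request; permanent=True issues a 301.
    path('old-about/', RedirectView.as_view(pattern_name='about', permanent=True)),
]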
"""Translation helper functions.""" import functools import gettext as gettext_module import os import re import sys import warnings from asgiref.local import Local from django.apps import apps from django.conf import settings from django.conf.locale import LANG_INFO from django.core.exceptions import AppRegistryNotReady from django.core.signals import setting_changed from django.dispatch import receiver from django.utils.regex_helper import _lazy_re_compile from django.utils.safestring import SafeData, mark_safe from . import to_language, to_locale # Translations are cached in a dictionary for every language. # The active translations are stored by threadid to make them thread local. _translations = {} _active = Local() # The default translation is based on the settings file. _default = None # magic gettext number to separate context from message CONTEXT_SEPARATOR = "\x04" # Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9 # and RFC 3066, section 2.1 accept_language_re = _lazy_re_compile(r''' ([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*" (?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:\.0{,3})?))? # Optional "q=1.00", "q=0.8" (?:\s*,\s*|$) # Multiple accepts per header. ''', re.VERBOSE) language_code_re = _lazy_re_compile( r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*(?:@[a-z0-9]{1,20})?$', re.IGNORECASE ) language_code_prefix_re = _lazy_re_compile(r'^/(\w+([@-]\w+)?)(/|$)') @receiver(setting_changed) def reset_cache(**kwargs): """ Reset global state when LANGUAGES setting has been changed, as some languages should no longer be accepted. """ if kwargs['setting'] in ('LANGUAGES', 'LANGUAGE_CODE'): check_for_language.cache_clear() get_languages.cache_clear() get_supported_language_variant.cache_clear() class DjangoTranslation(gettext_module.GNUTranslations): """ Set up the GNUTranslations context with regard to output charset. This translation object will be constructed out of multiple GNUTranslations objects by merging their catalogs. It will construct an object for the requested language and add a fallback to the default language, if it's different from the requested language. """ domain = 'django' def __init__(self, language, domain=None, localedirs=None): """Create a GNUTranslations() using many locale directories""" gettext_module.GNUTranslations.__init__(self) if domain is not None: self.domain = domain self.__language = language self.__to_language = to_language(language) self.__locale = to_locale(language) self._catalog = None # If a language doesn't have a catalog, use the Germanic default for # pluralization: anything except one is pluralized. self.plural = lambda n: int(n != 1) if self.domain == 'django': if localedirs is not None: # A module-level cache is used for caching 'django' translations warnings.warn("localedirs is ignored when domain is 'django'.", RuntimeWarning) localedirs = None self._init_translation_catalog() if localedirs: for localedir in localedirs: translation = self._new_gnu_trans(localedir) self.merge(translation) else: self._add_installed_apps_translations() self._add_local_translations() if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None: # default lang should have at least one translation file available. raise OSError('No translation files found for default language %s.' % settings.LANGUAGE_CODE) self._add_fallback(localedirs) if self._catalog is None: # No catalogs found for this language, set an empty catalog. 
self._catalog = {} def __repr__(self): return "<DjangoTranslation lang:%s>" % self.__language def _new_gnu_trans(self, localedir, use_null_fallback=True): """ Return a mergeable gettext.GNUTranslations instance. A convenience wrapper. By default gettext uses 'fallback=False'. Using param `use_null_fallback` to avoid confusion with any other references to 'fallback'. """ return gettext_module.translation( domain=self.domain, localedir=localedir, languages=[self.__locale], fallback=use_null_fallback, ) def _init_translation_catalog(self): """Create a base catalog using global django translations.""" settingsfile = sys.modules[settings.__module__].__file__ localedir = os.path.join(os.path.dirname(settingsfile), 'locale') translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_installed_apps_translations(self): """Merge translations from each installed app.""" try: app_configs = reversed(list(apps.get_app_configs())) except AppRegistryNotReady: raise AppRegistryNotReady( "The translation infrastructure cannot be initialized before the " "apps registry is ready. Check that you don't make non-lazy " "gettext calls at import time.") for app_config in app_configs: localedir = os.path.join(app_config.path, 'locale') if os.path.exists(localedir): translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_local_translations(self): """Merge translations defined in LOCALE_PATHS.""" for localedir in reversed(settings.LOCALE_PATHS): translation = self._new_gnu_trans(localedir) self.merge(translation) def _add_fallback(self, localedirs=None): """Set the GNUTranslations() fallback with the default language.""" # Don't set a fallback for the default language or any English variant # (as it's empty, so it'll ALWAYS fall back to the default language) if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'): return if self.domain == 'django': # Get from cache default_translation = translation(settings.LANGUAGE_CODE) else: default_translation = DjangoTranslation( settings.LANGUAGE_CODE, domain=self.domain, localedirs=localedirs ) self.add_fallback(default_translation) def merge(self, other): """Merge another translation into this catalog.""" if not getattr(other, '_catalog', None): return # NullTranslations() has no _catalog if self._catalog is None: # Take plural and _info from first catalog found (generally Django's). self.plural = other.plural self._info = other._info.copy() self._catalog = other._catalog.copy() else: self._catalog.update(other._catalog) if other._fallback: self.add_fallback(other._fallback) def language(self): """Return the translation language.""" return self.__language def to_language(self): """Return the translation language name.""" return self.__to_language def translation(language): """ Return a translation object in the default 'django' domain. """ global _translations if language not in _translations: _translations[language] = DjangoTranslation(language) return _translations[language] def activate(language): """ Fetch the translation object for a given language and install it as the current translation object for the current thread. """ if not language: return _active.value = translation(language) def deactivate(): """ Uninstall the active translation object so that further _() calls resolve to the default translation object. """ if hasattr(_active, "value"): del _active.value def deactivate_all(): """ Make the active translation object a NullTranslations() instance. 
This is useful when we want delayed translations to appear as the original string for some reason. """ _active.value = gettext_module.NullTranslations() _active.value.to_language = lambda *args: None def get_language(): """Return the currently selected language.""" t = getattr(_active, "value", None) if t is not None: try: return t.to_language() except AttributeError: pass # If we don't have a real translation object, assume it's the default language. return settings.LANGUAGE_CODE def get_language_bidi(): """ Return selected language's BiDi layout. * False = left-to-right layout * True = right-to-left layout """ lang = get_language() if lang is None: return False else: base_lang = get_language().split('-')[0] return base_lang in settings.LANGUAGES_BIDI def catalog(): """ Return the current active catalog for further processing. This can be used if you need to modify the catalog or want to access the whole message catalog instead of just translating one string. """ global _default t = getattr(_active, "value", None) if t is not None: return t if _default is None: _default = translation(settings.LANGUAGE_CODE) return _default def gettext(message): """ Translate the 'message' string. It uses the current thread to find the translation object to use. If no current translation is activated, the message will be run through the default translation object. """ global _default eol_message = message.replace('\r\n', '\n').replace('\r', '\n') if eol_message: _default = _default or translation(settings.LANGUAGE_CODE) translation_object = getattr(_active, "value", _default) result = translation_object.gettext(eol_message) else: # Return an empty value of the corresponding type if an empty message # is given, instead of metadata, which is the default gettext behavior. result = type(message)('') if isinstance(message, SafeData): return mark_safe(result) return result def pgettext(context, message): msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message) result = gettext(msg_with_ctxt) if CONTEXT_SEPARATOR in result: # Translation not found result = message elif isinstance(message, SafeData): result = mark_safe(result) return result def gettext_noop(message): """ Mark strings for translation but don't translate them now. This can be used to store strings in global variables that should stay in the base language (because they might be used externally) and will be translated later. """ return message def do_ntranslate(singular, plural, number, translation_function): global _default t = getattr(_active, "value", None) if t is not None: return getattr(t, translation_function)(singular, plural, number) if _default is None: _default = translation(settings.LANGUAGE_CODE) return getattr(_default, translation_function)(singular, plural, number) def ngettext(singular, plural, number): """ Return a string of the translation of either the singular or plural, based on the number. """ return do_ntranslate(singular, plural, number, 'ngettext') def npgettext(context, singular, plural, number): msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, singular), "%s%s%s" % (context, CONTEXT_SEPARATOR, plural), number) result = ngettext(*msgs_with_ctxt) if CONTEXT_SEPARATOR in result: # Translation not found result = ngettext(singular, plural, number) return result def all_locale_paths(): """ Return a list of paths to user-provides languages files. 
""" globalpath = os.path.join( os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') app_paths = [] for app_config in apps.get_app_configs(): locale_path = os.path.join(app_config.path, 'locale') if os.path.exists(locale_path): app_paths.append(locale_path) return [globalpath, *settings.LOCALE_PATHS, *app_paths] @functools.lru_cache(maxsize=1000) def check_for_language(lang_code): """ Check whether there is a global language file for the given language code. This is used to decide whether a user-provided language is available. lru_cache should have a maxsize to prevent from memory exhaustion attacks, as the provided language codes are taken from the HTTP request. See also <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>. """ # First, a quick check to make sure lang_code is well-formed (#21458) if lang_code is None or not language_code_re.search(lang_code): return False return any( gettext_module.find('django', path, [to_locale(lang_code)]) is not None for path in all_locale_paths() ) @functools.lru_cache() def get_languages(): """ Cache of settings.LANGUAGES in a dictionary for easy lookups by key. """ return dict(settings.LANGUAGES) @functools.lru_cache(maxsize=1000) def get_supported_language_variant(lang_code, strict=False): """ Return the language code that's listed in supported languages, possibly selecting a more generic variant. Raise LookupError if nothing is found. If `strict` is False (the default), look for a country-specific variant when neither the language code nor its generic variant is found. lru_cache should have a maxsize to prevent from memory exhaustion attacks, as the provided language codes are taken from the HTTP request. See also <https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>. """ if lang_code: # If 'fr-ca' is not supported, try special fallback or language-only 'fr'. possible_lang_codes = [lang_code] try: possible_lang_codes.extend(LANG_INFO[lang_code]['fallback']) except KeyError: pass generic_lang_code = lang_code.split('-')[0] possible_lang_codes.append(generic_lang_code) supported_lang_codes = get_languages() for code in possible_lang_codes: if code in supported_lang_codes and check_for_language(code): return code if not strict: # if fr-fr is not supported, try fr-ca. for supported_code in supported_lang_codes: if supported_code.startswith(generic_lang_code + '-'): return supported_code raise LookupError(lang_code) def get_language_from_path(path, strict=False): """ Return the language code if there's a valid language code found in `path`. If `strict` is False (the default), look for a country-specific variant when neither the language code nor its generic variant is found. """ regex_match = language_code_prefix_re.match(path) if not regex_match: return None lang_code = regex_match.group(1) try: return get_supported_language_variant(lang_code, strict=strict) except LookupError: return None def get_language_from_request(request, check_path=False): """ Analyze the request to find what language the user wants the system to show. Only languages listed in settings.LANGUAGES are taken into account. If the user requests a sublanguage where we have a main language, we send out the main language. If check_path is True, the URL path prefix will be checked for a language code, otherwise this is skipped for backwards compatibility. 
""" if check_path: lang_code = get_language_from_path(request.path_info) if lang_code is not None: return lang_code lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME) if lang_code is not None and lang_code in get_languages() and check_for_language(lang_code): return lang_code try: return get_supported_language_variant(lang_code) except LookupError: pass accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '') for accept_lang, unused in parse_accept_lang_header(accept): if accept_lang == '*': break if not language_code_re.search(accept_lang): continue try: return get_supported_language_variant(accept_lang) except LookupError: continue try: return get_supported_language_variant(settings.LANGUAGE_CODE) except LookupError: return settings.LANGUAGE_CODE @functools.lru_cache(maxsize=1000) def parse_accept_lang_header(lang_string): """ Parse the lang_string, which is the body of an HTTP Accept-Language header, and return a tuple of (lang, q-value), ordered by 'q' values. Return an empty tuple if there are any format errors in lang_string. """ result = [] pieces = accept_language_re.split(lang_string.lower()) if pieces[-1]: return () for i in range(0, len(pieces) - 1, 3): first, lang, priority = pieces[i:i + 3] if first: return () if priority: priority = float(priority) else: priority = 1.0 result.append((lang, priority)) result.sort(key=lambda k: k[1], reverse=True) return tuple(result)
""" Internationalization support. """ import warnings from contextlib import ContextDecorator from decimal import ROUND_UP, Decimal from django.utils.autoreload import autoreload_started, file_changed from django.utils.deprecation import RemovedInDjango40Warning from django.utils.functional import lazy from django.utils.regex_helper import _lazy_re_compile __all__ = [ 'activate', 'deactivate', 'override', 'deactivate_all', 'get_language', 'get_language_from_request', 'get_language_info', 'get_language_bidi', 'check_for_language', 'to_language', 'to_locale', 'templatize', 'gettext', 'gettext_lazy', 'gettext_noop', 'ugettext', 'ugettext_lazy', 'ugettext_noop', 'ngettext', 'ngettext_lazy', 'ungettext', 'ungettext_lazy', 'pgettext', 'pgettext_lazy', 'npgettext', 'npgettext_lazy', 'LANGUAGE_SESSION_KEY', ] LANGUAGE_SESSION_KEY = '_language' class TranslatorCommentWarning(SyntaxWarning): pass # Here be dragons, so a short explanation of the logic won't hurt: # We are trying to solve two problems: (1) access settings, in particular # settings.USE_I18N, as late as possible, so that modules can be imported # without having to first configure Django, and (2) if some other code creates # a reference to one of these functions, don't break that reference when we # replace the functions with their real counterparts (once we do access the # settings). class Trans: """ The purpose of this class is to store the actual translation function upon receiving the first call to that function. After this is done, changes to USE_I18N will have no effect to which function is served upon request. If your tests rely on changing USE_I18N, you can delete all the functions from _trans.__dict__. Note that storing the function with setattr will have a noticeable performance effect, as access to the function goes the normal path, instead of using __getattr__. """ def __getattr__(self, real_name): from django.conf import settings if settings.USE_I18N: from django.utils.translation import trans_real as trans from django.utils.translation.reloader import watch_for_translation_changes, translation_file_changed autoreload_started.connect(watch_for_translation_changes, dispatch_uid='translation_file_changed') file_changed.connect(translation_file_changed, dispatch_uid='translation_file_changed') else: from django.utils.translation import trans_null as trans setattr(self, real_name, getattr(trans, real_name)) return getattr(trans, real_name) _trans = Trans() # The Trans class is no more needed, so remove it from the namespace. del Trans def gettext_noop(message): return _trans.gettext_noop(message) def ugettext_noop(message): """ A legacy compatibility wrapper for Unicode handling on Python 2. Alias of gettext_noop() since Django 2.0. """ warnings.warn( 'django.utils.translation.ugettext_noop() is deprecated in favor of ' 'django.utils.translation.gettext_noop().', RemovedInDjango40Warning, stacklevel=2, ) return gettext_noop(message) def gettext(message): return _trans.gettext(message) def ugettext(message): """ A legacy compatibility wrapper for Unicode handling on Python 2. Alias of gettext() since Django 2.0. """ warnings.warn( 'django.utils.translation.ugettext() is deprecated in favor of ' 'django.utils.translation.gettext().', RemovedInDjango40Warning, stacklevel=2, ) return gettext(message) def ngettext(singular, plural, number): return _trans.ngettext(singular, plural, number) def ungettext(singular, plural, number): """ A legacy compatibility wrapper for Unicode handling on Python 2. Alias of ngettext() since Django 2.0. 
""" warnings.warn( 'django.utils.translation.ungettext() is deprecated in favor of ' 'django.utils.translation.ngettext().', RemovedInDjango40Warning, stacklevel=2, ) return ngettext(singular, plural, number) def pgettext(context, message): return _trans.pgettext(context, message) def npgettext(context, singular, plural, number): return _trans.npgettext(context, singular, plural, number) gettext_lazy = lazy(gettext, str) pgettext_lazy = lazy(pgettext, str) def ugettext_lazy(message): """ A legacy compatibility wrapper for Unicode handling on Python 2. Has been Alias of gettext_lazy since Django 2.0. """ warnings.warn( 'django.utils.translation.ugettext_lazy() is deprecated in favor of ' 'django.utils.translation.gettext_lazy().', RemovedInDjango40Warning, stacklevel=2, ) return gettext_lazy(message) def lazy_number(func, resultclass, number=None, **kwargs): if isinstance(number, int): kwargs['number'] = number proxy = lazy(func, resultclass)(**kwargs) else: original_kwargs = kwargs.copy() class NumberAwareString(resultclass): def __bool__(self): return bool(kwargs['singular']) def _get_number_value(self, values): try: return values[number] except KeyError: raise KeyError( "Your dictionary lacks key '%s\'. Please provide " "it, because it is required to determine whether " "string is singular or plural." % number ) def _translate(self, number_value): kwargs['number'] = number_value return func(**kwargs) def format(self, *args, **kwargs): number_value = self._get_number_value(kwargs) if kwargs and number else args[0] return self._translate(number_value).format(*args, **kwargs) def __mod__(self, rhs): if isinstance(rhs, dict) and number: number_value = self._get_number_value(rhs) else: number_value = rhs translated = self._translate(number_value) try: translated = translated % rhs except TypeError: # String doesn't contain a placeholder for the number. pass return translated proxy = lazy(lambda **kwargs: NumberAwareString(), NumberAwareString)(**kwargs) proxy.__reduce__ = lambda: (_lazy_number_unpickle, (func, resultclass, number, original_kwargs)) return proxy def _lazy_number_unpickle(func, resultclass, number, kwargs): return lazy_number(func, resultclass, number=number, **kwargs) def ngettext_lazy(singular, plural, number=None): return lazy_number(ngettext, str, singular=singular, plural=plural, number=number) def ungettext_lazy(singular, plural, number=None): """ A legacy compatibility wrapper for Unicode handling on Python 2. An alias of ungettext_lazy() since Django 2.0. 
""" warnings.warn( 'django.utils.translation.ungettext_lazy() is deprecated in favor of ' 'django.utils.translation.ngettext_lazy().', RemovedInDjango40Warning, stacklevel=2, ) return ngettext_lazy(singular, plural, number) def npgettext_lazy(context, singular, plural, number=None): return lazy_number(npgettext, str, context=context, singular=singular, plural=plural, number=number) def activate(language): return _trans.activate(language) def deactivate(): return _trans.deactivate() class override(ContextDecorator): def __init__(self, language, deactivate=False): self.language = language self.deactivate = deactivate def __enter__(self): self.old_language = get_language() if self.language is not None: activate(self.language) else: deactivate_all() def __exit__(self, exc_type, exc_value, traceback): if self.old_language is None: deactivate_all() elif self.deactivate: deactivate() else: activate(self.old_language) def get_language(): return _trans.get_language() def get_language_bidi(): return _trans.get_language_bidi() def check_for_language(lang_code): return _trans.check_for_language(lang_code) def to_language(locale): """Turn a locale name (en_US) into a language name (en-us).""" p = locale.find('_') if p >= 0: return locale[:p].lower() + '-' + locale[p + 1:].lower() else: return locale.lower() def to_locale(language): """Turn a language name (en-us) into a locale name (en_US).""" language, _, country = language.lower().partition('-') if not country: return language # A language with > 2 characters after the dash only has its first # character after the dash capitalized; e.g. sr-latn becomes sr_Latn. # A language with 2 characters after the dash has both characters # capitalized; e.g. en-us becomes en_US. country, _, tail = country.partition('-') country = country.title() if len(country) > 2 else country.upper() if tail: country += '-' + tail return language + '_' + country def get_language_from_request(request, check_path=False): return _trans.get_language_from_request(request, check_path) def get_language_from_path(path): return _trans.get_language_from_path(path) def get_supported_language_variant(lang_code, *, strict=False): return _trans.get_supported_language_variant(lang_code, strict) def templatize(src, **kwargs): from .template import templatize return templatize(src, **kwargs) def deactivate_all(): return _trans.deactivate_all() def get_language_info(lang_code): from django.conf.locale import LANG_INFO try: lang_info = LANG_INFO[lang_code] if 'fallback' in lang_info and 'name' not in lang_info: info = get_language_info(lang_info['fallback'][0]) else: info = lang_info except KeyError: if '-' not in lang_code: raise KeyError("Unknown language code %s." % lang_code) generic_lang_code = lang_code.split('-')[0] try: info = LANG_INFO[generic_lang_code] except KeyError: raise KeyError("Unknown language code %s and %s." % (lang_code, generic_lang_code)) if info: info['name_translated'] = gettext_lazy(info['name']) return info trim_whitespace_re = _lazy_re_compile(r'\s*\n\s*') def trim_whitespace(s): return trim_whitespace_re.sub(' ', s.strip()) def round_away_from_one(value): return int(Decimal(value - 1).quantize(Decimal('0'), rounding=ROUND_UP)) + 1
import warnings from io import StringIO from django.template.base import TRANSLATOR_COMMENT_MARK, Lexer, TokenType from django.utils.regex_helper import _lazy_re_compile from . import TranslatorCommentWarning, trim_whitespace dot_re = _lazy_re_compile(r'\S') def blankout(src, char): """ Change every non-whitespace character to the given char. Used in the templatize function. """ return dot_re.sub(char, src) context_re = _lazy_re_compile(r"""^\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?'))\s*""") inline_re = _lazy_re_compile( # Match the trans 'some text' part r"""^\s*trans\s+((?:"[^"]*?")|(?:'[^']*?'))""" # Match and ignore optional filters r"""(?:\s*\|\s*[^\s:]+(?::(?:[^\s'":]+|(?:"[^"]*?")|(?:'[^']*?')))?)*""" # Match the optional context part r"""(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?\s*""" ) block_re = _lazy_re_compile(r"""^\s*blocktrans(\s+.*context\s+((?:"[^"]*?")|(?:'[^']*?')))?(?:\s+|$)""") endblock_re = _lazy_re_compile(r"""^\s*endblocktrans$""") plural_re = _lazy_re_compile(r"""^\s*plural$""") constant_re = _lazy_re_compile(r"""_\(((?:".*?")|(?:'.*?'))\)""") def templatize(src, origin=None): """ Turn a Django template into something that is understood by xgettext. It does so by translating the Django translation tags into standard gettext function invocations. """ out = StringIO('') message_context = None intrans = False inplural = False trimmed = False singular = [] plural = [] incomment = False comment = [] lineno_comment_map = {} comment_lineno_cache = None # Adding the u prefix allows gettext to recognize the string (#26093). raw_prefix = 'u' def join_tokens(tokens, trim=False): message = ''.join(tokens) if trim: message = trim_whitespace(message) return message for t in Lexer(src).tokenize(): if incomment: if t.token_type == TokenType.BLOCK and t.contents == 'endcomment': content = ''.join(comment) translators_comment_start = None for lineno, line in enumerate(content.splitlines(True)): if line.lstrip().startswith(TRANSLATOR_COMMENT_MARK): translators_comment_start = lineno for lineno, line in enumerate(content.splitlines(True)): if translators_comment_start is not None and lineno >= translators_comment_start: out.write(' # %s' % line) else: out.write(' #\n') incomment = False comment = [] else: comment.append(t.contents) elif intrans: if t.token_type == TokenType.BLOCK: endbmatch = endblock_re.match(t.contents) pluralmatch = plural_re.match(t.contents) if endbmatch: if inplural: if message_context: out.write(' npgettext({p}{!r}, {p}{!r}, {p}{!r},count) '.format( message_context, join_tokens(singular, trimmed), join_tokens(plural, trimmed), p=raw_prefix, )) else: out.write(' ngettext({p}{!r}, {p}{!r}, count) '.format( join_tokens(singular, trimmed), join_tokens(plural, trimmed), p=raw_prefix, )) for part in singular: out.write(blankout(part, 'S')) for part in plural: out.write(blankout(part, 'P')) else: if message_context: out.write(' pgettext({p}{!r}, {p}{!r}) '.format( message_context, join_tokens(singular, trimmed), p=raw_prefix, )) else: out.write(' gettext({p}{!r}) '.format( join_tokens(singular, trimmed), p=raw_prefix, )) for part in singular: out.write(blankout(part, 'S')) message_context = None intrans = False inplural = False singular = [] plural = [] elif pluralmatch: inplural = True else: filemsg = '' if origin: filemsg = 'file %s, ' % origin raise SyntaxError( "Translation blocks must not include other block tags: " "%s (%sline %d)" % (t.contents, filemsg, t.lineno) ) elif t.token_type == TokenType.VAR: if inplural: plural.append('%%(%s)s' % t.contents) else: 
singular.append('%%(%s)s' % t.contents) elif t.token_type == TokenType.TEXT: contents = t.contents.replace('%', '%%') if inplural: plural.append(contents) else: singular.append(contents) else: # Handle comment tokens (`{# ... #}`) plus other constructs on # the same line: if comment_lineno_cache is not None: cur_lineno = t.lineno + t.contents.count('\n') if comment_lineno_cache == cur_lineno: if t.token_type != TokenType.COMMENT: for c in lineno_comment_map[comment_lineno_cache]: filemsg = '' if origin: filemsg = 'file %s, ' % origin warn_msg = ( "The translator-targeted comment '%s' " "(%sline %d) was ignored, because it wasn't " "the last item on the line." ) % (c, filemsg, comment_lineno_cache) warnings.warn(warn_msg, TranslatorCommentWarning) lineno_comment_map[comment_lineno_cache] = [] else: out.write('# %s' % ' | '.join(lineno_comment_map[comment_lineno_cache])) comment_lineno_cache = None if t.token_type == TokenType.BLOCK: imatch = inline_re.match(t.contents) bmatch = block_re.match(t.contents) cmatches = constant_re.findall(t.contents) if imatch: g = imatch.group(1) if g[0] == '"': g = g.strip('"') elif g[0] == "'": g = g.strip("'") g = g.replace('%', '%%') if imatch.group(2): # A context is provided context_match = context_re.match(imatch.group(2)) message_context = context_match.group(1) if message_context[0] == '"': message_context = message_context.strip('"') elif message_context[0] == "'": message_context = message_context.strip("'") out.write(' pgettext({p}{!r}, {p}{!r}) '.format( message_context, g, p=raw_prefix )) message_context = None else: out.write(' gettext({p}{!r}) '.format(g, p=raw_prefix)) elif bmatch: for fmatch in constant_re.findall(t.contents): out.write(' _(%s) ' % fmatch) if bmatch.group(1): # A context is provided context_match = context_re.match(bmatch.group(1)) message_context = context_match.group(1) if message_context[0] == '"': message_context = message_context.strip('"') elif message_context[0] == "'": message_context = message_context.strip("'") intrans = True inplural = False trimmed = 'trimmed' in t.split_contents() singular = [] plural = [] elif cmatches: for cmatch in cmatches: out.write(' _(%s) ' % cmatch) elif t.contents == 'comment': incomment = True else: out.write(blankout(t.contents, 'B')) elif t.token_type == TokenType.VAR: parts = t.contents.split('|') cmatch = constant_re.match(parts[0]) if cmatch: out.write(' _(%s) ' % cmatch.group(1)) for p in parts[1:]: if p.find(':_(') >= 0: out.write(' %s ' % p.split(':', 1)[1]) else: out.write(blankout(p, 'F')) elif t.token_type == TokenType.COMMENT: if t.contents.lstrip().startswith(TRANSLATOR_COMMENT_MARK): lineno_comment_map.setdefault(t.lineno, []).append(t.contents) comment_lineno_cache = t.lineno else: out.write(blankout(t.contents, 'X')) return out.getvalue()
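# Illustrative sketch (an editor's addition, with a made-up template string):
# templatize() rewrites template translation tags as gettext-style calls so
# xgettext can extract them, blanking out the surrounding template syntax so
# line numbers still match the source. In a real project this runs inside
# the makemessages management command.
from django.utils.translation.template import templatize

src = "{% load i18n %}{% trans 'Hello' %}{% comment %}Translators: greeting{% endcomment %}"
out = templatize(src)
# `out` now contains a gettext(u'Hello') invocation, and the translator
# note is re-emitted as a '#' comment for xgettext to pick up.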
import enum

from django.utils.functional import Promise

__all__ = ['Choices', 'IntegerChoices', 'TextChoices']


class ChoicesMeta(enum.EnumMeta):
    """A metaclass for creating enum choices."""

    def __new__(metacls, classname, bases, classdict):
        labels = []
        for key in classdict._member_names:
            value = classdict[key]
            if (
                isinstance(value, (list, tuple)) and
                len(value) > 1 and
                isinstance(value[-1], (Promise, str))
            ):
                *value, label = value
                value = tuple(value)
            else:
                label = key.replace('_', ' ').title()
            labels.append(label)
            # Use dict.__setitem__() to suppress defenses against double
            # assignment in enum's classdict.
            dict.__setitem__(classdict, key, value)
        cls = super().__new__(metacls, classname, bases, classdict)
        cls._value2label_map_ = dict(zip(cls._value2member_map_, labels))
        # Add a label property to instances of enum which uses the enum member
        # that is passed in as "self" as the value to use when looking up the
        # label in the choices.
        cls.label = property(lambda self: cls._value2label_map_.get(self.value))
        return enum.unique(cls)

    def __contains__(cls, member):
        if not isinstance(member, enum.Enum):
            # Allow non-enums to match against member values.
            return member in {x.value for x in cls}
        return super().__contains__(member)

    @property
    def names(cls):
        empty = ['__empty__'] if hasattr(cls, '__empty__') else []
        return empty + [member.name for member in cls]

    @property
    def choices(cls):
        empty = [(None, cls.__empty__)] if hasattr(cls, '__empty__') else []
        return empty + [(member.value, member.label) for member in cls]

    @property
    def labels(cls):
        return [label for _, label in cls.choices]

    @property
    def values(cls):
        return [value for value, _ in cls.choices]


class Choices(enum.Enum, metaclass=ChoicesMeta):
    """Class for creating enumerated choices."""

    def __str__(self):
        """
        Use value when cast to str, so that Choices set as model instance
        attributes are rendered as expected in templates and similar contexts.
        """
        return str(self.value)


class IntegerChoices(int, Choices):
    """Class for creating enumerated integer choices."""
    pass


class TextChoices(str, Choices):
    """Class for creating enumerated string choices."""

    def _generate_next_value_(name, start, count, last_values):
        return name
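# Usage sketch (an editor's illustration, not part of enums.py): a
# TextChoices subclass couples member values with human-readable labels, and
# the metaclass exposes .choices/.labels/.values/.names for model and form
# machinery. `Suit` and `Priority` are hypothetical examples.
from django.db.models.enums import IntegerChoices, TextChoices


class Suit(TextChoices):
    DIAMOND = 'D', 'Diamond'
    SPADE = 'S', 'Spade'


class Priority(IntegerChoices):
    LOW = 1, 'Low'
    HIGH = 2, 'High'


assert Suit.choices == [('D', 'Diamond'), ('S', 'Spade')]
assert Suit.SPADE.label == 'Spade'
assert 'D' in Suit  # ChoicesMeta.__contains__ also matches raw values.
assert str(Suit.DIAMOND) == 'D'  # Choices.__str__ returns the value.
assert Priority.values == [1, 2]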
import copy import datetime import inspect from decimal import Decimal from django.core.exceptions import EmptyResultSet, FieldError from django.db import connection from django.db.models import fields from django.db.models.query_utils import Q from django.db.utils import NotSupportedError from django.utils.deconstruct import deconstructible from django.utils.functional import cached_property from django.utils.hashable import make_hashable class SQLiteNumericMixin: """ Some expressions with output_field=DecimalField() must be cast to numeric to be properly filtered. """ def as_sqlite(self, compiler, connection, **extra_context): sql, params = self.as_sql(compiler, connection, **extra_context) try: if self.output_field.get_internal_type() == 'DecimalField': sql = 'CAST(%s AS NUMERIC)' % sql except FieldError: pass return sql, params class Combinable: """ Provide the ability to combine one or two objects with some connector. For example F('foo') + F('bar'). """ # Arithmetic connectors ADD = '+' SUB = '-' MUL = '*' DIV = '/' POW = '^' # The following is a quoted % operator - it is quoted because it can be # used in strings that also have parameter substitution. MOD = '%%' # Bitwise operators - note that these are generated by .bitand() # and .bitor(), the '&' and '|' are reserved for boolean operator # usage. BITAND = '&' BITOR = '|' BITLEFTSHIFT = '<<' BITRIGHTSHIFT = '>>' def _combine(self, other, connector, reversed): if not hasattr(other, 'resolve_expression'): # everything must be resolvable to an expression if isinstance(other, datetime.timedelta): other = DurationValue(other, output_field=fields.DurationField()) else: other = Value(other) if reversed: return CombinedExpression(other, connector, self) return CombinedExpression(self, connector, other) ############# # OPERATORS # ############# def __neg__(self): return self._combine(-1, self.MUL, False) def __add__(self, other): return self._combine(other, self.ADD, False) def __sub__(self, other): return self._combine(other, self.SUB, False) def __mul__(self, other): return self._combine(other, self.MUL, False) def __truediv__(self, other): return self._combine(other, self.DIV, False) def __mod__(self, other): return self._combine(other, self.MOD, False) def __pow__(self, other): return self._combine(other, self.POW, False) def __and__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) & Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def bitand(self, other): return self._combine(other, self.BITAND, False) def bitleftshift(self, other): return self._combine(other, self.BITLEFTSHIFT, False) def bitrightshift(self, other): return self._combine(other, self.BITRIGHTSHIFT, False) def __or__(self, other): if getattr(self, 'conditional', False) and getattr(other, 'conditional', False): return Q(self) | Q(other) raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." 
) def bitor(self, other): return self._combine(other, self.BITOR, False) def __radd__(self, other): return self._combine(other, self.ADD, True) def __rsub__(self, other): return self._combine(other, self.SUB, True) def __rmul__(self, other): return self._combine(other, self.MUL, True) def __rtruediv__(self, other): return self._combine(other, self.DIV, True) def __rmod__(self, other): return self._combine(other, self.MOD, True) def __rpow__(self, other): return self._combine(other, self.POW, True) def __rand__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) def __ror__(self, other): raise NotImplementedError( "Use .bitand() and .bitor() for bitwise logical operations." ) @deconstructible class BaseExpression: """Base class for all query expressions.""" # aggregate specific fields is_summary = False _output_field_resolved_to_none = False # Can the expression be used in a WHERE clause? filterable = True # Can the expression can be used as a source expression in Window? window_compatible = False def __init__(self, output_field=None): if output_field is not None: self.output_field = output_field def __getstate__(self): state = self.__dict__.copy() state.pop('convert_value', None) return state def get_db_converters(self, connection): return ( [] if self.convert_value is self._convert_value_noop else [self.convert_value] ) + self.output_field.get_db_converters(connection) def get_source_expressions(self): return [] def set_source_expressions(self, exprs): assert not exprs def _parse_expressions(self, *expressions): return [ arg if hasattr(arg, 'resolve_expression') else ( F(arg) if isinstance(arg, str) else Value(arg) ) for arg in expressions ] def as_sql(self, compiler, connection): """ Responsible for returning a (sql, [params]) tuple to be included in the current query. Different backends can provide their own implementation, by providing an `as_{vendor}` method and patching the Expression: ``` def override_as_sql(self, compiler, connection): # custom logic return super().as_sql(compiler, connection) setattr(Expression, 'as_' + connection.vendor, override_as_sql) ``` Arguments: * compiler: the query compiler responsible for generating the query. Must have a compile method, returning a (sql, [params]) tuple. Calling compiler(value) will return a quoted `value`. * connection: the database connection used for the current query. Return: (sql, params) Where `sql` is a string containing ordered sql parameters to be replaced with the elements of the list `params`. """ raise NotImplementedError("Subclasses must implement as_sql()") @cached_property def contains_aggregate(self): return any(expr and expr.contains_aggregate for expr in self.get_source_expressions()) @cached_property def contains_over_clause(self): return any(expr and expr.contains_over_clause for expr in self.get_source_expressions()) @cached_property def contains_column_references(self): return any(expr and expr.contains_column_references for expr in self.get_source_expressions()) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): """ Provide the chance to do any preprocessing or validation before being added to the query. 
Arguments: * query: the backend query implementation * allow_joins: boolean allowing or denying use of joins in this query * reuse: a set of reusable joins for multijoins * summarize: a terminal aggregate clause * for_save: whether this expression about to be used in a save or update Return: an Expression to be added to the query. """ c = self.copy() c.is_summary = summarize c.set_source_expressions([ expr.resolve_expression(query, allow_joins, reuse, summarize) if expr else None for expr in c.get_source_expressions() ]) return c @property def conditional(self): return isinstance(self.output_field, fields.BooleanField) @property def field(self): return self.output_field @cached_property def output_field(self): """Return the output type of this expressions.""" output_field = self._resolve_output_field() if output_field is None: self._output_field_resolved_to_none = True raise FieldError('Cannot resolve expression type, unknown output_field') return output_field @cached_property def _output_field_or_none(self): """ Return the output field of this expression, or None if _resolve_output_field() didn't return an output type. """ try: return self.output_field except FieldError: if not self._output_field_resolved_to_none: raise def _resolve_output_field(self): """ Attempt to infer the output type of the expression. If the output fields of all source fields match then, simply infer the same type here. This isn't always correct, but it makes sense most of the time. Consider the difference between `2 + 2` and `2 / 3`. Inferring the type here is a convenience for the common case. The user should supply their own output_field with more complex computations. If a source's output field resolves to None, exclude it from this check. If all sources are None, then an error is raised higher up the stack in the output_field property. """ sources_iter = (source for source in self.get_source_fields() if source is not None) for output_field in sources_iter: for source in sources_iter: if not isinstance(output_field, source.__class__): raise FieldError( 'Expression contains mixed types: %s, %s. You must ' 'set output_field.' % ( output_field.__class__.__name__, source.__class__.__name__, ) ) return output_field @staticmethod def _convert_value_noop(value, expression, connection): return value @cached_property def convert_value(self): """ Expressions provide their own converters because users have the option of manually specifying the output_field which may be a different type from the one the database returns. 
""" field = self.output_field internal_type = field.get_internal_type() if internal_type == 'FloatField': return lambda value, expression, connection: None if value is None else float(value) elif internal_type.endswith('IntegerField'): return lambda value, expression, connection: None if value is None else int(value) elif internal_type == 'DecimalField': return lambda value, expression, connection: None if value is None else Decimal(value) return self._convert_value_noop def get_lookup(self, lookup): return self.output_field.get_lookup(lookup) def get_transform(self, name): return self.output_field.get_transform(name) def relabeled_clone(self, change_map): clone = self.copy() clone.set_source_expressions([ e.relabeled_clone(change_map) if e is not None else None for e in self.get_source_expressions() ]) return clone def copy(self): return copy.copy(self) def get_group_by_cols(self, alias=None): if not self.contains_aggregate: return [self] cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def get_source_fields(self): """Return the underlying field types used by this aggregate.""" return [e._output_field_or_none for e in self.get_source_expressions()] def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def reverse_ordering(self): return self def flatten(self): """ Recursively yield this expression and all subexpressions, in depth-first order. """ yield self for expr in self.get_source_expressions(): if expr: yield from expr.flatten() def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, EXISTS expressions need to be wrapped in CASE WHEN on Oracle. """ return self.output_field.select_format(compiler, sql, params) @cached_property def identity(self): constructor_signature = inspect.signature(self.__init__) args, kwargs = self._constructor_args signature = constructor_signature.bind_partial(*args, **kwargs) signature.apply_defaults() arguments = signature.arguments.items() identity = [self.__class__] for arg, value in arguments: if isinstance(value, fields.Field): if value.name and value.model: value = (value.model._meta.label, value.name) else: value = type(value) else: value = make_hashable(value) identity.append((arg, value)) return tuple(identity) def __eq__(self, other): if not isinstance(other, BaseExpression): return NotImplemented return other.identity == self.identity def __hash__(self): return hash(self.identity) class Expression(BaseExpression, Combinable): """An expression that can be combined with other expressions.""" pass class CombinedExpression(SQLiteNumericMixin, Expression): def __init__(self, lhs, connector, rhs, output_field=None): super().__init__(output_field=output_field) self.connector = connector self.lhs = lhs self.rhs = rhs def __repr__(self): return "<{}: {}>".format(self.__class__.__name__, self) def __str__(self): return "{} {} {}".format(self.lhs, self.connector, self.rhs) def get_source_expressions(self): return [self.lhs, self.rhs] def set_source_expressions(self, exprs): self.lhs, self.rhs = exprs def as_sql(self, compiler, connection): try: lhs_output = self.lhs.output_field except FieldError: lhs_output = None try: rhs_output = self.rhs.output_field except FieldError: rhs_output = None if (not connection.features.has_native_duration_field and ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or (rhs_output and rhs_output.get_internal_type() == 'DurationField'))): 
return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection) if (lhs_output and rhs_output and self.connector == self.SUB and lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and lhs_output.get_internal_type() == rhs_output.get_internal_type()): return TemporalSubtraction(self.lhs, self.rhs).as_sql(compiler, connection) expressions = [] expression_params = [] sql, params = compiler.compile(self.lhs) expressions.append(sql) expression_params.extend(params) sql, params = compiler.compile(self.rhs) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_expression(self.connector, expressions) return expression_wrapper % sql, expression_params def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize c.lhs = c.lhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.rhs = c.rhs.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c class DurationExpression(CombinedExpression): def compile(self, side, compiler, connection): if not isinstance(side, DurationValue): try: output = side.output_field except FieldError: pass else: if output.get_internal_type() == 'DurationField': sql, params = compiler.compile(side) return connection.ops.format_for_duration_arithmetic(sql), params return compiler.compile(side) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) expressions = [] expression_params = [] sql, params = self.compile(self.lhs, compiler, connection) expressions.append(sql) expression_params.extend(params) sql, params = self.compile(self.rhs, compiler, connection) expressions.append(sql) expression_params.extend(params) # order of precedence expression_wrapper = '(%s)' sql = connection.ops.combine_duration_expression(self.connector, expressions) return expression_wrapper % sql, expression_params class TemporalSubtraction(CombinedExpression): output_field = fields.DurationField() def __init__(self, lhs, rhs): super().__init__(lhs, self.SUB, rhs) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) lhs = compiler.compile(self.lhs) rhs = compiler.compile(self.rhs) return connection.ops.subtract_temporals(self.lhs.output_field.get_internal_type(), lhs, rhs) @deconstructible class F(Combinable): """An object capable of resolving references to existing query objects.""" def __init__(self, name): """ Arguments: * name: the name of the field this expression references """ self.name = name def __repr__(self): return "{}({})".format(self.__class__.__name__, self.name) def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False, simple_col=False): return query.resolve_ref(self.name, allow_joins, reuse, summarize, simple_col) def asc(self, **kwargs): return OrderBy(self, **kwargs) def desc(self, **kwargs): return OrderBy(self, descending=True, **kwargs) def __eq__(self, other): return self.__class__ == other.__class__ and self.name == other.name def __hash__(self): return hash(self.name) class ResolvedOuterRef(F): """ An object that contains a reference to an outer query. In this case, the reference to the outer query has been resolved because the inner query has been used as a subquery. 
""" contains_aggregate = False def as_sql(self, *args, **kwargs): raise ValueError( 'This queryset contains a reference to an outer query and may ' 'only be used in a subquery.' ) def relabeled_clone(self, relabels): return self class OuterRef(F): def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False, simple_col=False): if isinstance(self.name, self.__class__): return self.name return ResolvedOuterRef(self.name) class Func(SQLiteNumericMixin, Expression): """An SQL function call.""" function = None template = '%(function)s(%(expressions)s)' arg_joiner = ', ' arity = None # The number of arguments the function accepts. def __init__(self, *expressions, output_field=None, **extra): if self.arity is not None and len(expressions) != self.arity: raise TypeError( "'%s' takes exactly %s %s (%s given)" % ( self.__class__.__name__, self.arity, "argument" if self.arity == 1 else "arguments", len(expressions), ) ) super().__init__(output_field=output_field) self.source_expressions = self._parse_expressions(*expressions) self.extra = extra def __repr__(self): args = self.arg_joiner.join(str(arg) for arg in self.source_expressions) extra = {**self.extra, **self._get_repr_options()} if extra: extra = ', '.join(str(key) + '=' + str(val) for key, val in sorted(extra.items())) return "{}({}, {})".format(self.__class__.__name__, args, extra) return "{}({})".format(self.__class__.__name__, args) def _get_repr_options(self): """Return a dict of extra __init__() options to include in the repr.""" return {} def get_source_expressions(self): return self.source_expressions def set_source_expressions(self, exprs): self.source_expressions = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, arg in enumerate(c.source_expressions): c.source_expressions[pos] = arg.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context): connection.ops.check_expression_support(self) sql_parts = [] params = [] for arg in self.source_expressions: arg_sql, arg_params = compiler.compile(arg) sql_parts.append(arg_sql) params.extend(arg_params) data = {**self.extra, **extra_context} # Use the first supplied value in this order: the parameter to this # method, a value supplied in __init__()'s **extra (the value in # `data`), or the value defined on the class. if function is not None: data['function'] = function else: data.setdefault('function', self.function) template = template or data.get('template', self.template) arg_joiner = arg_joiner or data.get('arg_joiner', self.arg_joiner) data['expressions'] = data['field'] = arg_joiner.join(sql_parts) return template % data, params def copy(self): copy = super().copy() copy.source_expressions = self.source_expressions[:] copy.extra = self.extra.copy() return copy class Value(Expression): """Represent a wrapped value as a node within an expression.""" def __init__(self, value, output_field=None): """ Arguments: * value: the value this expression represents. The value will be added into the sql parameter list and properly quoted. * output_field: an instance of the model field type that this expression will return, such as IntegerField() or CharField(). 
""" super().__init__(output_field=output_field) self.value = value def __repr__(self): return "{}({})".format(self.__class__.__name__, self.value) def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) val = self.value output_field = self._output_field_or_none if output_field is not None: if self.for_save: val = output_field.get_db_prep_save(val, connection=connection) else: val = output_field.get_db_prep_value(val, connection=connection) if hasattr(output_field, 'get_placeholder'): return output_field.get_placeholder(val, compiler, connection), [val] if val is None: # cx_Oracle does not always convert None to the appropriate # NULL type (like in case expressions using numbers), so we # use a literal SQL NULL return 'NULL', [] return '%s', [val] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save) c.for_save = for_save return c def get_group_by_cols(self, alias=None): return [] class DurationValue(Value): def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) if connection.features.has_native_duration_field: return super().as_sql(compiler, connection) return connection.ops.date_interval_sql(self.value), [] class RawSQL(Expression): def __init__(self, sql, params, output_field=None): if output_field is None: output_field = fields.Field() self.sql, self.params = sql, params super().__init__(output_field=output_field) def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params) def as_sql(self, compiler, connection): return '(%s)' % self.sql, self.params def get_group_by_cols(self, alias=None): return [self] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # Resolve parents fields used in raw SQL. for parent in query.model._meta.get_parent_list(): for parent_field in parent._meta.local_fields: _, column_name = parent_field.get_attname_column() if column_name.lower() in self.sql.lower(): query.resolve_ref(parent_field.name, allow_joins, reuse, summarize) break return super().resolve_expression(query, allow_joins, reuse, summarize, for_save) class Star(Expression): def __repr__(self): return "'*'" def as_sql(self, compiler, connection): return '*', [] class Random(Expression): output_field = fields.FloatField() def __repr__(self): return "Random()" def as_sql(self, compiler, connection): return connection.ops.random_function_sql(), [] class Col(Expression): contains_column_references = True def __init__(self, alias, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.alias, self.target = alias, target def __repr__(self): return "{}({}, {})".format( self.__class__.__name__, self.alias, self.target) def as_sql(self, compiler, connection): qn = compiler.quote_name_unless_alias return "%s.%s" % (qn(self.alias), qn(self.target.column)), [] def relabeled_clone(self, relabels): return self.__class__(relabels.get(self.alias, self.alias), self.target, self.output_field) def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return (self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection)) class SimpleCol(Expression): """ Represents the SQL of a column name without the table name. 
This variant of Col doesn't include the table name (or an alias) to avoid a syntax error in check constraints. """ contains_column_references = True def __init__(self, target, output_field=None): if output_field is None: output_field = target super().__init__(output_field=output_field) self.target = target def __repr__(self): return '{}({})'.format(self.__class__.__name__, self.target) def as_sql(self, compiler, connection): qn = compiler.quote_name_unless_alias return qn(self.target.column), [] def get_group_by_cols(self, alias=None): return [self] def get_db_converters(self, connection): if self.target == self.output_field: return self.output_field.get_db_converters(connection) return ( self.output_field.get_db_converters(connection) + self.target.get_db_converters(connection) ) class Ref(Expression): """ Reference to column alias of the query. For example, Ref('sum_cost') in qs.annotate(sum_cost=Sum('cost')) query. """ def __init__(self, refs, source): super().__init__() self.refs, self.source = refs, source def __repr__(self): return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source) def get_source_expressions(self): return [self.source] def set_source_expressions(self, exprs): self.source, = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): # The sub-expression `source` has already been resolved, as this is # just a reference to the name of `source`. return self def relabeled_clone(self, relabels): return self def as_sql(self, compiler, connection): return connection.ops.quote_name(self.refs), [] def get_group_by_cols(self, alias=None): return [self] class ExpressionList(Func): """ An expression containing multiple expressions. Can be used to provide a list of expressions as an argument to another expression, like an ordering clause. """ template = '%(expressions)s' def __init__(self, *expressions, **extra): if not expressions: raise ValueError('%s requires at least one expression.' % self.__class__.__name__) super().__init__(*expressions, **extra) def __str__(self): return self.arg_joiner.join(str(arg) for arg in self.source_expressions) class ExpressionWrapper(Expression): """ An expression that can wrap another expression so that it can provide extra context to the inner expression, such as the output_field. """ def __init__(self, expression, output_field): super().__init__(output_field=output_field) self.expression = expression def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection): return self.expression.as_sql(compiler, connection) def __repr__(self): return "{}({})".format(self.__class__.__name__, self.expression) class When(Expression): template = 'WHEN %(condition)s THEN %(result)s' # This isn't a complete conditional expression, must be used in Case(). conditional = False def __init__(self, condition=None, then=None, **lookups): if lookups and condition is None: condition, lookups = Q(**lookups), None if condition is None or not getattr(condition, 'conditional', False) or lookups: raise TypeError( 'When() supports a Q object, a boolean expression, or lookups ' 'as a condition.' 
) if isinstance(condition, Q) and not condition: raise ValueError("An empty Q() can't be used as a When() condition.") super().__init__(output_field=None) self.condition = condition self.result = self._parse_expressions(then)[0] def __str__(self): return "WHEN %r THEN %r" % (self.condition, self.result) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return [self.condition, self.result] def set_source_expressions(self, exprs): self.condition, self.result = exprs def get_source_fields(self): # We're only interested in the fields of the result expressions. return [self.result._output_field_or_none] def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize if hasattr(c.condition, 'resolve_expression'): c.condition = c.condition.resolve_expression(query, allow_joins, reuse, summarize, False) c.result = c.result.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = extra_context sql_params = [] condition_sql, condition_params = compiler.compile(self.condition) template_params['condition'] = condition_sql sql_params.extend(condition_params) result_sql, result_params = compiler.compile(self.result) template_params['result'] = result_sql sql_params.extend(result_params) template = template or self.template return template % template_params, sql_params def get_group_by_cols(self, alias=None): # This is not a complete expression and cannot be used in GROUP BY. cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols class Case(Expression): """ An SQL searched CASE expression: CASE WHEN n > 0 THEN 'positive' WHEN n < 0 THEN 'negative' ELSE 'zero' END """ template = 'CASE %(cases)s ELSE %(default)s END' case_joiner = ' ' def __init__(self, *cases, default=None, output_field=None, **extra): if not all(isinstance(case, When) for case in cases): raise TypeError("Positional arguments must all be When objects.") super().__init__(output_field) self.cases = list(cases) self.default = self._parse_expressions(default)[0] self.extra = extra def __str__(self): return "CASE %s, ELSE %r" % (', '.join(str(c) for c in self.cases), self.default) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self) def get_source_expressions(self): return self.cases + [self.default] def set_source_expressions(self, exprs): *self.cases, self.default = exprs def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False): c = self.copy() c.is_summary = summarize for pos, case in enumerate(c.cases): c.cases[pos] = case.resolve_expression(query, allow_joins, reuse, summarize, for_save) c.default = c.default.resolve_expression(query, allow_joins, reuse, summarize, for_save) return c def copy(self): c = super().copy() c.cases = c.cases[:] return c def as_sql(self, compiler, connection, template=None, case_joiner=None, **extra_context): connection.ops.check_expression_support(self) if not self.cases: return compiler.compile(self.default) template_params = {**self.extra, **extra_context} case_parts = [] sql_params = [] for case in self.cases: try: case_sql, case_params = compiler.compile(case) except EmptyResultSet: continue case_parts.append(case_sql) sql_params.extend(case_params) default_sql, default_params = 
compiler.compile(self.default) if not case_parts: return default_sql, default_params case_joiner = case_joiner or self.case_joiner template_params['cases'] = case_joiner.join(case_parts) template_params['default'] = default_sql sql_params.extend(default_params) template = template or template_params.get('template', self.template) sql = template % template_params if self._output_field_or_none is not None: sql = connection.ops.unification_cast_sql(self.output_field) % sql return sql, sql_params class Subquery(Expression): """ An explicit subquery. It may contain OuterRef() references to the outer query which will be resolved when it is applied to that query. """ template = '(%(subquery)s)' contains_aggregate = False def __init__(self, queryset, output_field=None, **extra): self.query = queryset.query self.extra = extra super().__init__(output_field) def __getstate__(self): state = super().__getstate__() state.pop('_constructor_args', None) return state def get_source_expressions(self): return [self.query] def set_source_expressions(self, exprs): self.query = exprs[0] def _resolve_output_field(self): return self.query.output_field def copy(self): clone = super().copy() clone.query = clone.query.clone() return clone @property def external_aliases(self): return self.query.external_aliases def as_sql(self, compiler, connection, template=None, **extra_context): connection.ops.check_expression_support(self) template_params = {**self.extra, **extra_context} subquery_sql, sql_params = self.query.as_sql(compiler, connection) template_params['subquery'] = subquery_sql[1:-1] template = template or template_params.get('template', self.template) sql = template % template_params return sql, sql_params def get_group_by_cols(self, alias=None): if alias: return [Ref(alias, self)] return [] class Exists(Subquery): template = 'EXISTS(%(subquery)s)' output_field = fields.BooleanField() def __init__(self, queryset, negated=False, **kwargs): # As a performance optimization, remove ordering since EXISTS doesn't # care about it, just whether or not a row matches. queryset = queryset.order_by() self.negated = negated super().__init__(queryset, **kwargs) def __invert__(self): clone = self.copy() clone.negated = not self.negated return clone def as_sql(self, compiler, connection, template=None, **extra_context): sql, params = super().as_sql(compiler, connection, template, **extra_context) if self.negated: sql = 'NOT {}'.format(sql) return sql, params def select_format(self, compiler, sql, params): # Wrap EXISTS() with a CASE WHEN expression if a database backend # (e.g. Oracle) doesn't support boolean expression in the SELECT list. 
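# (Editor's illustration, hypothetical SQL: on such a backend a clause like
# EXISTS(SELECT 1 FROM app_book WHERE ...) is emitted in the SELECT list as
# CASE WHEN EXISTS(SELECT 1 FROM app_book WHERE ...) THEN 1 ELSE 0 END.)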
if not compiler.connection.features.supports_boolean_expr_in_select_clause: sql = 'CASE WHEN {} THEN 1 ELSE 0 END'.format(sql) return sql, params class OrderBy(BaseExpression): template = '%(expression)s %(ordering)s' conditional = False def __init__(self, expression, descending=False, nulls_first=False, nulls_last=False): if nulls_first and nulls_last: raise ValueError('nulls_first and nulls_last are mutually exclusive') self.nulls_first = nulls_first self.nulls_last = nulls_last self.descending = descending if not hasattr(expression, 'resolve_expression'): raise ValueError('expression must be an expression type') self.expression = expression def __repr__(self): return "{}({}, descending={})".format( self.__class__.__name__, self.expression, self.descending) def set_source_expressions(self, exprs): self.expression = exprs[0] def get_source_expressions(self): return [self.expression] def as_sql(self, compiler, connection, template=None, **extra_context): if not template: if self.nulls_last: template = '%s NULLS LAST' % self.template elif self.nulls_first: template = '%s NULLS FIRST' % self.template connection.ops.check_expression_support(self) expression_sql, params = compiler.compile(self.expression) placeholders = { 'expression': expression_sql, 'ordering': 'DESC' if self.descending else 'ASC', **extra_context, } template = template or self.template params *= template.count('%(expression)s') return (template % placeholders).rstrip(), params def as_sqlite(self, compiler, connection): template = None if connection.Database.sqlite_version_info < (3, 30, 0): if self.nulls_last: template = '%(expression)s IS NULL, %(expression)s %(ordering)s' elif self.nulls_first: template = '%(expression)s IS NOT NULL, %(expression)s %(ordering)s' return self.as_sql(compiler, connection, template=template) def as_mysql(self, compiler, connection): template = None if self.nulls_last: template = 'ISNULL(%(expression)s), %(expression)s %(ordering)s ' elif self.nulls_first: template = 'IF(ISNULL(%(expression)s),0,1), %(expression)s %(ordering)s ' return self.as_sql(compiler, connection, template=template) def as_oracle(self, compiler, connection): # Oracle doesn't allow ORDER BY EXISTS() unless it's wrapped in # a CASE WHEN. if isinstance(self.expression, Exists): copy = self.copy() copy.expression = Case( When(self.expression, then=True), default=False, output_field=fields.BooleanField(), ) return copy.as_sql(compiler, connection) return self.as_sql(compiler, connection) def get_group_by_cols(self, alias=None): cols = [] for source in self.get_source_expressions(): cols.extend(source.get_group_by_cols()) return cols def reverse_ordering(self): self.descending = not self.descending if self.nulls_first or self.nulls_last: self.nulls_first = not self.nulls_first self.nulls_last = not self.nulls_last return self def asc(self): self.descending = False def desc(self): self.descending = True class Window(Expression): template = '%(expression)s OVER (%(window)s)' # Although the main expression may either be an aggregate or an # expression with an aggregate function, the GROUP BY that will # be introduced in the query as a result is not desired. contains_aggregate = False contains_over_clause = True filterable = False def __init__(self, expression, partition_by=None, order_by=None, frame=None, output_field=None): self.partition_by = partition_by self.order_by = order_by self.frame = frame if not getattr(expression, 'window_compatible', False): raise ValueError( "Expression '%s' isn't compatible with OVER clauses." 
% expression.__class__.__name__ ) if self.partition_by is not None: if not isinstance(self.partition_by, (tuple, list)): self.partition_by = (self.partition_by,) self.partition_by = ExpressionList(*self.partition_by) if self.order_by is not None: if isinstance(self.order_by, (list, tuple)): self.order_by = ExpressionList(*self.order_by) elif not isinstance(self.order_by, BaseExpression): raise ValueError( 'order_by must be either an Expression or a sequence of ' 'expressions.' ) super().__init__(output_field=output_field) self.source_expression = self._parse_expressions(expression)[0] def _resolve_output_field(self): return self.source_expression.output_field def get_source_expressions(self): return [self.source_expression, self.partition_by, self.order_by, self.frame] def set_source_expressions(self, exprs): self.source_expression, self.partition_by, self.order_by, self.frame = exprs def as_sql(self, compiler, connection, template=None): connection.ops.check_expression_support(self) if not connection.features.supports_over_clause: raise NotSupportedError('This backend does not support window expressions.') expr_sql, params = compiler.compile(self.source_expression) window_sql, window_params = [], [] if self.partition_by is not None: sql_expr, sql_params = self.partition_by.as_sql( compiler=compiler, connection=connection, template='PARTITION BY %(expressions)s', ) window_sql.extend(sql_expr) window_params.extend(sql_params) if self.order_by is not None: window_sql.append(' ORDER BY ') order_sql, order_params = compiler.compile(self.order_by) window_sql.extend(order_sql) window_params.extend(order_params) if self.frame: frame_sql, frame_params = compiler.compile(self.frame) window_sql.append(' ' + frame_sql) window_params.extend(frame_params) params.extend(window_params) template = template or self.template return template % { 'expression': expr_sql, 'window': ''.join(window_sql).strip() }, params def __str__(self): return '{} OVER ({}{}{})'.format( str(self.source_expression), 'PARTITION BY ' + str(self.partition_by) if self.partition_by else '', 'ORDER BY ' + str(self.order_by) if self.order_by else '', str(self.frame or ''), ) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] class WindowFrame(Expression): """ Model the frame clause in window expressions. There are two types of frame clauses, implemented as the subclasses below; however, all processing and validation (by no means intended to be complete) is done here. Providing an end for a frame is optional (the default is UNBOUNDED FOLLOWING, which is the last row in the frame).
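As an illustrative sketch (editor's addition): RowRange(start=-3, end=None) renders as 'ROWS BETWEEN 3 PRECEDING AND UNBOUNDED FOLLOWING', and ValueRange(start=None, end=0) as 'RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW'.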
""" template = '%(frame_type)s BETWEEN %(start)s AND %(end)s' def __init__(self, start=None, end=None): self.start = Value(start) self.end = Value(end) def set_source_expressions(self, exprs): self.start, self.end = exprs def get_source_expressions(self): return [self.start, self.end] def as_sql(self, compiler, connection): connection.ops.check_expression_support(self) start, end = self.window_frame_start_end(connection, self.start.value, self.end.value) return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, }, [] def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self) def get_group_by_cols(self, alias=None): return [] def __str__(self): if self.start.value is not None and self.start.value < 0: start = '%d %s' % (abs(self.start.value), connection.ops.PRECEDING) elif self.start.value is not None and self.start.value == 0: start = connection.ops.CURRENT_ROW else: start = connection.ops.UNBOUNDED_PRECEDING if self.end.value is not None and self.end.value > 0: end = '%d %s' % (self.end.value, connection.ops.FOLLOWING) elif self.end.value is not None and self.end.value == 0: end = connection.ops.CURRENT_ROW else: end = connection.ops.UNBOUNDED_FOLLOWING return self.template % { 'frame_type': self.frame_type, 'start': start, 'end': end, } def window_frame_start_end(self, connection, start, end): raise NotImplementedError('Subclasses must implement window_frame_start_end().') class RowRange(WindowFrame): frame_type = 'ROWS' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_rows_start_end(start, end) class ValueRange(WindowFrame): frame_type = 'RANGE' def window_frame_start_end(self, connection, start, end): return connection.ops.window_frame_range_start_end(start, end)
b605cdadad41cb6f5dbb270b2f60120bd8dadaf010f5c7409dd1a52dbeff93aa
import collections.abc import copy import datetime import decimal import operator import uuid import warnings from base64 import b64decode, b64encode from functools import partialmethod, total_ordering from django import forms from django.apps import apps from django.conf import settings from django.core import checks, exceptions, validators from django.db import connection, connections, router from django.db.models.constants import LOOKUP_SEP from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin from django.utils import timezone from django.utils.datastructures import DictWrapper from django.utils.dateparse import ( parse_date, parse_datetime, parse_duration, parse_time, ) from django.utils.duration import duration_microseconds, duration_string from django.utils.functional import Promise, cached_property from django.utils.ipv6 import clean_ipv6_address from django.utils.itercompat import is_iterable from django.utils.text import capfirst from django.utils.translation import gettext_lazy as _ __all__ = [ 'AutoField', 'BLANK_CHOICE_DASH', 'BigAutoField', 'BigIntegerField', 'BinaryField', 'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField', 'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty', 'Field', 'FilePathField', 'FloatField', 'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED', 'NullBooleanField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SlugField', 'SmallAutoField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField', 'UUIDField', ] class Empty: pass class NOT_PROVIDED: pass # The values to use for "blank" in SelectFields. Will be appended to the start # of most "choices" lists. BLANK_CHOICE_DASH = [("", "---------")] def _load_field(app_label, model_name, field_name): return apps.get_model(app_label, model_name)._meta.get_field(field_name) # A guide to Field parameters: # # * name: The name of the field specified in the model. # * attname: The attribute to use on the model object. This is the same as # "name", except in the case of ForeignKeys, where "_id" is # appended. # * db_column: The db_column specified in the model (or None). # * column: The database column for this field. This is the same as # "attname", except if db_column is specified. # # Code that introspects values, or does other dynamic things, should use # attname. For example, this gets the primary key value of object "obj": # # getattr(obj, opts.pk.attname) def _empty(of_cls): new = Empty() new.__class__ = of_cls return new def return_None(): return None @total_ordering class Field(RegisterLookupMixin): """Base class for all field types""" # Designates whether empty strings fundamentally are allowed at the # database level. empty_strings_allowed = True empty_values = list(validators.EMPTY_VALUES) # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that Django implicitly # creates, creation_counter is used for all user-specified fields. creation_counter = 0 auto_creation_counter = -1 default_validators = [] # Default set of validators default_error_messages = { 'invalid_choice': _('Value %(value)r is not a valid choice.'), 'null': _('This field cannot be null.'), 'blank': _('This field cannot be blank.'), 'unique': _('%(model_name)s with this %(field_label)s ' 'already exists.'), # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. 
# Eg: "Title must be unique for pub_date year" 'unique_for_date': _("%(field_label)s must be unique for " "%(date_field_label)s %(lookup_type)s."), } system_check_deprecated_details = None system_check_removed_details = None # Field flags hidden = False many_to_many = None many_to_one = None one_to_many = None one_to_one = None related_model = None descriptor_class = DeferredAttribute # Generic field type description, usually overridden by subclasses def _description(self): return _('Field of type: %(field_type)s') % { 'field_type': self.__class__.__name__ } description = property(_description) def __init__(self, verbose_name=None, name=None, primary_key=False, max_length=None, unique=False, blank=False, null=False, db_index=False, rel=None, default=NOT_PROVIDED, editable=True, serialize=True, unique_for_date=None, unique_for_month=None, unique_for_year=None, choices=None, help_text='', db_column=None, db_tablespace=None, auto_created=False, validators=(), error_messages=None): self.name = name self.verbose_name = verbose_name # May be set by set_attributes_from_name self._verbose_name = verbose_name # Store original for deconstruction self.primary_key = primary_key self.max_length, self._unique = max_length, unique self.blank, self.null = blank, null self.remote_field = rel self.is_relation = self.remote_field is not None self.default = default self.editable = editable self.serialize = serialize self.unique_for_date = unique_for_date self.unique_for_month = unique_for_month self.unique_for_year = unique_for_year if isinstance(choices, collections.abc.Iterator): choices = list(choices) self.choices = choices self.help_text = help_text self.db_index = db_index self.db_column = db_column self._db_tablespace = db_tablespace self.auto_created = auto_created # Adjust the appropriate creation counter, and save our local copy. if auto_created: self.creation_counter = Field.auto_creation_counter Field.auto_creation_counter -= 1 else: self.creation_counter = Field.creation_counter Field.creation_counter += 1 self._validators = list(validators) # Store for deconstruction later messages = {} for c in reversed(self.__class__.__mro__): messages.update(getattr(c, 'default_error_messages', {})) messages.update(error_messages or {}) self._error_messages = error_messages # Store for deconstruction later self.error_messages = messages def __str__(self): """ Return "app_label.model_label.field_name" for fields attached to models. """ if not hasattr(self, 'model'): return super().__str__() model = self.model app = model._meta.app_label return '%s.%s.%s' % (app, model._meta.object_name, self.name) def __repr__(self): """Display the module, class, and name of the field.""" path = '%s.%s' % (self.__class__.__module__, self.__class__.__qualname__) name = getattr(self, 'name', None) if name is not None: return '<%s: %s>' % (path, name) return '<%s>' % path def check(self, **kwargs): return [ *self._check_field_name(), *self._check_choices(), *self._check_db_index(), *self._check_null_allowed_for_primary_keys(), *self._check_backend_specific_checks(**kwargs), *self._check_validators(), *self._check_deprecation_details(), ] def _check_field_name(self): """ Check if field name is valid, i.e. 1) does not end with an underscore, 2) does not contain "__" and 3) is not "pk". """ if self.name.endswith('_'): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] elif LOOKUP_SEP in self.name: return [ checks.Error( 'Field names must not contain "%s".' 
% (LOOKUP_SEP,), obj=self, id='fields.E002', ) ] elif self.name == 'pk': return [ checks.Error( "'pk' is a reserved word that cannot be used as a field name.", obj=self, id='fields.E003', ) ] else: return [] def _check_choices(self): if not self.choices: return [] def is_value(value): return isinstance(value, (str, Promise)) or not is_iterable(value) if not is_iterable(self.choices) or isinstance(self.choices, str): return [ checks.Error( "'choices' must be an iterable (e.g., a list or tuple).", obj=self, id='fields.E004', ) ] choice_max_length = 0 # Expect [group_name, [value, display]] for choices_group in self.choices: try: group_name, group_choices = choices_group except (TypeError, ValueError): # Containing non-pairs break try: if not all( is_value(value) and is_value(human_name) for value, human_name in group_choices ): break if self.max_length is not None and group_choices: choice_max_length = max( choice_max_length, *(len(value) for value, _ in group_choices if isinstance(value, str)), ) except (TypeError, ValueError): # No groups, choices in the form [value, display] value, human_name = group_name, group_choices if not is_value(value) or not is_value(human_name): break if self.max_length is not None and isinstance(value, str): choice_max_length = max(choice_max_length, len(value)) # Special case: choices=['ab'] if isinstance(choices_group, str): break else: if self.max_length is not None and choice_max_length > self.max_length: return [ checks.Error( "'max_length' is too small to fit the longest value " "in 'choices' (%d characters)." % choice_max_length, obj=self, id='fields.E009', ), ] return [] return [ checks.Error( "'choices' must be an iterable containing " "(actual value, human readable name) tuples.", obj=self, id='fields.E005', ) ] def _check_db_index(self): if self.db_index not in (None, True, False): return [ checks.Error( "'db_index' must be None, True or False.", obj=self, id='fields.E006', ) ] else: return [] def _check_null_allowed_for_primary_keys(self): if (self.primary_key and self.null and not connection.features.interprets_empty_strings_as_nulls): # We cannot reliably check this for backends like Oracle which # consider NULL and '' to be equal (and thus set up # character-based fields a little differently). return [ checks.Error( 'Primary keys must not have null=True.', hint=('Set null=False on the field, or ' 'remove primary_key=True argument.'), obj=self, id='fields.E007', ) ] else: return [] def _check_backend_specific_checks(self, **kwargs): app_label = self.model._meta.app_label for db in connections: if router.allow_migrate(db, app_label, model_name=self.model._meta.model_name): return connections[db].validation.check_field(self, **kwargs) return [] def _check_validators(self): errors = [] for i, validator in enumerate(self.validators): if not callable(validator): errors.append( checks.Error( "All 'validators' must be callable.", hint=( "validators[{i}] ({repr}) isn't a function or " "instance of a validator class.".format( i=i, repr=repr(validator), ) ), obj=self, id='fields.E008', ) ) return errors def _check_deprecation_details(self): if self.system_check_removed_details is not None: return [ checks.Error( self.system_check_removed_details.get( 'msg', '%s has been removed except for support in historical ' 'migrations.' 
% self.__class__.__name__ ), hint=self.system_check_removed_details.get('hint'), obj=self, id=self.system_check_removed_details.get('id', 'fields.EXXX'), ) ] elif self.system_check_deprecated_details is not None: return [ checks.Warning( self.system_check_deprecated_details.get( 'msg', '%s has been deprecated.' % self.__class__.__name__ ), hint=self.system_check_deprecated_details.get('hint'), obj=self, id=self.system_check_deprecated_details.get('id', 'fields.WXXX'), ) ] return [] def get_col(self, alias, output_field=None): if output_field is None: output_field = self if alias != self.model._meta.db_table or output_field != self: from django.db.models.expressions import Col return Col(alias, self, output_field) else: return self.cached_col @cached_property def cached_col(self): from django.db.models.expressions import Col return Col(self.model._meta.db_table, self) def select_format(self, compiler, sql, params): """ Custom format for select clauses. For example, GIS columns need to be selected as AsText(table.col) on MySQL as the table.col data can't be used by Django. """ return sql, params def deconstruct(self): """ Return enough information to recreate the field as a 4-tuple: * The name of the field on the model, if contribute_to_class() has been run. * The import path of the field, including the class, e.g. django.db.models.IntegerField. This should be the most portable version, so less specific may be better. * A list of positional arguments. * A dict of keyword arguments. Note that the positional or keyword arguments must contain values of the following types (including inner values of collection types): * None, bool, str, int, float, complex, set, frozenset, list, tuple, dict * UUID * datetime.datetime (naive), datetime.date * top-level classes, top-level functions - will be referenced by their full import path * Storage instances - these have their own deconstruct() method This is because the values here must be serialized into a text format (possibly new Python code, possibly JSON) and these are the only types with encoding handlers defined. There's no need to return the exact way the field was instantiated this time, just ensure that the resulting field is the same - prefer keyword arguments over positional ones, and omit parameters with their default values.
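As a hedged, illustrative example (editor's addition; the field and model names are hypothetical): a field declared as title = models.CharField(max_length=100, null=True) deconstructs to roughly ('title', 'django.db.models.CharField', [], {'max_length': 100, 'null': True}).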
""" # Short-form way of fetching all the default parameters keywords = {} possibles = { "verbose_name": None, "primary_key": False, "max_length": None, "unique": False, "blank": False, "null": False, "db_index": False, "default": NOT_PROVIDED, "editable": True, "serialize": True, "unique_for_date": None, "unique_for_month": None, "unique_for_year": None, "choices": None, "help_text": '', "db_column": None, "db_tablespace": None, "auto_created": False, "validators": [], "error_messages": None, } attr_overrides = { "unique": "_unique", "error_messages": "_error_messages", "validators": "_validators", "verbose_name": "_verbose_name", "db_tablespace": "_db_tablespace", } equals_comparison = {"choices", "validators"} for name, default in possibles.items(): value = getattr(self, attr_overrides.get(name, name)) # Unroll anything iterable for choices into a concrete list if name == "choices" and isinstance(value, collections.abc.Iterable): value = list(value) # Do correct kind of comparison if name in equals_comparison: if value != default: keywords[name] = value else: if value is not default: keywords[name] = value # Work out path - we shorten it for known Django core fields path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__) if path.startswith("django.db.models.fields.related"): path = path.replace("django.db.models.fields.related", "django.db.models") elif path.startswith("django.db.models.fields.files"): path = path.replace("django.db.models.fields.files", "django.db.models") elif path.startswith("django.db.models.fields.proxy"): path = path.replace("django.db.models.fields.proxy", "django.db.models") elif path.startswith("django.db.models.fields"): path = path.replace("django.db.models.fields", "django.db.models") # Return basic info - other fields should override this. return (self.name, path, [], keywords) def clone(self): """ Uses deconstruct() to clone a new copy of this Field. Will not preserve any class attachments/attribute names. """ name, path, args, kwargs = self.deconstruct() return self.__class__(*args, **kwargs) def __eq__(self, other): # Needed for @total_ordering if isinstance(other, Field): return self.creation_counter == other.creation_counter return NotImplemented def __lt__(self, other): # This is needed because bisect does not take a comparison function. if isinstance(other, Field): return self.creation_counter < other.creation_counter return NotImplemented def __hash__(self): return hash(self.creation_counter) def __deepcopy__(self, memodict): # We don't have to deepcopy very much here, since most things are not # intended to be altered after initial creation. obj = copy.copy(self) if self.remote_field: obj.remote_field = copy.copy(self.remote_field) if hasattr(self.remote_field, 'field') and self.remote_field.field is self: obj.remote_field.field = obj memodict[id(self)] = obj return obj def __copy__(self): # We need to avoid hitting __reduce__, so define this # slightly weird copy construct. obj = Empty() obj.__class__ = self.__class__ obj.__dict__ = self.__dict__.copy() return obj def __reduce__(self): """ Pickling should return the model._meta.fields instance of the field, not a new copy of that field. So, use the app registry to load the model and then the field back. """ if not hasattr(self, 'model'): # Fields are sometimes used without attaching them to models (for # example in aggregation). In this case give back a plain field # instance. 
The code below will create a new empty instance of # class self.__class__, then update its dict with self.__dict__ # values - so, this is very close to normal pickle. state = self.__dict__.copy() # The _get_default cached_property can't be pickled due to lambda # usage. state.pop('_get_default', None) return _empty, (self.__class__,), state return _load_field, (self.model._meta.app_label, self.model._meta.object_name, self.name) def get_pk_value_on_save(self, instance): """ Hook to generate new PK values on save. This method is called when saving instances with no primary key value set. If this method returns something other than None, the returned value is used when saving the new instance. """ if self.default: return self.get_default() return None def to_python(self, value): """ Convert the input value into the expected Python data type, raising django.core.exceptions.ValidationError if the data can't be converted. Return the converted value. Subclasses should override this. """ return value @cached_property def validators(self): """ Some validators can't be created at field initialization time. This method provides a way to delay their creation until required. """ return [*self.default_validators, *self._validators] def run_validators(self, value): if value in self.empty_values: return errors = [] for v in self.validators: try: v(value) except exceptions.ValidationError as e: if hasattr(e, 'code') and e.code in self.error_messages: e.message = self.error_messages[e.code] errors.extend(e.error_list) if errors: raise exceptions.ValidationError(errors) def validate(self, value, model_instance): """ Validate value and raise ValidationError if necessary. Subclasses should override this to provide validation logic. """ if not self.editable: # Skip validation for non-editable fields. return if self.choices is not None and value not in self.empty_values: for option_key, option_value in self.choices: if isinstance(option_value, (list, tuple)): # This is an optgroup, so look inside the group for # options. for optgroup_key, optgroup_value in option_value: if value == optgroup_key: return elif value == option_key: return raise exceptions.ValidationError( self.error_messages['invalid_choice'], code='invalid_choice', params={'value': value}, ) if value is None and not self.null: raise exceptions.ValidationError(self.error_messages['null'], code='null') if not self.blank and value in self.empty_values: raise exceptions.ValidationError(self.error_messages['blank'], code='blank') def clean(self, value, model_instance): """ Convert the value's type and run validation. Validation errors from to_python() and validate() are propagated. Return the correct value if no error is raised. """ value = self.to_python(value) self.validate(value, model_instance) self.run_validators(value) return value def db_type_parameters(self, connection): return DictWrapper(self.__dict__, connection.ops.quote_name, 'qn_') def db_check(self, connection): """ Return the database column check constraint for this field, for the provided connection. Works the same way as db_type() for the case that get_internal_type() does not map to a preexisting model field. """ data = self.db_type_parameters(connection) try: return connection.data_type_check_constraints[self.get_internal_type()] % data except KeyError: return None def db_type(self, connection): """ Return the database column data type for this field, for the provided connection.
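For example (editor's illustration): on the PostgreSQL backend an IntegerField maps to 'integer' and a CharField(max_length=100) to 'varchar(100)'.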
""" # The default implementation of this method looks at the # backend-specific data_types dictionary, looking up the field by its # "internal type". # # A Field class can implement the get_internal_type() method to specify # which *preexisting* Django Field class it's most similar to -- i.e., # a custom field might be represented by a TEXT column type, which is # the same as the TextField Django field type, which means the custom # field's get_internal_type() returns 'TextField'. # # But the limitation of the get_internal_type() / data_types approach # is that it cannot handle database column types that aren't already # mapped to one of the built-in Django field types. In this case, you # can implement db_type() instead of get_internal_type() to specify # exactly which wacky database column type you want to use. data = self.db_type_parameters(connection) try: return connection.data_types[self.get_internal_type()] % data except KeyError: return None def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. For example, this method is called by ForeignKey and OneToOneField to determine its data type. """ return self.db_type(connection) def cast_db_type(self, connection): """Return the data type to use in the Cast() function.""" db_type = connection.ops.cast_data_types.get(self.get_internal_type()) if db_type: return db_type % self.db_type_parameters(connection) return self.db_type(connection) def db_parameters(self, connection): """ Extension of db_type(), providing a range of different return values (type, checks). This will look at db_type(), allowing custom model fields to override it. """ type_string = self.db_type(connection) check_string = self.db_check(connection) return { "type": type_string, "check": check_string, } def db_type_suffix(self, connection): return connection.data_types_suffix.get(self.get_internal_type()) def get_db_converters(self, connection): if hasattr(self, 'from_db_value'): return [self.from_db_value] return [] @property def unique(self): return self._unique or self.primary_key @property def db_tablespace(self): return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE @property def db_returning(self): """ Private API intended only to be used by Django itself. Currently only the PostgreSQL backend supports returning multiple fields on a model. """ return False def set_attributes_from_name(self, name): self.name = self.name or name self.attname, self.column = self.get_attname_column() self.concrete = self.column is not None if self.verbose_name is None and self.name: self.verbose_name = self.name.replace('_', ' ') def contribute_to_class(self, cls, name, private_only=False): """ Register the field with the model class it belongs to. If private_only is True, create a separate instance of this field for every subclass of cls, even if cls is not an abstract model. """ self.set_attributes_from_name(name) self.model = cls cls._meta.add_field(self, private=private_only) if self.column: # Don't override classmethods with the descriptor. This means that # if you have a classmethod and a field with the same name, then # such fields can't be deferred (we don't have a check for this). 
if not getattr(cls, self.attname, None): setattr(cls, self.attname, self.descriptor_class(self)) if self.choices is not None: setattr(cls, 'get_%s_display' % self.name, partialmethod(cls._get_FIELD_display, field=self)) def get_filter_kwargs_for_object(self, obj): """ Return a dict that, when passed as kwargs to self.model.filter(), would yield all instances having the same value for this field as obj has. """ return {self.name: getattr(obj, self.attname)} def get_attname(self): return self.name def get_attname_column(self): attname = self.get_attname() column = self.db_column or attname return attname, column def get_internal_type(self): return self.__class__.__name__ def pre_save(self, model_instance, add): """Return field's value just before saving.""" return getattr(model_instance, self.attname) def get_prep_value(self, value): """Perform preliminary non-db specific value checks and conversions.""" if isinstance(value, Promise): value = value._proxy____cast() return value def get_db_prep_value(self, value, connection, prepared=False): """ Return field's value prepared for interacting with the database backend. Used by the default implementations of get_db_prep_save(). """ if not prepared: value = self.get_prep_value(value) return value def get_db_prep_save(self, value, connection): """Return field's value prepared for saving into a database.""" return self.get_db_prep_value(value, connection=connection, prepared=False) def has_default(self): """Return a boolean of whether this field has a default value.""" return self.default is not NOT_PROVIDED def get_default(self): """Return the default value for this field.""" return self._get_default() @cached_property def _get_default(self): if self.has_default(): if callable(self.default): return self.default return lambda: self.default if not self.empty_strings_allowed or self.null and not connection.features.interprets_empty_strings_as_nulls: return return_None return str # return empty string def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=()): """ Return choices with a default blank choice included, for use as <select> choices for this field. """ if self.choices is not None: choices = list(self.choices) if include_blank: blank_defined = any(choice in ('', None) for choice, _ in self.flatchoices) if not blank_defined: choices = blank_choice + choices return choices rel_model = self.remote_field.model limit_choices_to = limit_choices_to or self.get_limit_choices_to() choice_func = operator.attrgetter( self.remote_field.get_related_field().attname if hasattr(self.remote_field, 'get_related_field') else 'pk' ) qs = rel_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (choice_func(x), str(x)) for x in qs ] def value_to_string(self, obj): """ Return a string value of this field from the passed obj. This is used by the serialization framework.
""" return str(self.value_from_object(obj)) def _get_flatchoices(self): """Flattened version of choices tuple.""" if self.choices is None: return [] flat = [] for choice, value in self.choices: if isinstance(value, (list, tuple)): flat.extend(value) else: flat.append((choice, value)) return flat flatchoices = property(_get_flatchoices) def save_form_data(self, instance, data): setattr(instance, self.name, data) def formfield(self, form_class=None, choices_form_class=None, **kwargs): """Return a django.forms.Field instance for this field.""" defaults = { 'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text, } if self.has_default(): if callable(self.default): defaults['initial'] = self.default defaults['show_hidden_initial'] = True else: defaults['initial'] = self.get_default() if self.choices is not None: # Fields with choices get special treatment. include_blank = (self.blank or not (self.has_default() or 'initial' in kwargs)) defaults['choices'] = self.get_choices(include_blank=include_blank) defaults['coerce'] = self.to_python if self.null: defaults['empty_value'] = None if choices_form_class is not None: form_class = choices_form_class else: form_class = forms.TypedChoiceField # Many of the subclass-specific formfield arguments (min_value, # max_value) don't apply for choice fields, so be sure to only pass # the values that TypedChoiceField will understand. for k in list(kwargs): if k not in ('coerce', 'empty_value', 'choices', 'required', 'widget', 'label', 'initial', 'help_text', 'error_messages', 'show_hidden_initial', 'disabled'): del kwargs[k] defaults.update(kwargs) if form_class is None: form_class = forms.CharField return form_class(**defaults) def value_from_object(self, obj): """Return the value of this field in the given model instance.""" return getattr(obj, self.attname) class BooleanField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be either True or False.'), 'invalid_nullable': _('“%(value)s” value must be either True, False, or None.'), } description = _("Boolean (Either True or False)") def get_internal_type(self): return "BooleanField" def to_python(self, value): if self.null and value in self.empty_values: return None if value in (True, False): # 1/0 are equal to True/False. bool() converts former to latter. return bool(value) if value in ('t', 'True', '1'): return True if value in ('f', 'False', '0'): return False raise exceptions.ValidationError( self.error_messages['invalid_nullable' if self.null else 'invalid'], code='invalid', params={'value': value}, ) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return self.to_python(value) def formfield(self, **kwargs): if self.choices is not None: include_blank = not (self.has_default() or 'initial' in kwargs) defaults = {'choices': self.get_choices(include_blank=include_blank)} else: form_class = forms.NullBooleanField if self.null else forms.BooleanField # In HTML checkboxes, 'required' means "must be checked" which is # different from the choices case ("must select some value"). # required=False allows unchecked checkboxes. 
defaults = {'form_class': form_class, 'required': False} return super().formfield(**{**defaults, **kwargs}) class CharField(Field): description = _("String (up to %(max_length)s)") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_attribute(**kwargs), ] def _check_max_length_attribute(self, **kwargs): if self.max_length is None: return [ checks.Error( "CharFields must define a 'max_length' attribute.", obj=self, id='fields.E120', ) ] elif (not isinstance(self.max_length, int) or isinstance(self.max_length, bool) or self.max_length <= 0): return [ checks.Error( "'max_length' must be a positive integer.", obj=self, id='fields.E121', ) ] else: return [] def cast_db_type(self, connection): if self.max_length is None: return connection.ops.cast_char_field_without_max_length return super().cast_db_type(connection) def get_internal_type(self): return "CharField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). defaults = {'max_length': self.max_length} # TODO: Handle multiple backends with different feature flags. if self.null and not connection.features.interprets_empty_strings_as_nulls: defaults['empty_value'] = None defaults.update(kwargs) return super().formfield(**defaults) class CommaSeparatedIntegerField(CharField): default_validators = [validators.validate_comma_separated_integer_list] description = _("Comma-separated integers") system_check_removed_details = { 'msg': ( 'CommaSeparatedIntegerField is removed except for support in ' 'historical migrations.' ), 'hint': ( 'Use CharField(validators=[validate_comma_separated_integer_list]) ' 'instead.' ), 'id': 'fields.E901', } class DateTimeCheckMixin: def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_mutually_exclusive_options(), *self._check_fix_default_value(), ] def _check_mutually_exclusive_options(self): # auto_now, auto_now_add, and default are mutually exclusive # options. The use of more than one of these options together # will trigger an Error mutually_exclusive_options = [self.auto_now_add, self.auto_now, self.has_default()] enabled_options = [option not in (None, False) for option in mutually_exclusive_options].count(True) if enabled_options > 1: return [ checks.Error( "The options auto_now, auto_now_add, and default " "are mutually exclusive. Only one of these options " "may be present.", obj=self, id='fields.E160', ) ] else: return [] def _check_fix_default_value(self): return [] class DateField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid date format. 
It must be ' 'in YYYY-MM-DD format.'), 'invalid_date': _('“%(value)s” value has the correct format (YYYY-MM-DD) ' 'but it is an invalid date.'), } description = _("Date (without time)") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): if not timezone.is_naive(value): value = timezone.make_naive(value, timezone.utc) value = value.date() elif isinstance(value, datetime.date): # Nothing to do, as dates don't have tz information pass else: # No explicit date / datetime value -- no checks necessary return [] offset = datetime.timedelta(days=1) lower = (now - offset).date() upper = (now + offset).date() if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now: kwargs['auto_now'] = True if self.auto_now_add: kwargs['auto_now_add'] = True if self.auto_now or self.auto_now_add: del kwargs['editable'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "DateField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): if settings.USE_TZ and timezone.is_aware(value): # Convert aware datetimes to the default time zone # before casting them to dates (#17742). 
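# (Editor's illustration, assuming TIME_ZONE='America/Chicago': an aware
# datetime(2019, 1, 1, 2, 30, tzinfo=utc) becomes 2018-12-31 20:30 local time,
# so the date extracted below is 2018-12-31 rather than 2019-01-01.)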
default_timezone = timezone.get_default_timezone() value = timezone.make_naive(value, default_timezone) return value.date() if isinstance(value, datetime.date): return value try: parsed = parse_date(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.date.today() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def contribute_to_class(self, cls, name, **kwargs): super().contribute_to_class(cls, name, **kwargs) if not self.null: setattr( cls, 'get_next_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=True) ) setattr( cls, 'get_previous_by_%s' % self.name, partialmethod(cls._get_next_or_previous_by_FIELD, field=self, is_next=False) ) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts dates into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateField, **kwargs, }) class DateTimeField(DateField): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format.'), 'invalid_date': _("“%(value)s” value has the correct format " "(YYYY-MM-DD) but it is an invalid date."), 'invalid_datetime': _('“%(value)s” value has the correct format ' '(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) ' 'but it is an invalid date/time.'), } description = _("Date (with time)") # __init__ is inherited from DateField def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. """ if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.date): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset lower = datetime.datetime(lower.year, lower.month, lower.day) upper = now + second_offset upper = datetime.datetime(upper.year, upper.month, upper.day) value = datetime.datetime(value.year, value.month, value.day) else: # No explicit date / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. 
If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def get_internal_type(self): return "DateTimeField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): value = datetime.datetime(value.year, value.month, value.day) if settings.USE_TZ: # For backwards compatibility, interpret naive datetimes in # local time. This won't work during DST change, but we can't # do much about it, so we let the exceptions percolate up the # call stack. warnings.warn("DateTimeField %s.%s received a naive datetime " "(%s) while time zone support is active." % (self.model.__name__, self.name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value try: parsed = parse_datetime(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_datetime'], code='invalid_datetime', params={'value': value}, ) try: parsed = parse_date(value) if parsed is not None: return datetime.datetime(parsed.year, parsed.month, parsed.day) except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_date'], code='invalid_date', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = timezone.now() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) # contribute_to_class is inherited from DateField; it registers # get_next_by_FOO and get_previous_by_FOO def get_prep_value(self, value): value = super().get_prep_value(value) value = self.to_python(value) if value is not None and settings.USE_TZ and timezone.is_naive(value): # For backwards compatibility, interpret naive datetimes in local # time. This won't work during DST change, but we can't do much # about it, so we let the exceptions percolate up the call stack. try: name = '%s.%s' % (self.model.__name__, self.name) except AttributeError: name = '(unbound)' warnings.warn("DateTimeField %s received a naive datetime (%s)" " while time zone support is active."
% (name, value), RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) return value def get_db_prep_value(self, value, connection, prepared=False): # Casts datetimes into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_datetimefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DateTimeField, **kwargs, }) class DecimalField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a decimal number.'), } description = _("Decimal number") def __init__(self, verbose_name=None, name=None, max_digits=None, decimal_places=None, **kwargs): self.max_digits, self.decimal_places = max_digits, decimal_places super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): errors = super().check(**kwargs) digits_errors = [ *self._check_decimal_places(), *self._check_max_digits(), ] if not digits_errors: errors.extend(self._check_decimal_places_and_max_digits(**kwargs)) else: errors.extend(digits_errors) return errors def _check_decimal_places(self): try: decimal_places = int(self.decimal_places) if decimal_places < 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'decimal_places' attribute.", obj=self, id='fields.E130', ) ] except ValueError: return [ checks.Error( "'decimal_places' must be a non-negative integer.", obj=self, id='fields.E131', ) ] else: return [] def _check_max_digits(self): try: max_digits = int(self.max_digits) if max_digits <= 0: raise ValueError() except TypeError: return [ checks.Error( "DecimalFields must define a 'max_digits' attribute.", obj=self, id='fields.E132', ) ] except ValueError: return [ checks.Error( "'max_digits' must be a positive integer.", obj=self, id='fields.E133', ) ] else: return [] def _check_decimal_places_and_max_digits(self, **kwargs): if int(self.decimal_places) > int(self.max_digits): return [ checks.Error( "'max_digits' must be greater or equal to 'decimal_places'.", obj=self, id='fields.E134', ) ] return [] @cached_property def validators(self): return super().validators + [ validators.DecimalValidator(self.max_digits, self.decimal_places) ] @cached_property def context(self): return decimal.Context(prec=self.max_digits) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.max_digits is not None: kwargs['max_digits'] = self.max_digits if self.decimal_places is not None: kwargs['decimal_places'] = self.decimal_places return name, path, args, kwargs def get_internal_type(self): return "DecimalField" def to_python(self, value): if value is None: return value if isinstance(value, float): return self.context.create_decimal_from_float(value) try: return decimal.Decimal(value) except decimal.InvalidOperation: raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_save(self, value, connection): return connection.ops.adapt_decimalfield_value(self.to_python(value), self.max_digits, self.decimal_places) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): return super().formfield(**{ 'max_digits': self.max_digits, 'decimal_places': self.decimal_places, 'form_class': forms.DecimalField, **kwargs, }) class 
DurationField(Field): """ Store timedelta objects. Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint of microseconds on other databases. """ empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' '[DD] [[HH:]MM:]ss[.uuuuuu] format.') } description = _("Duration") def get_internal_type(self): return "DurationField" def to_python(self, value): if value is None: return value if isinstance(value, datetime.timedelta): return value try: parsed = parse_duration(value) except ValueError: pass else: if parsed is not None: return parsed raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def get_db_prep_value(self, value, connection, prepared=False): if connection.features.has_native_duration_field: return value if value is None: return None return duration_microseconds(value) def get_db_converters(self, connection): converters = [] if not connection.features.has_native_duration_field: converters.append(connection.ops.convert_durationfield_value) return converters + super().get_db_converters(connection) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else duration_string(val) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.DurationField, **kwargs, }) class EmailField(CharField): default_validators = [validators.validate_email] description = _("Email address") def __init__(self, *args, **kwargs): # max_length=254 to be compliant with RFCs 3696 and 5321 kwargs.setdefault('max_length', 254) super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() # We do not exclude max_length if it matches default as we want to change # the default in future. return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause email validation to be performed # twice. 
return super().formfield(**{ 'form_class': forms.EmailField, **kwargs, }) class FilePathField(Field): description = _("File path") def __init__(self, verbose_name=None, name=None, path='', match=None, recursive=False, allow_files=True, allow_folders=False, **kwargs): self.path, self.match, self.recursive = path, match, recursive self.allow_files, self.allow_folders = allow_files, allow_folders kwargs.setdefault('max_length', 100) super().__init__(verbose_name, name, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_allowing_files_or_folders(**kwargs), ] def _check_allowing_files_or_folders(self, **kwargs): if not self.allow_files and not self.allow_folders: return [ checks.Error( "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.", obj=self, id='fields.E140', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.path != '': kwargs['path'] = self.path if self.match is not None: kwargs['match'] = self.match if self.recursive is not False: kwargs['recursive'] = self.recursive if self.allow_files is not True: kwargs['allow_files'] = self.allow_files if self.allow_folders is not False: kwargs['allow_folders'] = self.allow_folders if kwargs.get("max_length") == 100: del kwargs["max_length"] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'path': self.path() if callable(self.path) else self.path, 'match': self.match, 'recursive': self.recursive, 'form_class': forms.FilePathField, 'allow_files': self.allow_files, 'allow_folders': self.allow_folders, **kwargs, }) def get_internal_type(self): return "FilePathField" class FloatField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be a float.'), } description = _("Floating point number") def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return float(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "FloatField" def to_python(self, value): if value is None: return value try: return float(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.FloatField, **kwargs, }) class IntegerField(Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value must be an integer.'), } description = _("Integer") def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_max_length_warning(), ] def _check_max_length_warning(self): if self.max_length is not None: return [ checks.Warning( "'max_length' is ignored when used with %s." % self.__class__.__name__, hint="Remove 'max_length' from field", obj=self, id='fields.W122', ) ] return [] @cached_property def validators(self): # These validators can't be added at field initialization time since # they're based on values retrieved from `connection`. 
validators_ = super().validators internal_type = self.get_internal_type() min_value, max_value = connection.ops.integer_field_range(internal_type) if min_value is not None and not any( ( isinstance(validator, validators.MinValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) >= min_value ) for validator in validators_ ): validators_.append(validators.MinValueValidator(min_value)) if max_value is not None and not any( ( isinstance(validator, validators.MaxValueValidator) and ( validator.limit_value() if callable(validator.limit_value) else validator.limit_value ) <= max_value ) for validator in validators_ ): validators_.append(validators.MaxValueValidator(max_value)) return validators_ def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None try: return int(value) except (TypeError, ValueError) as e: raise e.__class__( "Field '%s' expected a number but got %r." % (self.name, value), ) from e def get_internal_type(self): return "IntegerField" def to_python(self, value): if value is None: return value try: return int(value) except (TypeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.IntegerField, **kwargs, }) class BigIntegerField(IntegerField): description = _("Big (8 byte) integer") MAX_BIGINT = 9223372036854775807 def get_internal_type(self): return "BigIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': -BigIntegerField.MAX_BIGINT - 1, 'max_value': BigIntegerField.MAX_BIGINT, **kwargs, }) class IPAddressField(Field): empty_strings_allowed = False description = _("IPv4 address") system_check_removed_details = { 'msg': ( 'IPAddressField has been removed except for support in ' 'historical migrations.' 
), 'hint': 'Use GenericIPAddressField instead.', 'id': 'fields.E900', } def __init__(self, *args, **kwargs): kwargs['max_length'] = 15 super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None return str(value) def get_internal_type(self): return "IPAddressField" class GenericIPAddressField(Field): empty_strings_allowed = False description = _("IP address") default_error_messages = {} def __init__(self, verbose_name=None, name=None, protocol='both', unpack_ipv4=False, *args, **kwargs): self.unpack_ipv4 = unpack_ipv4 self.protocol = protocol self.default_validators, invalid_error_message = \ validators.ip_address_validators(protocol, unpack_ipv4) self.default_error_messages['invalid'] = invalid_error_message kwargs['max_length'] = 39 super().__init__(verbose_name, name, *args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_blank_and_null_values(**kwargs), ] def _check_blank_and_null_values(self, **kwargs): if not getattr(self, 'null', False) and getattr(self, 'blank', False): return [ checks.Error( 'GenericIPAddressFields cannot have blank=True if null=False, ' 'as blank values are stored as nulls.', obj=self, id='fields.E150', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.unpack_ipv4 is not False: kwargs['unpack_ipv4'] = self.unpack_ipv4 if self.protocol != "both": kwargs['protocol'] = self.protocol if kwargs.get("max_length") == 39: del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "GenericIPAddressField" def to_python(self, value): if value is None: return None if not isinstance(value, str): value = str(value) value = value.strip() if ':' in value: return clean_ipv6_address(value, self.unpack_ipv4, self.error_messages['invalid']) return value def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_ipaddressfield_value(value) def get_prep_value(self, value): value = super().get_prep_value(value) if value is None: return None if value and ':' in value: try: return clean_ipv6_address(value, self.unpack_ipv4) except exceptions.ValidationError: pass return str(value) def formfield(self, **kwargs): return super().formfield(**{ 'protocol': self.protocol, 'form_class': forms.GenericIPAddressField, **kwargs, }) class NullBooleanField(BooleanField): default_error_messages = { 'invalid': _('“%(value)s” value must be either None, True or False.'), 'invalid_nullable': _('“%(value)s” value must be either None, True or False.'), } description = _("Boolean (Either True, False or None)") def __init__(self, *args, **kwargs): kwargs['null'] = True kwargs['blank'] = True super().__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['null'] del kwargs['blank'] return name, path, args, kwargs def get_internal_type(self): return "NullBooleanField" class PositiveIntegerRelDbTypeMixin: def rel_db_type(self, connection): """ Return the data type that a related field pointing to this field should use. In most cases, a foreign key pointing to a positive integer primary key will have an integer column data type but some databases (e.g. MySQL) have an unsigned integer type. 
In that case (related_fields_match_type=True), the primary key should return its db_type. """ if connection.features.related_fields_match_type: return self.db_type(connection) else: return IntegerField().db_type(connection=connection) class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive integer") def get_internal_type(self): return "PositiveIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField): description = _("Positive small integer") def get_internal_type(self): return "PositiveSmallIntegerField" def formfield(self, **kwargs): return super().formfield(**{ 'min_value': 0, **kwargs, }) class SlugField(CharField): default_validators = [validators.validate_slug] description = _("Slug (up to %(max_length)s)") def __init__(self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs): self.allow_unicode = allow_unicode if self.allow_unicode: self.default_validators = [validators.validate_unicode_slug] super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 50: del kwargs['max_length'] if self.db_index is False: kwargs['db_index'] = False else: del kwargs['db_index'] if self.allow_unicode is not False: kwargs['allow_unicode'] = self.allow_unicode return name, path, args, kwargs def get_internal_type(self): return "SlugField" def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.SlugField, 'allow_unicode': self.allow_unicode, **kwargs, }) class SmallIntegerField(IntegerField): description = _("Small integer") def get_internal_type(self): return "SmallIntegerField" class TextField(Field): description = _("Text") def get_internal_type(self): return "TextField" def to_python(self, value): if isinstance(value, str) or value is None: return value return str(value) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def formfield(self, **kwargs): # Passing max_length to forms.CharField means that the value's length # will be validated twice. This is considered acceptable since we want # the value in the form field (to pass into widget for example). return super().formfield(**{ 'max_length': self.max_length, **({} if self.choices is not None else {'widget': forms.Textarea}), **kwargs, }) class TimeField(DateTimeCheckMixin, Field): empty_strings_allowed = False default_error_messages = { 'invalid': _('“%(value)s” value has an invalid format. It must be in ' 'HH:MM[:ss[.uuuuuu]] format.'), 'invalid_time': _('“%(value)s” value has the correct format ' '(HH:MM[:ss[.uuuuuu]]) but it is an invalid time.'), } description = _("Time") def __init__(self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs): self.auto_now, self.auto_now_add = auto_now, auto_now_add if auto_now or auto_now_add: kwargs['editable'] = False kwargs['blank'] = True super().__init__(verbose_name, name, **kwargs) def _check_fix_default_value(self): """ Warn that using an actual date or datetime value is probably wrong; it's only evaluated on server startup. 
""" if not self.has_default(): return [] now = timezone.now() if not timezone.is_naive(now): now = timezone.make_naive(now, timezone.utc) value = self.default if isinstance(value, datetime.datetime): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc) elif isinstance(value, datetime.time): second_offset = datetime.timedelta(seconds=10) lower = now - second_offset upper = now + second_offset value = datetime.datetime.combine(now.date(), value) if timezone.is_aware(value): value = timezone.make_naive(value, timezone.utc).time() else: # No explicit time / datetime value -- no checks necessary return [] if lower <= value <= upper: return [ checks.Warning( 'Fixed default value provided.', hint='It seems you set a fixed date / time / datetime ' 'value as default for this field. This may not be ' 'what you want. If you want to have the current date ' 'as default, use `django.utils.timezone.now`', obj=self, id='fields.W161', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.auto_now is not False: kwargs["auto_now"] = self.auto_now if self.auto_now_add is not False: kwargs["auto_now_add"] = self.auto_now_add if self.auto_now or self.auto_now_add: del kwargs['blank'] del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "TimeField" def to_python(self, value): if value is None: return None if isinstance(value, datetime.time): return value if isinstance(value, datetime.datetime): # Not usually a good idea to pass in a datetime here (it loses # information), but this can be a side-effect of interacting with a # database backend (e.g. Oracle), so we'll be accommodating. return value.time() try: parsed = parse_time(value) if parsed is not None: return parsed except ValueError: raise exceptions.ValidationError( self.error_messages['invalid_time'], code='invalid_time', params={'value': value}, ) raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) def pre_save(self, model_instance, add): if self.auto_now or (self.auto_now_add and add): value = datetime.datetime.now().time() setattr(model_instance, self.attname, value) return value else: return super().pre_save(model_instance, add) def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): # Casts times into the format expected by the backend if not prepared: value = self.get_prep_value(value) return connection.ops.adapt_timefield_value(value) def value_to_string(self, obj): val = self.value_from_object(obj) return '' if val is None else val.isoformat() def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.TimeField, **kwargs, }) class URLField(CharField): default_validators = [validators.URLValidator()] description = _("URL") def __init__(self, verbose_name=None, name=None, **kwargs): kwargs.setdefault('max_length', 200) super().__init__(verbose_name, name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() if kwargs.get("max_length") == 200: del kwargs['max_length'] return name, path, args, kwargs def formfield(self, **kwargs): # As with CharField, this will cause URL validation to be performed # twice. 
return super().formfield(**{ 'form_class': forms.URLField, **kwargs, }) class BinaryField(Field): description = _("Raw binary data") empty_values = [None, b''] def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) super().__init__(*args, **kwargs) if self.max_length is not None: self.validators.append(validators.MaxLengthValidator(self.max_length)) def check(self, **kwargs): return [*super().check(**kwargs), *self._check_str_default_value()] def _check_str_default_value(self): if self.has_default() and isinstance(self.default, str): return [ checks.Error( "BinaryField's default cannot be a string. Use bytes " "content instead.", obj=self, id='fields.E170', ) ] return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() if self.editable: kwargs['editable'] = True else: del kwargs['editable'] return name, path, args, kwargs def get_internal_type(self): return "BinaryField" def get_placeholder(self, value, compiler, connection): return connection.ops.binary_placeholder_sql(value) def get_default(self): if self.has_default() and not callable(self.default): return self.default default = super().get_default() if default == '': return b'' return default def get_db_prep_value(self, value, connection, prepared=False): value = super().get_db_prep_value(value, connection, prepared) if value is not None: return connection.Database.Binary(value) return value def value_to_string(self, obj): """Binary data is serialized as base64""" return b64encode(self.value_from_object(obj)).decode('ascii') def to_python(self, value): # If it's a string, it should be base64-encoded data if isinstance(value, str): return memoryview(b64decode(value.encode('ascii'))) return value class UUIDField(Field): default_error_messages = { 'invalid': _('“%(value)s” is not a valid UUID.'), } description = _('Universally unique identifier') empty_strings_allowed = False def __init__(self, verbose_name=None, **kwargs): kwargs['max_length'] = 32 super().__init__(verbose_name, **kwargs) def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['max_length'] return name, path, args, kwargs def get_internal_type(self): return "UUIDField" def get_prep_value(self, value): value = super().get_prep_value(value) return self.to_python(value) def get_db_prep_value(self, value, connection, prepared=False): if value is None: return None if not isinstance(value, uuid.UUID): value = self.to_python(value) if connection.features.has_native_uuid_field: return value return value.hex def to_python(self, value): if value is not None and not isinstance(value, uuid.UUID): input_form = 'int' if isinstance(value, int) else 'hex' try: return uuid.UUID(**{input_form: value}) except (AttributeError, ValueError): raise exceptions.ValidationError( self.error_messages['invalid'], code='invalid', params={'value': value}, ) return value def formfield(self, **kwargs): return super().formfield(**{ 'form_class': forms.UUIDField, **kwargs, }) class AutoFieldMixin: db_returning = True def __init__(self, *args, **kwargs): kwargs['blank'] = True super().__init__(*args, **kwargs) def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_primary_key(), ] def _check_primary_key(self): if not self.primary_key: return [ checks.Error( 'AutoFields must set primary_key=True.', obj=self, id='fields.E100', ), ] else: return [] def deconstruct(self): name, path, args, kwargs = super().deconstruct() del kwargs['blank'] kwargs['primary_key'] = True return name, path, args, kwargs def validate(self, 
value, model_instance): pass def get_db_prep_value(self, value, connection, prepared=False): if not prepared: value = self.get_prep_value(value) value = connection.ops.validate_autopk_value(value) return value def contribute_to_class(self, cls, name, **kwargs): assert not cls._meta.auto_field, ( "Model %s can't have more than one auto-generated field." % cls._meta.label ) super().contribute_to_class(cls, name, **kwargs) cls._meta.auto_field = self def formfield(self, **kwargs): return None class AutoFieldMeta(type): """ Metaclass to maintain backward inheritance compatibility for AutoField. It is intended that AutoFieldMixin become public API when it is possible to create a non-integer automatically-generated field using column defaults stored in the database. In many areas Django also relies on using isinstance() to check for an automatically-generated field as a subclass of AutoField. A new flag needs to be implemented on Field to be used instead. When these issues have been addressed, this metaclass could be used to deprecate inheritance from AutoField and use of isinstance() with AutoField for detecting automatically-generated fields. """ @property def _subclasses(self): return (BigAutoField, SmallAutoField) def __instancecheck__(self, instance): return isinstance(instance, self._subclasses) or super().__instancecheck__(instance) def __subclasscheck__(self, subclass): return subclass in self._subclasses or super().__subclasscheck__(subclass) class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta): def get_internal_type(self): return 'AutoField' def rel_db_type(self, connection): return IntegerField().db_type(connection=connection) class BigAutoField(AutoFieldMixin, BigIntegerField): def get_internal_type(self): return 'BigAutoField' def rel_db_type(self, connection): return BigIntegerField().db_type(connection=connection) class SmallAutoField(AutoFieldMixin, SmallIntegerField): def get_internal_type(self): return 'SmallAutoField' def rel_db_type(self, connection): return SmallIntegerField().db_type(connection=connection)
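# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django): the coercion/deconstruction
# contract implemented by the fields above, shown with UUIDField. Everything
# below follows directly from to_python() and deconstruct() as defined in
# this module; no database connection is needed.
#
#     f = UUIDField()
#     f.to_python('12345678123456781234567812345678')   # hex string -> uuid.UUID
#     f.to_python(0x12345678123456781234567812345678)   # int form -> uuid.UUID
#     f.to_python('not-a-uuid')                         # ValidationError, code 'invalid'
#
#     # deconstruct() strips the implicit max_length=32, keeping migrations
#     # free of redundant keyword arguments:
#     name, path, args, kwargs = f.deconstruct()
#     assert 'max_length' not in kwargs
# ---------------------------------------------------------------------------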
""" Constants specific to the SQL storage portion of the ORM. """ from django.utils.regex_helper import _lazy_re_compile # Size of each "chunk" for get_iterator calls. # Larger values are slightly faster at the expense of more storage space. GET_ITERATOR_CHUNK_SIZE = 100 # Namedtuples for sql.* internal use. # How many results to expect from a cursor.execute call MULTI = 'multi' SINGLE = 'single' CURSOR = 'cursor' NO_RESULTS = 'no results' ORDER_PATTERN = _lazy_re_compile(r'\?|[-+]?[.\w]+$') ORDER_DIR = { 'ASC': ('ASC', 'DESC'), 'DESC': ('DESC', 'ASC'), } # SQL join types. INNER = 'INNER JOIN' LOUTER = 'LEFT OUTER JOIN'
import datetime import uuid from functools import lru_cache from django.conf import settings from django.db.backends.base.operations import BaseDatabaseOperations from django.db.backends.utils import strip_quotes, truncate_name from django.db.models.expressions import Exists, ExpressionWrapper from django.db.models.query_utils import Q from django.db.utils import DatabaseError from django.utils import timezone from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile from .base import Database from .utils import BulkInsertMapper, InsertVar, Oracle_datetime class DatabaseOperations(BaseDatabaseOperations): # Oracle uses NUMBER(5), NUMBER(11), and NUMBER(19) for integer fields. # SmallIntegerField uses NUMBER(11) instead of NUMBER(5), which is used by # SmallAutoField, to preserve backward compatibility. integer_field_ranges = { 'SmallIntegerField': (-99999999999, 99999999999), 'IntegerField': (-99999999999, 99999999999), 'BigIntegerField': (-9999999999999999999, 9999999999999999999), 'PositiveSmallIntegerField': (0, 99999999999), 'PositiveIntegerField': (0, 99999999999), 'SmallAutoField': (-99999, 99999), 'AutoField': (-99999999999, 99999999999), 'BigAutoField': (-9999999999999999999, 9999999999999999999), } set_operators = {**BaseDatabaseOperations.set_operators, 'difference': 'MINUS'} # TODO: colorize this SQL code with style.SQL_KEYWORD(), etc. _sequence_reset_sql = """ DECLARE table_value integer; seq_value integer; seq_name user_tab_identity_cols.sequence_name%%TYPE; BEGIN BEGIN SELECT sequence_name INTO seq_name FROM user_tab_identity_cols WHERE table_name = '%(table_name)s' AND column_name = '%(column_name)s'; EXCEPTION WHEN NO_DATA_FOUND THEN seq_name := '%(no_autofield_sequence_name)s'; END; SELECT NVL(MAX(%(column)s), 0) INTO table_value FROM %(table)s; SELECT NVL(last_number - cache_size, 0) INTO seq_value FROM user_sequences WHERE sequence_name = seq_name; WHILE table_value > seq_value LOOP EXECUTE IMMEDIATE 'SELECT "'||seq_name||'".nextval FROM DUAL' INTO seq_value; END LOOP; END; /""" # Oracle doesn't support string without precision; use the max string size. cast_char_field_without_max_length = 'NVARCHAR2(2000)' cast_data_types = { 'AutoField': 'NUMBER(11)', 'BigAutoField': 'NUMBER(19)', 'SmallAutoField': 'NUMBER(5)', 'TextField': cast_char_field_without_max_length, } def cache_key_culling_sql(self): return 'SELECT cache_key FROM %s ORDER BY cache_key OFFSET %%s ROWS FETCH FIRST 1 ROWS ONLY' def date_extract_sql(self, lookup_type, field_name): if lookup_type == 'week_day': # TO_CHAR(field, 'D') returns an integer from 1-7, where 1=Sunday. 
return "TO_CHAR(%s, 'D')" % field_name elif lookup_type == 'iso_week_day': return "TO_CHAR(%s - 1, 'D')" % field_name elif lookup_type == 'week': # IW = ISO week number return "TO_CHAR(%s, 'IW')" % field_name elif lookup_type == 'quarter': return "TO_CHAR(%s, 'Q')" % field_name elif lookup_type == 'iso_year': return "TO_CHAR(%s, 'IYYY')" % field_name else: # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/EXTRACT-datetime.html return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name) def date_trunc_sql(self, lookup_type, field_name): # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html if lookup_type in ('year', 'month'): return "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'quarter': return "TRUNC(%s, 'Q')" % field_name elif lookup_type == 'week': return "TRUNC(%s, 'IW')" % field_name else: return "TRUNC(%s)" % field_name # Oracle crashes with "ORA-03113: end-of-file on communication channel" # if the time zone name is passed in parameter. Use interpolation instead. # https://groups.google.com/forum/#!msg/django-developers/zwQju7hbG78/9l934yelwfsJ # This regexp matches all time zone names from the zoneinfo database. _tzname_re = _lazy_re_compile(r'^[\w/:+-]+$') def _prepare_tzname_delta(self, tzname): if '+' in tzname: return tzname[tzname.find('+'):] elif '-' in tzname: return tzname[tzname.find('-'):] return tzname def _convert_field_to_tz(self, field_name, tzname): if not settings.USE_TZ: return field_name if not self._tzname_re.match(tzname): raise ValueError("Invalid time zone name: %s" % tzname) # Convert from connection timezone to the local time, returning # TIMESTAMP WITH TIME ZONE and cast it back to TIMESTAMP to strip the # TIME ZONE details. if self.connection.timezone_name != tzname: return "CAST((FROM_TZ(%s, '%s') AT TIME ZONE '%s') AS TIMESTAMP)" % ( field_name, self.connection.timezone_name, self._prepare_tzname_delta(tzname), ) return field_name def datetime_cast_date_sql(self, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return 'TRUNC(%s)' % field_name def datetime_cast_time_sql(self, field_name, tzname): # Since `TimeField` values are stored as TIMESTAMP where only the date # part is ignored, convert the field to the specified timezone. return self._convert_field_to_tz(field_name, tzname) def datetime_extract_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) return self.date_extract_sql(lookup_type, field_name) def datetime_trunc_sql(self, lookup_type, field_name, tzname): field_name = self._convert_field_to_tz(field_name, tzname) # https://docs.oracle.com/en/database/oracle/oracle-database/18/sqlrf/ROUND-and-TRUNC-Date-Functions.html if lookup_type in ('year', 'month'): sql = "TRUNC(%s, '%s')" % (field_name, lookup_type.upper()) elif lookup_type == 'quarter': sql = "TRUNC(%s, 'Q')" % field_name elif lookup_type == 'week': sql = "TRUNC(%s, 'IW')" % field_name elif lookup_type == 'day': sql = "TRUNC(%s)" % field_name elif lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name else: sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision. return sql def time_trunc_sql(self, lookup_type, field_name): # The implementation is similar to `datetime_trunc_sql` as both # `DateTimeField` and `TimeField` are stored as TIMESTAMP where # the date part of the later is ignored. 
if lookup_type == 'hour': sql = "TRUNC(%s, 'HH24')" % field_name elif lookup_type == 'minute': sql = "TRUNC(%s, 'MI')" % field_name elif lookup_type == 'second': sql = "CAST(%s AS DATE)" % field_name # Cast to DATE removes sub-second precision. return sql def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type == 'TextField': converters.append(self.convert_textfield_value) elif internal_type == 'BinaryField': converters.append(self.convert_binaryfield_value) elif internal_type in ['BooleanField', 'NullBooleanField']: converters.append(self.convert_booleanfield_value) elif internal_type == 'DateTimeField': if settings.USE_TZ: converters.append(self.convert_datetimefield_value) elif internal_type == 'DateField': converters.append(self.convert_datefield_value) elif internal_type == 'TimeField': converters.append(self.convert_timefield_value) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) # Oracle stores empty strings as null. If the field accepts the empty # string, undo this to adhere to the Django convention of using # the empty string instead of null. if expression.field.empty_strings_allowed: converters.append( self.convert_empty_bytes if internal_type == 'BinaryField' else self.convert_empty_string ) return converters def convert_textfield_value(self, value, expression, connection): if isinstance(value, Database.LOB): value = value.read() return value def convert_binaryfield_value(self, value, expression, connection): if isinstance(value, Database.LOB): value = force_bytes(value.read()) return value def convert_booleanfield_value(self, value, expression, connection): if value in (0, 1): value = bool(value) return value # cx_Oracle always returns datetime.datetime objects for # DATE and TIMESTAMP columns, but Django wants to see a # python datetime.date, .time, or .datetime. def convert_datetimefield_value(self, value, expression, connection): if value is not None: value = timezone.make_aware(value, self.connection.timezone) return value def convert_datefield_value(self, value, expression, connection): if isinstance(value, Database.Timestamp): value = value.date() return value def convert_timefield_value(self, value, expression, connection): if isinstance(value, Database.Timestamp): value = value.time() return value def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value @staticmethod def convert_empty_string(value, expression, connection): return '' if value is None else value @staticmethod def convert_empty_bytes(value, expression, connection): return b'' if value is None else value def deferrable_sql(self): return " DEFERRABLE INITIALLY DEFERRED" def fetch_returned_insert_columns(self, cursor, returning_params): for param in returning_params: value = param.get_value() if value is None or value == []: # cx_Oracle < 6.3 returns None, >= 6.3 returns empty list. raise DatabaseError( 'The database did not return a new row id. Probably ' '"ORA-1403: no data found" was raised internally but was ' 'hidden by the Oracle OCI library (see ' 'https://code.djangoproject.com/ticket/28859).' ) # cx_Oracle < 7 returns value, >= 7 returns list with single value. 
            yield value[0] if isinstance(value, list) else value

    def field_cast_sql(self, db_type, internal_type):
        if db_type and db_type.endswith('LOB'):
            return "DBMS_LOB.SUBSTR(%s)"
        else:
            return "%s"

    def no_limit_value(self):
        return None

    def limit_offset_sql(self, low_mark, high_mark):
        fetch, offset = self._get_limit_offset_params(low_mark, high_mark)
        return ' '.join(sql for sql in (
            ('OFFSET %d ROWS' % offset) if offset else None,
            ('FETCH FIRST %d ROWS ONLY' % fetch) if fetch else None,
        ) if sql)

    def last_executed_query(self, cursor, sql, params):
        # https://cx-oracle.readthedocs.io/en/latest/cursor.html#Cursor.statement
        # The DB API definition does not define this attribute.
        statement = cursor.statement
        # Unlike Psycopg's `query` and MySQLdb`'s `_executed`, cx_Oracle's
        # `statement` doesn't contain the query parameters. Substitute
        # parameters manually.
        if isinstance(params, (tuple, list)):
            for i, param in enumerate(params):
                statement = statement.replace(':arg%d' % i, force_str(param, errors='replace'))
        elif isinstance(params, dict):
            for key, param in params.items():
                statement = statement.replace(':%s' % key, force_str(param, errors='replace'))
        return statement

    def last_insert_id(self, cursor, table_name, pk_name):
        sq_name = self._get_sequence_name(cursor, strip_quotes(table_name), pk_name)
        # currval can only be read via a query; Oracle requires SELECT ... FROM dual.
        cursor.execute('SELECT "%s".currval FROM dual' % sq_name)
        return cursor.fetchone()[0]

    def lookup_cast(self, lookup_type, internal_type=None):
        if lookup_type in ('iexact', 'icontains', 'istartswith', 'iendswith'):
            return "UPPER(%s)"
        return "%s"

    def max_in_list_size(self):
        return 1000

    def max_name_length(self):
        return 30

    def pk_default_value(self):
        return "NULL"

    def prep_for_iexact_query(self, x):
        return x

    def process_clob(self, value):
        if value is None:
            return ''
        return value.read()

    def quote_name(self, name):
        # SQL92 requires delimited (quoted) names to be case-sensitive. When
        # not quoted, Oracle has case-insensitive behavior for identifiers, but
        # always defaults to uppercase.
        # We simplify things by making Oracle identifiers always uppercase.
        if not name.startswith('"') and not name.endswith('"'):
            name = '"%s"' % truncate_name(name.upper(), self.max_name_length())
        # Oracle puts the query text into a (query % args) construct, so % signs
        # in names need to be escaped. The '%%' will be collapsed back to '%' at
        # that stage so we aren't really making the name longer here.
name = name.replace('%', '%%') return name.upper() def random_function_sql(self): return "DBMS_RANDOM.RANDOM" def regex_lookup(self, lookup_type): if lookup_type == 'regex': match_option = "'c'" else: match_option = "'i'" return 'REGEXP_LIKE(%%s, %%s, %s)' % match_option def return_insert_columns(self, fields): if not fields: return '', () field_names = [] params = [] for field in fields: field_names.append('%s.%s' % ( self.quote_name(field.model._meta.db_table), self.quote_name(field.column), )) params.append(InsertVar(field)) return 'RETURNING %s INTO %s' % ( ', '.join(field_names), ', '.join(['%s'] * len(params)), ), tuple(params) def __foreign_key_constraints(self, table_name, recursive): with self.connection.cursor() as cursor: if recursive: cursor.execute(""" SELECT user_tables.table_name, rcons.constraint_name FROM user_tables JOIN user_constraints cons ON (user_tables.table_name = cons.table_name AND cons.constraint_type = ANY('P', 'U')) LEFT JOIN user_constraints rcons ON (user_tables.table_name = rcons.table_name AND rcons.constraint_type = 'R') START WITH user_tables.table_name = UPPER(%s) CONNECT BY NOCYCLE PRIOR cons.constraint_name = rcons.r_constraint_name GROUP BY user_tables.table_name, rcons.constraint_name HAVING user_tables.table_name != UPPER(%s) ORDER BY MAX(level) DESC """, (table_name, table_name)) else: cursor.execute(""" SELECT cons.table_name, cons.constraint_name FROM user_constraints cons WHERE cons.constraint_type = 'R' AND cons.table_name = UPPER(%s) """, (table_name,)) return cursor.fetchall() @cached_property def _foreign_key_constraints(self): # 512 is large enough to fit the ~330 tables (as of this writing) in # Django's test suite. return lru_cache(maxsize=512)(self.__foreign_key_constraints) def sql_flush(self, style, tables, sequences, allow_cascade=False): if tables: truncated_tables = {table.upper() for table in tables} constraints = set() # Oracle's TRUNCATE CASCADE only works with ON DELETE CASCADE # foreign keys which Django doesn't define. Emulate the # PostgreSQL behavior which truncates all dependent tables by # manually retrieving all foreign key constraints and resolving # dependencies. for table in tables: for foreign_table, constraint in self._foreign_key_constraints(table, recursive=allow_cascade): if allow_cascade: truncated_tables.add(foreign_table) constraints.add((foreign_table, constraint)) sql = [ "%s %s %s %s %s %s %s %s;" % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('DISABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), style.SQL_KEYWORD('KEEP'), style.SQL_KEYWORD('INDEX'), ) for table, constraint in constraints ] + [ "%s %s %s;" % ( style.SQL_KEYWORD('TRUNCATE'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), ) for table in truncated_tables ] + [ "%s %s %s %s %s %s;" % ( style.SQL_KEYWORD('ALTER'), style.SQL_KEYWORD('TABLE'), style.SQL_FIELD(self.quote_name(table)), style.SQL_KEYWORD('ENABLE'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_FIELD(self.quote_name(constraint)), ) for table, constraint in constraints ] # Since we've just deleted all the rows, running our sequence # ALTER code will reset the sequence to 0. 
sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql else: return [] def sequence_reset_by_name_sql(self, style, sequences): sql = [] for sequence_info in sequences: no_autofield_sequence_name = self._get_no_autofield_sequence_name(sequence_info['table']) table = self.quote_name(sequence_info['table']) column = self.quote_name(sequence_info['column'] or 'id') query = self._sequence_reset_sql % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), } sql.append(query) return sql def sequence_reset_sql(self, style, model_list): from django.db import models output = [] query = self._sequence_reset_sql for model in model_list: for f in model._meta.local_fields: if isinstance(f, models.AutoField): no_autofield_sequence_name = self._get_no_autofield_sequence_name(model._meta.db_table) table = self.quote_name(model._meta.db_table) column = self.quote_name(f.column) output.append(query % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': strip_quotes(column), }) # Only one AutoField is allowed per model, so don't # continue to loop break for f in model._meta.many_to_many: if not f.remote_field.through: no_autofield_sequence_name = self._get_no_autofield_sequence_name(f.m2m_db_table()) table = self.quote_name(f.m2m_db_table()) column = self.quote_name('id') output.append(query % { 'no_autofield_sequence_name': no_autofield_sequence_name, 'table': table, 'column': column, 'table_name': strip_quotes(table), 'column_name': 'ID', }) return output def start_transaction_sql(self): return '' def tablespace_sql(self, tablespace, inline=False): if inline: return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace) else: return "TABLESPACE %s" % self.quote_name(tablespace) def adapt_datefield_value(self, value): """ Transform a date value to an object compatible with what is expected by the backend driver for date columns. The default implementation transforms the date to text, but that is not necessary for Oracle. """ return value def adapt_datetimefield_value(self, value): """ Transform a datetime value to an object compatible with what is expected by the backend driver for datetime columns. If naive datetime is passed assumes that is in UTC. Normally Django models.DateTimeField makes sure that if USE_TZ is True passed datetime is timezone aware. """ if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # cx_Oracle doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("Oracle backend does not support timezone-aware datetimes when USE_TZ is False.") return Oracle_datetime.from_datetime(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. 
if hasattr(value, 'resolve_expression'): return value if isinstance(value, str): return datetime.datetime.strptime(value, '%H:%M:%S') # Oracle doesn't support tz-aware times if timezone.is_aware(value): raise ValueError("Oracle backend does not support timezone-aware times.") return Oracle_datetime(1900, 1, 1, value.hour, value.minute, value.second, value.microsecond) def combine_expression(self, connector, sub_expressions): lhs, rhs = sub_expressions if connector == '%%': return 'MOD(%s)' % ','.join(sub_expressions) elif connector == '&': return 'BITAND(%s)' % ','.join(sub_expressions) elif connector == '|': return 'BITAND(-%(lhs)s-1,%(rhs)s)+%(lhs)s' % {'lhs': lhs, 'rhs': rhs} elif connector == '<<': return '(%(lhs)s * POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '>>': return 'FLOOR(%(lhs)s / POWER(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs} elif connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) return super().combine_expression(connector, sub_expressions) def _get_no_autofield_sequence_name(self, table): """ Manually created sequence name to keep backward compatibility for AutoFields that aren't Oracle identity columns. """ name_length = self.max_name_length() - 3 return '%s_SQ' % truncate_name(strip_quotes(table), name_length).upper() def _get_sequence_name(self, cursor, table, pk_name): cursor.execute(""" SELECT sequence_name FROM user_tab_identity_cols WHERE table_name = UPPER(%s) AND column_name = UPPER(%s)""", [table, pk_name]) row = cursor.fetchone() return self._get_no_autofield_sequence_name(table) if row is None else row[0] def bulk_insert_sql(self, fields, placeholder_rows): query = [] for row in placeholder_rows: select = [] for i, placeholder in enumerate(row): # A model without any fields has fields=[None]. if fields[i]: internal_type = getattr(fields[i], 'target_field', fields[i]).get_internal_type() placeholder = BulkInsertMapper.types.get(internal_type, '%s') % placeholder # Add columns aliases to the first select to avoid "ORA-00918: # column ambiguously defined" when two or more columns in the # first select have the same value. if not query: placeholder = '%s col_%s' % (placeholder, i) select.append(placeholder) query.append('SELECT %s FROM DUAL' % ', '.join(select)) # Bulk insert to tables with Oracle identity columns causes Oracle to # add sequence.nextval to it. Sequence.nextval cannot be used with the # UNION operator. To prevent incorrect SQL, move UNION to a subquery. return 'SELECT * FROM (%s)' % ' UNION ALL '.join(query) def subtract_temporals(self, internal_type, lhs, rhs): if internal_type == 'DateField': lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs return "NUMTODSINTERVAL(TO_NUMBER(%s - %s), 'DAY')" % (lhs_sql, rhs_sql), lhs_params + rhs_params return super().subtract_temporals(internal_type, lhs, rhs) def bulk_batch_size(self, fields, objs): """Oracle restricts the number of parameters in a query.""" if fields: return self.connection.features.max_query_params // len(fields) return len(objs) def conditional_expression_supported_in_where_clause(self, expression): """ Oracle supports only EXISTS(...) or filters in the WHERE clause, others must be compared with True. """ if isinstance(expression, Exists): return True if isinstance(expression, ExpressionWrapper) and isinstance(expression.expression, Q): return True return False
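# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django): Oracle has no native bitshift
# operators, so combine_expression() above rewrites the ORM's connectors in
# terms of arithmetic. Given ops = DatabaseOperations(connection), the
# rewrites come straight from the method body:
#
#     ops.combine_expression('<<', ['"COL"', '3'])
#     # -> '("COL" * POWER(2, 3))'
#     ops.combine_expression('>>', ['"COL"', '3'])
#     # -> 'FLOOR("COL" / POWER(2, 3))'
#     ops.combine_expression('%%', ['"COL"', '4'])
#     # -> 'MOD("COL",4)'
# ---------------------------------------------------------------------------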
from django.db.models.sql import compiler class SQLCompiler(compiler.SQLCompiler): def as_subquery_condition(self, alias, columns, compiler): qn = compiler.quote_name_unless_alias qn2 = self.connection.ops.quote_name sql, params = self.as_sql() return '(%s) IN (%s)' % (', '.join('%s.%s' % (qn(alias), qn2(column)) for column in columns), sql), params class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler): pass class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler): def as_sql(self): if self.connection.features.update_can_self_select or self.single_alias: return super().as_sql() # MySQL and MariaDB < 10.3.2 doesn't support deletion with a subquery # which is what the default implementation of SQLDeleteCompiler uses # when multiple tables are involved. Use the MySQL/MariaDB specific # DELETE table FROM table syntax instead to avoid performing the # operation in two queries. result = [ 'DELETE %s FROM' % self.quote_name_unless_alias( self.query.get_initial_alias() ) ] from_sql, from_params = self.get_from_clause() result.extend(from_sql) where, params = self.compile(self.query.where) if where: result.append('WHERE %s' % where) return ' '.join(result), tuple(from_params) + tuple(params) class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler): pass class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler): pass
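# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Django): rough shape of the SQL that
# SQLDeleteCompiler.as_sql() above emits for a multi-table delete when the
# backend can't self-select (MariaDB < 10.3.2). Table and column names here
# are hypothetical:
#
#     DELETE `app_book`
#     FROM `app_book`
#     INNER JOIN `app_author` ON (`app_book`.`author_id` = `app_author`.`id`)
#     WHERE `app_author`.`name` = %s
#
# This replaces the default DELETE FROM ... WHERE id IN (SELECT ...) form,
# which these versions reject when the subquery reads the table being
# deleted from.
# ---------------------------------------------------------------------------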
""" MySQL database backend for Django. Requires mysqlclient: https://pypi.org/project/mysqlclient/ """ from django.core.exceptions import ImproperlyConfigured from django.db import utils from django.db.backends import utils as backend_utils from django.db.backends.base.base import BaseDatabaseWrapper from django.utils.asyncio import async_unsafe from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile try: import MySQLdb as Database except ImportError as err: raise ImproperlyConfigured( 'Error loading MySQLdb module.\n' 'Did you install mysqlclient?' ) from err from MySQLdb.constants import CLIENT, FIELD_TYPE # isort:skip from MySQLdb.converters import conversions # isort:skip # Some of these import MySQLdb, so import them after checking if it's installed. from .client import DatabaseClient # isort:skip from .creation import DatabaseCreation # isort:skip from .features import DatabaseFeatures # isort:skip from .introspection import DatabaseIntrospection # isort:skip from .operations import DatabaseOperations # isort:skip from .schema import DatabaseSchemaEditor # isort:skip from .validation import DatabaseValidation # isort:skip version = Database.version_info if version < (1, 3, 13): raise ImproperlyConfigured('mysqlclient 1.3.13 or newer is required; you have %s.' % Database.__version__) # MySQLdb returns TIME columns as timedelta -- they are more like timedelta in # terms of actual behavior as they are signed and include days -- and Django # expects time. django_conversions = { **conversions, **{FIELD_TYPE.TIME: backend_utils.typecast_time}, } # This should match the numerical portion of the version numbers (we can treat # versions like 5.0.24 and 5.0.24a as the same). server_version_re = _lazy_re_compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})') class CursorWrapper: """ A thin wrapper around MySQLdb's normal cursor class that catches particular exception instances and reraises them with the correct types. Implemented as a wrapper, rather than a subclass, so that it isn't stuck to the particular underlying representation returned by Connection.cursor(). """ codes_for_integrityerror = ( 1048, # Column cannot be null 1690, # BIGINT UNSIGNED value is out of range 3819, # CHECK constraint is violated 4025, # CHECK constraint failed ) def __init__(self, cursor): self.cursor = cursor def execute(self, query, args=None): try: # args is None means no string interpolation return self.cursor.execute(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e.args[0] in self.codes_for_integrityerror: raise utils.IntegrityError(*tuple(e.args)) raise def executemany(self, query, args): try: return self.cursor.executemany(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e.args[0] in self.codes_for_integrityerror: raise utils.IntegrityError(*tuple(e.args)) raise def __getattr__(self, attr): return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'mysql' # This dictionary maps Field objects to their associated MySQL column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. 
# If a column type is set to None, it won't be included in the output. data_types = { 'AutoField': 'integer AUTO_INCREMENT', 'BigAutoField': 'bigint AUTO_INCREMENT', 'BinaryField': 'longblob', 'BooleanField': 'bool', 'CharField': 'varchar(%(max_length)s)', 'DateField': 'date', 'DateTimeField': 'datetime(6)', 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)', 'DurationField': 'bigint', 'FileField': 'varchar(%(max_length)s)', 'FilePathField': 'varchar(%(max_length)s)', 'FloatField': 'double precision', 'IntegerField': 'integer', 'BigIntegerField': 'bigint', 'IPAddressField': 'char(15)', 'GenericIPAddressField': 'char(39)', 'NullBooleanField': 'bool', 'OneToOneField': 'integer', 'PositiveIntegerField': 'integer UNSIGNED', 'PositiveSmallIntegerField': 'smallint UNSIGNED', 'SlugField': 'varchar(%(max_length)s)', 'SmallAutoField': 'smallint AUTO_INCREMENT', 'SmallIntegerField': 'smallint', 'TextField': 'longtext', 'TimeField': 'time(6)', 'UUIDField': 'char(32)', } # For these data types: # - MySQL < 8.0.13 and MariaDB < 10.2.1 don't accept default values and # implicitly treat them as nullable # - all versions of MySQL and MariaDB don't support full width database # indexes _limited_data_types = ( 'tinyblob', 'blob', 'mediumblob', 'longblob', 'tinytext', 'text', 'mediumtext', 'longtext', 'json', ) operators = { 'exact': '= %s', 'iexact': 'LIKE %s', 'contains': 'LIKE BINARY %s', 'icontains': 'LIKE %s', 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': 'LIKE BINARY %s', 'endswith': 'LIKE BINARY %s', 'istartswith': 'LIKE %s', 'iendswith': 'LIKE %s', } # The patterns below are used to generate SQL pattern lookup clauses when # the right-hand side of the lookup isn't a raw string (it might be an expression # or the result of a bilateral transformation). # In those cases, special characters for LIKE operators (e.g. \, *, _) should be # escaped on database side. # # Note: we use str.format() here for readability as '%' is used as a wildcard for # the LIKE operator. pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')" pattern_ops = { 'contains': "LIKE BINARY CONCAT('%%', {}, '%%')", 'icontains': "LIKE CONCAT('%%', {}, '%%')", 'startswith': "LIKE BINARY CONCAT({}, '%%')", 'istartswith': "LIKE CONCAT({}, '%%')", 'endswith': "LIKE BINARY CONCAT('%%', {})", 'iendswith': "LIKE CONCAT('%%', {})", } isolation_levels = { 'read uncommitted', 'read committed', 'repeatable read', 'serializable', } Database = Database SchemaEditorClass = DatabaseSchemaEditor # Classes instantiated in __init__(). client_class = DatabaseClient creation_class = DatabaseCreation features_class = DatabaseFeatures introspection_class = DatabaseIntrospection ops_class = DatabaseOperations validation_class = DatabaseValidation def get_connection_params(self): kwargs = { 'conv': django_conversions, 'charset': 'utf8', } settings_dict = self.settings_dict if settings_dict['USER']: kwargs['user'] = settings_dict['USER'] if settings_dict['NAME']: kwargs['db'] = settings_dict['NAME'] if settings_dict['PASSWORD']: kwargs['passwd'] = settings_dict['PASSWORD'] if settings_dict['HOST'].startswith('/'): kwargs['unix_socket'] = settings_dict['HOST'] elif settings_dict['HOST']: kwargs['host'] = settings_dict['HOST'] if settings_dict['PORT']: kwargs['port'] = int(settings_dict['PORT']) # We need the number of potentially affected rows after an # "UPDATE", not the number of changed rows. kwargs['client_flag'] = CLIENT.FOUND_ROWS # Validate the transaction isolation level, if specified. 
options = settings_dict['OPTIONS'].copy() isolation_level = options.pop('isolation_level', 'read committed') if isolation_level: isolation_level = isolation_level.lower() if isolation_level not in self.isolation_levels: raise ImproperlyConfigured( "Invalid transaction isolation level '%s' specified.\n" "Use one of %s, or None." % ( isolation_level, ', '.join("'%s'" % s for s in sorted(self.isolation_levels)) )) self.isolation_level = isolation_level kwargs.update(options) return kwargs @async_unsafe def get_new_connection(self, conn_params): return Database.connect(**conn_params) def init_connection_state(self): assignments = [] if self.features.is_sql_auto_is_null_enabled: # SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on # a recently inserted row will return when the field is tested # for NULL. Disabling this brings this aspect of MySQL in line # with SQL standards. assignments.append('SET SQL_AUTO_IS_NULL = 0') if self.isolation_level: assignments.append('SET SESSION TRANSACTION ISOLATION LEVEL %s' % self.isolation_level.upper()) if assignments: with self.cursor() as cursor: cursor.execute('; '.join(assignments)) @async_unsafe def create_cursor(self, name=None): cursor = self.connection.cursor() return CursorWrapper(cursor) def _rollback(self): try: BaseDatabaseWrapper._rollback(self) except Database.NotSupportedError: pass def _set_autocommit(self, autocommit): with self.wrap_database_errors: self.connection.autocommit(autocommit) def disable_constraint_checking(self): """ Disable foreign key checks, primarily for use in adding rows with forward references. Always return True to indicate constraint checks need to be re-enabled. """ self.cursor().execute('SET foreign_key_checks=0') return True def enable_constraint_checking(self): """ Re-enable foreign key checks after they have been disabled. """ # Override needs_rollback in case constraint_checks_disabled is # nested inside transaction.atomic. self.needs_rollback, needs_rollback = False, self.needs_rollback try: self.cursor().execute('SET foreign_key_checks=1') finally: self.needs_rollback = needs_rollback def check_constraints(self, table_names=None): """ Check each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. """ with self.cursor() as cursor: if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute( """ SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL """ % ( primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name, ) ) for bad_row in cursor.fetchall(): raise utils.IntegrityError( "The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not " "have a corresponding value in %s.%s." 
% ( table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name, ) ) def is_usable(self): try: self.connection.ping() except Database.Error: return False else: return True @cached_property def display_name(self): return 'MariaDB' if self.mysql_is_mariadb else 'MySQL' @cached_property def data_type_check_constraints(self): if self.features.supports_column_check_constraints: return { 'PositiveIntegerField': '`%(column)s` >= 0', 'PositiveSmallIntegerField': '`%(column)s` >= 0', } return {} @cached_property def mysql_server_info(self): with self.temporary_connection() as cursor: cursor.execute('SELECT VERSION()') return cursor.fetchone()[0] @cached_property def mysql_version(self): match = server_version_re.match(self.mysql_server_info) if not match: raise Exception('Unable to determine MySQL version from version string %r' % self.mysql_server_info) return tuple(int(x) for x in match.groups()) @cached_property def mysql_is_mariadb(self): return 'mariadb' in self.mysql_server_info.lower()
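
# Illustrative sketch (not part of the module above; database name and
# credentials are placeholders): how the 'isolation_level' option consumed by
# get_connection_params() is normally supplied. The value is validated against
# DatabaseWrapper.isolation_levels, and None skips the SET SESSION TRANSACTION
# ISOLATION LEVEL statement issued in init_connection_state().
#
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.mysql',
#         'NAME': 'mydb',        # placeholder database name
#         'USER': 'myuser',      # placeholder credentials
#         'PASSWORD': 'secret',
#         'OPTIONS': {
#             'isolation_level': 'repeatable read',  # or None for the server default
#         },
#     },
# }
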
import re from collections import namedtuple import sqlparse from django.db.backends.base.introspection import ( BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo, ) from django.db.models.indexes import Index from django.utils.regex_helper import _lazy_re_compile FieldInfo = namedtuple('FieldInfo', BaseFieldInfo._fields + ('pk',)) field_size_re = _lazy_re_compile(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$') def get_field_size(name): """ Extract the size number from a "varchar(11)" type name """ m = field_size_re.search(name) return int(m.group(1)) if m else None # This light wrapper "fakes" a dictionary interface, because some SQLite data # types include variables in them -- e.g. "varchar(30)" -- and can't be matched # as a simple dictionary lookup. class FlexibleFieldLookupDict: # Maps SQL types to Django Field types. Some of the SQL types have multiple # entries here because SQLite allows for anything and doesn't normalize the # field type; it uses whatever was given. base_data_types_reverse = { 'bool': 'BooleanField', 'boolean': 'BooleanField', 'smallint': 'SmallIntegerField', 'smallint unsigned': 'PositiveSmallIntegerField', 'smallinteger': 'SmallIntegerField', 'int': 'IntegerField', 'integer': 'IntegerField', 'bigint': 'BigIntegerField', 'integer unsigned': 'PositiveIntegerField', 'decimal': 'DecimalField', 'real': 'FloatField', 'text': 'TextField', 'char': 'CharField', 'varchar': 'CharField', 'blob': 'BinaryField', 'date': 'DateField', 'datetime': 'DateTimeField', 'time': 'TimeField', } def __getitem__(self, key): key = key.lower().split('(', 1)[0].strip() return self.base_data_types_reverse[key] class DatabaseIntrospection(BaseDatabaseIntrospection): data_types_reverse = FlexibleFieldLookupDict() def get_field_type(self, data_type, description): field_type = super().get_field_type(data_type, description) if description.pk and field_type in {'BigIntegerField', 'IntegerField', 'SmallIntegerField'}: # No support for BigAutoField or SmallAutoField as SQLite treats # all integer primary keys as signed 64-bit integers. return 'AutoField' return field_type def get_table_list(self, cursor): """Return a list of table and view names in the current database.""" # Skip the sqlite_sequence system table used for autoincrement key # generation. cursor.execute(""" SELECT name, type FROM sqlite_master WHERE type in ('table', 'view') AND NOT name='sqlite_sequence' ORDER BY name""") return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()] def get_table_description(self, cursor, table_name): """ Return a description of the table with the DB-API cursor.description interface. """ cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(table_name)) return [ FieldInfo( name, data_type, None, get_field_size(data_type), None, None, not notnull, default, pk == 1, ) for cid, name, data_type, notnull, default, pk in cursor.fetchall() ] def get_sequences(self, cursor, table_name, table_fields=()): pk_col = self.get_primary_key_column(cursor, table_name) return [{'table': table_name, 'column': pk_col}] def get_relations(self, cursor, table_name): """ Return a dictionary of {field_name: (field_name_other_table, other_table)} representing all relationships to the given table. 
""" # Dictionary of relations to return relations = {} # Schema for this table cursor.execute( "SELECT sql, type FROM sqlite_master " "WHERE tbl_name = %s AND type IN ('table', 'view')", [table_name] ) create_sql, table_type = cursor.fetchone() if table_type == 'view': # It might be a view, then no results will be returned return relations results = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')] # Walk through and look for references to other tables. SQLite doesn't # really have enforced references, but since it echoes out the SQL used # to create the table we can look for REFERENCES statements used there. for field_desc in results.split(','): field_desc = field_desc.strip() if field_desc.startswith("UNIQUE"): continue m = re.search(r'references (\S*) ?\(["|]?(.*)["|]?\)', field_desc, re.I) if not m: continue table, column = [s.strip('"') for s in m.groups()] if field_desc.startswith("FOREIGN KEY"): # Find name of the target FK field m = re.match(r'FOREIGN KEY\s*\(([^\)]*)\).*', field_desc, re.I) field_name = m.groups()[0].strip('"') else: field_name = field_desc.split()[0].strip('"') cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table]) result = cursor.fetchall()[0] other_table_results = result[0].strip() li, ri = other_table_results.index('('), other_table_results.rindex(')') other_table_results = other_table_results[li + 1:ri] for other_desc in other_table_results.split(','): other_desc = other_desc.strip() if other_desc.startswith('UNIQUE'): continue other_name = other_desc.split(' ', 1)[0].strip('"') if other_name == column: relations[field_name] = (other_name, table) break return relations def get_key_columns(self, cursor, table_name): """ Return a list of (column_name, referenced_table_name, referenced_column_name) for all key columns in given table. """ key_columns = [] # Schema for this table cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"]) results = cursor.fetchone()[0].strip() results = results[results.index('(') + 1:results.rindex(')')] # Walk through and look for references to other tables. SQLite doesn't # really have enforced references, but since it echoes out the SQL used # to create the table we can look for REFERENCES statements used there. for field_index, field_desc in enumerate(results.split(',')): field_desc = field_desc.strip() if field_desc.startswith("UNIQUE"): continue m = re.search(r'"(.*)".*references (.*) \(["|](.*)["|]\)', field_desc, re.I) if not m: continue # This will append (column_name, referenced_table_name, referenced_column_name) to key_columns key_columns.append(tuple(s.strip('"') for s in m.groups())) return key_columns def get_primary_key_column(self, cursor, table_name): """Return the column name of the primary key for the given table.""" # Don't use PRAGMA because that causes issues with some transactions cursor.execute( "SELECT sql, type FROM sqlite_master " "WHERE tbl_name = %s AND type IN ('table', 'view')", [table_name] ) row = cursor.fetchone() if row is None: raise ValueError("Table %s does not exist" % table_name) create_sql, table_type = row if table_type == 'view': # Views don't have a primary key. 
return None fields_sql = create_sql[create_sql.index('(') + 1:create_sql.rindex(')')] for field_desc in fields_sql.split(','): field_desc = field_desc.strip() m = re.match(r'(?:(?:["`\[])(.*)(?:["`\]])|(\w+)).*PRIMARY KEY.*', field_desc) if m: return m.group(1) if m.group(1) else m.group(2) return None def _get_foreign_key_constraints(self, cursor, table_name): constraints = {} cursor.execute('PRAGMA foreign_key_list(%s)' % self.connection.ops.quote_name(table_name)) for row in cursor.fetchall(): # Remaining on_update/on_delete/match values are of no interest. id_, _, table, from_, to = row[:5] constraints['fk_%d' % id_] = { 'columns': [from_], 'primary_key': False, 'unique': False, 'foreign_key': (table, to), 'check': False, 'index': False, } return constraints def _parse_column_or_constraint_definition(self, tokens, columns): token = None is_constraint_definition = None field_name = None constraint_name = None unique = False unique_columns = [] check = False check_columns = [] braces_deep = 0 for token in tokens: if token.match(sqlparse.tokens.Punctuation, '('): braces_deep += 1 elif token.match(sqlparse.tokens.Punctuation, ')'): braces_deep -= 1 if braces_deep < 0: # End of columns and constraints for table definition. break elif braces_deep == 0 and token.match(sqlparse.tokens.Punctuation, ','): # End of current column or constraint definition. break # Detect column or constraint definition by first token. if is_constraint_definition is None: is_constraint_definition = token.match(sqlparse.tokens.Keyword, 'CONSTRAINT') if is_constraint_definition: continue if is_constraint_definition: # Detect constraint name by second token. if constraint_name is None: if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword): constraint_name = token.value elif token.ttype == sqlparse.tokens.Literal.String.Symbol: constraint_name = token.value[1:-1] # Start constraint columns parsing after UNIQUE keyword. if token.match(sqlparse.tokens.Keyword, 'UNIQUE'): unique = True unique_braces_deep = braces_deep elif unique: if unique_braces_deep == braces_deep: if unique_columns: # Stop constraint parsing. unique = False continue if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword): unique_columns.append(token.value) elif token.ttype == sqlparse.tokens.Literal.String.Symbol: unique_columns.append(token.value[1:-1]) else: # Detect field name by first token. if field_name is None: if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword): field_name = token.value elif token.ttype == sqlparse.tokens.Literal.String.Symbol: field_name = token.value[1:-1] if token.match(sqlparse.tokens.Keyword, 'UNIQUE'): unique_columns = [field_name] # Start constraint columns parsing after CHECK keyword. if token.match(sqlparse.tokens.Keyword, 'CHECK'): check = True check_braces_deep = braces_deep elif check: if check_braces_deep == braces_deep: if check_columns: # Stop constraint parsing. 
                        check = False
                    continue
                if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
                    if token.value in columns:
                        check_columns.append(token.value)
                elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
                    if token.value[1:-1] in columns:
                        check_columns.append(token.value[1:-1])
        unique_constraint = {
            'unique': True,
            'columns': unique_columns,
            'primary_key': False,
            'foreign_key': None,
            'check': False,
            'index': False,
        } if unique_columns else None
        check_constraint = {
            'check': True,
            'columns': check_columns,
            'primary_key': False,
            'unique': False,
            'foreign_key': None,
            'index': False,
        } if check_columns else None
        return constraint_name, unique_constraint, check_constraint, token

    def _parse_table_constraints(self, sql, columns):
        # Check constraint parsing is based on the SQLite syntax diagram.
        # https://www.sqlite.org/syntaxdiagrams.html#table-constraint
        statement = sqlparse.parse(sql)[0]
        constraints = {}
        unnamed_constraints_index = 0
        tokens = (token for token in statement.flatten() if not token.is_whitespace)
        # Go to columns and constraint definition
        for token in tokens:
            if token.match(sqlparse.tokens.Punctuation, '('):
                break
        # Parse columns and constraint definition
        while True:
            constraint_name, unique, check, end_token = self._parse_column_or_constraint_definition(tokens, columns)
            if unique:
                if constraint_name:
                    constraints[constraint_name] = unique
                else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = unique
            if check:
                if constraint_name:
                    constraints[constraint_name] = check
                else:
                    unnamed_constraints_index += 1
                    constraints['__unnamed_constraint_%s__' % unnamed_constraints_index] = check
            if end_token.match(sqlparse.tokens.Punctuation, ')'):
                break
        return constraints

    def get_constraints(self, cursor, table_name):
        """
        Retrieve any constraints or keys (unique, pk, fk, check, index) across
        one or more columns.
        """
        constraints = {}
        # Find inline check constraints.
        try:
            table_schema = cursor.execute(
                "SELECT sql FROM sqlite_master WHERE type='table' and name=%s" % (
                    self.connection.ops.quote_name(table_name),
                )
            ).fetchone()[0]
        except TypeError:
            # table_name is a view.
            pass
        else:
            columns = {info.name for info in self.get_table_description(cursor, table_name)}
            constraints.update(self._parse_table_constraints(table_schema, columns))
        # Get the index info
        cursor.execute("PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name))
        for row in cursor.fetchall():
            # SQLite 3.8.9+ has 5 columns, whereas older versions only give 3
            # columns. Discard the last 2 columns if present.
            number, index, unique = row[:3]
            cursor.execute(
                "SELECT sql FROM sqlite_master "
                "WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
            )
            # There's at most one row.
            sql, = cursor.fetchone() or (None,)
            # Inline constraints are already detected in
            # _parse_table_constraints(). The reasons to avoid fetching inline
            # constraints from `PRAGMA index_list` are:
            # - Inline constraints can have a different name and information
            #   than what `PRAGMA index_list` gives.
            # - Not all inline constraints may appear in `PRAGMA index_list`.
if not sql: # An inline constraint continue # Get the index info for that index cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index)) for index_rank, column_rank, column in cursor.fetchall(): if index not in constraints: constraints[index] = { "columns": [], "primary_key": False, "unique": bool(unique), "foreign_key": None, "check": False, "index": True, } constraints[index]['columns'].append(column) # Add type and column orders for indexes if constraints[index]['index'] and not constraints[index]['unique']: # SQLite doesn't support any index type other than b-tree constraints[index]['type'] = Index.suffix order_info = sql.split('(')[-1].split(')')[0].split(',') orders = ['DESC' if info.endswith('DESC') else 'ASC' for info in order_info] constraints[index]['orders'] = orders # Get the PK pk_column = self.get_primary_key_column(cursor, table_name) if pk_column: # SQLite doesn't actually give a name to the PK constraint, # so we invent one. This is fine, as the SQLite backend never # deletes PK constraints by name, as you can't delete constraints # in SQLite; we remake the table with a new PK instead. constraints["__primary__"] = { "columns": [pk_column], "primary_key": True, "unique": False, # It's not actually a unique constraint. "foreign_key": None, "check": False, "index": False, } constraints.update(self._get_foreign_key_constraints(cursor, table_name)) return constraints
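
# Illustrative sketch (not part of the module above): how the type-normalizing
# helpers defined here behave. Only pure functions from this module are
# exercised, so the block runs directly without a database connection.
if __name__ == '__main__':
    lookup = FlexibleFieldLookupDict()
    assert lookup['varchar(30)'] == 'CharField'   # size suffix is stripped
    assert lookup['INTEGER'] == 'IntegerField'    # lookups are case-insensitive
    assert get_field_size('varchar(30)') == 30
    assert get_field_size('text') is None         # no size to extract
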
""" SQLite backend for the sqlite3 module in the standard library. """ import datetime import decimal import functools import hashlib import math import operator import re import statistics import warnings from itertools import chain from sqlite3 import dbapi2 as Database import pytz from django.core.exceptions import ImproperlyConfigured from django.db import utils from django.db.backends import utils as backend_utils from django.db.backends.base.base import BaseDatabaseWrapper from django.utils import timezone from django.utils.asyncio import async_unsafe from django.utils.dateparse import parse_datetime, parse_time from django.utils.duration import duration_microseconds from django.utils.regex_helper import _lazy_re_compile from .client import DatabaseClient # isort:skip from .creation import DatabaseCreation # isort:skip from .features import DatabaseFeatures # isort:skip from .introspection import DatabaseIntrospection # isort:skip from .operations import DatabaseOperations # isort:skip from .schema import DatabaseSchemaEditor # isort:skip def decoder(conv_func): """ Convert bytestrings from Python's sqlite3 interface to a regular string. """ return lambda s: conv_func(s.decode()) def none_guard(func): """ Decorator that returns None if any of the arguments to the decorated function are None. Many SQL functions return NULL if any of their arguments are NULL. This decorator simplifies the implementation of this for the custom functions registered below. """ @functools.wraps(func) def wrapper(*args, **kwargs): return None if None in args else func(*args, **kwargs) return wrapper def list_aggregate(function): """ Return an aggregate class that accumulates values in a list and applies the provided function to the data. """ return type('ListAggregate', (list,), {'finalize': function, 'step': list.append}) def check_sqlite_version(): if Database.sqlite_version_info < (3, 8, 3): raise ImproperlyConfigured('SQLite 3.8.3 or later is required (found %s).' % Database.sqlite_version) check_sqlite_version() Database.register_converter("bool", b'1'.__eq__) Database.register_converter("time", decoder(parse_time)) Database.register_converter("datetime", decoder(parse_datetime)) Database.register_converter("timestamp", decoder(parse_datetime)) Database.register_converter("TIMESTAMP", decoder(parse_datetime)) Database.register_adapter(decimal.Decimal, str) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'sqlite' display_name = 'SQLite' # SQLite doesn't actually support most of these types, but it "does the right # thing" given more verbose field definitions, so leave them as is so that # schema inspection is more useful. 
data_types = { 'AutoField': 'integer', 'BigAutoField': 'integer', 'BinaryField': 'BLOB', 'BooleanField': 'bool', 'CharField': 'varchar(%(max_length)s)', 'DateField': 'date', 'DateTimeField': 'datetime', 'DecimalField': 'decimal', 'DurationField': 'bigint', 'FileField': 'varchar(%(max_length)s)', 'FilePathField': 'varchar(%(max_length)s)', 'FloatField': 'real', 'IntegerField': 'integer', 'BigIntegerField': 'bigint', 'IPAddressField': 'char(15)', 'GenericIPAddressField': 'char(39)', 'NullBooleanField': 'bool', 'OneToOneField': 'integer', 'PositiveIntegerField': 'integer unsigned', 'PositiveSmallIntegerField': 'smallint unsigned', 'SlugField': 'varchar(%(max_length)s)', 'SmallAutoField': 'integer', 'SmallIntegerField': 'smallint', 'TextField': 'text', 'TimeField': 'time', 'UUIDField': 'char(32)', } data_type_check_constraints = { 'PositiveIntegerField': '"%(column)s" >= 0', 'PositiveSmallIntegerField': '"%(column)s" >= 0', } data_types_suffix = { 'AutoField': 'AUTOINCREMENT', 'BigAutoField': 'AUTOINCREMENT', 'SmallAutoField': 'AUTOINCREMENT', } # SQLite requires LIKE statements to include an ESCAPE clause if the value # being escaped has a percent or underscore in it. # See https://www.sqlite.org/lang_expr.html for an explanation. operators = { 'exact': '= %s', 'iexact': "LIKE %s ESCAPE '\\'", 'contains': "LIKE %s ESCAPE '\\'", 'icontains': "LIKE %s ESCAPE '\\'", 'regex': 'REGEXP %s', 'iregex': "REGEXP '(?i)' || %s", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE %s ESCAPE '\\'", 'endswith': "LIKE %s ESCAPE '\\'", 'istartswith': "LIKE %s ESCAPE '\\'", 'iendswith': "LIKE %s ESCAPE '\\'", } # The patterns below are used to generate SQL pattern lookup clauses when # the right-hand side of the lookup isn't a raw string (it might be an expression # or the result of a bilateral transformation). # In those cases, special characters for LIKE operators (e.g. \, *, _) should be # escaped on database side. # # Note: we use str.format() here for readability as '%' is used as a wildcard for # the LIKE operator. pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')" pattern_ops = { 'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'", 'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'", 'startswith': r"LIKE {} || '%%' ESCAPE '\'", 'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'", 'endswith': r"LIKE '%%' || {} ESCAPE '\'", 'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'", } Database = Database SchemaEditorClass = DatabaseSchemaEditor # Classes instantiated in __init__(). client_class = DatabaseClient creation_class = DatabaseCreation features_class = DatabaseFeatures introspection_class = DatabaseIntrospection ops_class = DatabaseOperations def get_connection_params(self): settings_dict = self.settings_dict if not settings_dict['NAME']: raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") kwargs = { 'database': settings_dict['NAME'], 'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, **settings_dict['OPTIONS'], } # Always allow the underlying SQLite connection to be shareable # between multiple threads. The safe-guarding will be handled at a # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` # property. This is necessary as the shareability is disabled by # default in pysqlite and it cannot be changed once a connection is # opened. 
if 'check_same_thread' in kwargs and kwargs['check_same_thread']: warnings.warn( 'The `check_same_thread` option was provided and set to ' 'True. It will be overridden with False. Use the ' '`DatabaseWrapper.allow_thread_sharing` property instead ' 'for controlling thread shareability.', RuntimeWarning ) kwargs.update({'check_same_thread': False, 'uri': True}) return kwargs @async_unsafe def get_new_connection(self, conn_params): conn = Database.connect(**conn_params) conn.create_function("django_date_extract", 2, _sqlite_datetime_extract) conn.create_function("django_date_trunc", 2, _sqlite_date_trunc) conn.create_function('django_datetime_cast_date', 3, _sqlite_datetime_cast_date) conn.create_function('django_datetime_cast_time', 3, _sqlite_datetime_cast_time) conn.create_function('django_datetime_extract', 4, _sqlite_datetime_extract) conn.create_function('django_datetime_trunc', 4, _sqlite_datetime_trunc) conn.create_function("django_time_extract", 2, _sqlite_time_extract) conn.create_function("django_time_trunc", 2, _sqlite_time_trunc) conn.create_function("django_time_diff", 2, _sqlite_time_diff) conn.create_function("django_timestamp_diff", 2, _sqlite_timestamp_diff) conn.create_function("django_format_dtdelta", 3, _sqlite_format_dtdelta) conn.create_function('regexp', 2, _sqlite_regexp) conn.create_function('ACOS', 1, none_guard(math.acos)) conn.create_function('ASIN', 1, none_guard(math.asin)) conn.create_function('ATAN', 1, none_guard(math.atan)) conn.create_function('ATAN2', 2, none_guard(math.atan2)) conn.create_function('CEILING', 1, none_guard(math.ceil)) conn.create_function('COS', 1, none_guard(math.cos)) conn.create_function('COT', 1, none_guard(lambda x: 1 / math.tan(x))) conn.create_function('DEGREES', 1, none_guard(math.degrees)) conn.create_function('EXP', 1, none_guard(math.exp)) conn.create_function('FLOOR', 1, none_guard(math.floor)) conn.create_function('LN', 1, none_guard(math.log)) conn.create_function('LOG', 2, none_guard(lambda x, y: math.log(y, x))) conn.create_function('LPAD', 3, _sqlite_lpad) conn.create_function('MD5', 1, none_guard(lambda x: hashlib.md5(x.encode()).hexdigest())) conn.create_function('MOD', 2, none_guard(math.fmod)) conn.create_function('PI', 0, lambda: math.pi) conn.create_function('POWER', 2, none_guard(operator.pow)) conn.create_function('RADIANS', 1, none_guard(math.radians)) conn.create_function('REPEAT', 2, none_guard(operator.mul)) conn.create_function('REVERSE', 1, none_guard(lambda x: x[::-1])) conn.create_function('RPAD', 3, _sqlite_rpad) conn.create_function('SHA1', 1, none_guard(lambda x: hashlib.sha1(x.encode()).hexdigest())) conn.create_function('SHA224', 1, none_guard(lambda x: hashlib.sha224(x.encode()).hexdigest())) conn.create_function('SHA256', 1, none_guard(lambda x: hashlib.sha256(x.encode()).hexdigest())) conn.create_function('SHA384', 1, none_guard(lambda x: hashlib.sha384(x.encode()).hexdigest())) conn.create_function('SHA512', 1, none_guard(lambda x: hashlib.sha512(x.encode()).hexdigest())) conn.create_function('SIGN', 1, none_guard(lambda x: (x > 0) - (x < 0))) conn.create_function('SIN', 1, none_guard(math.sin)) conn.create_function('SQRT', 1, none_guard(math.sqrt)) conn.create_function('TAN', 1, none_guard(math.tan)) conn.create_aggregate('STDDEV_POP', 1, list_aggregate(statistics.pstdev)) conn.create_aggregate('STDDEV_SAMP', 1, list_aggregate(statistics.stdev)) conn.create_aggregate('VAR_POP', 1, list_aggregate(statistics.pvariance)) conn.create_aggregate('VAR_SAMP', 1, list_aggregate(statistics.variance)) 
conn.execute('PRAGMA foreign_keys = ON') return conn def init_connection_state(self): pass def create_cursor(self, name=None): return self.connection.cursor(factory=SQLiteCursorWrapper) @async_unsafe def close(self): self.validate_thread_sharing() # If database is in memory, closing the connection destroys the # database. To prevent accidental data loss, ignore close requests on # an in-memory db. if not self.is_in_memory_db(): BaseDatabaseWrapper.close(self) def _savepoint_allowed(self): # When 'isolation_level' is not None, sqlite3 commits before each # savepoint; it's a bug. When it is None, savepoints don't make sense # because autocommit is enabled. The only exception is inside 'atomic' # blocks. To work around that bug, on SQLite, 'atomic' starts a # transaction explicitly rather than simply disable autocommit. return self.in_atomic_block def _set_autocommit(self, autocommit): if autocommit: level = None else: # sqlite3's internal default is ''. It's different from None. # See Modules/_sqlite/connection.c. level = '' # 'isolation_level' is a misleading API. # SQLite always runs at the SERIALIZABLE isolation level. with self.wrap_database_errors: self.connection.isolation_level = level def disable_constraint_checking(self): with self.cursor() as cursor: cursor.execute('PRAGMA foreign_keys = OFF') # Foreign key constraints cannot be turned off while in a multi- # statement transaction. Fetch the current state of the pragma # to determine if constraints are effectively disabled. enabled = cursor.execute('PRAGMA foreign_keys').fetchone()[0] return not bool(enabled) def enable_constraint_checking(self): self.cursor().execute('PRAGMA foreign_keys = ON') def check_constraints(self, table_names=None): """ Check each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. """ if self.features.supports_pragma_foreign_key_check: with self.cursor() as cursor: if table_names is None: violations = self.cursor().execute('PRAGMA foreign_key_check').fetchall() else: violations = chain.from_iterable( cursor.execute('PRAGMA foreign_key_check(%s)' % table_name).fetchall() for table_name in table_names ) # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check for table_name, rowid, referenced_table_name, foreign_key_index in violations: foreign_key = cursor.execute( 'PRAGMA foreign_key_list(%s)' % table_name ).fetchall()[foreign_key_index] column_name, referenced_column_name = foreign_key[3:5] primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) primary_key_value, bad_value = cursor.execute( 'SELECT %s, %s FROM %s WHERE rowid = %%s' % ( primary_key_column_name, column_name, table_name ), (rowid,), ).fetchone() raise utils.IntegrityError( "The row in table '%s' with primary key '%s' has an " "invalid foreign key: %s.%s contains a value '%s' that " "does not have a corresponding value in %s.%s." 
% ( table_name, primary_key_value, table_name, column_name, bad_value, referenced_table_name, referenced_column_name ) ) else: with self.cursor() as cursor: if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute( """ SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL """ % ( primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name, ) ) for bad_row in cursor.fetchall(): raise utils.IntegrityError( "The row in table '%s' with primary key '%s' has an " "invalid foreign key: %s.%s contains a value '%s' that " "does not have a corresponding value in %s.%s." % ( table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name, ) ) def is_usable(self): return True def _start_transaction_under_autocommit(self): """ Start a transaction explicitly in autocommit mode. Staying in autocommit mode works around a bug of sqlite3 that breaks savepoints when autocommit is disabled. """ self.cursor().execute("BEGIN") def is_in_memory_db(self): return self.creation.is_in_memory_db(self.settings_dict['NAME']) FORMAT_QMARK_REGEX = _lazy_re_compile(r'(?<!%)%s') class SQLiteCursorWrapper(Database.Cursor): """ Django uses "format" style placeholders, but pysqlite2 uses "qmark" style. This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". 
""" def execute(self, query, params=None): if params is None: return Database.Cursor.execute(self, query) query = self.convert_query(query) return Database.Cursor.execute(self, query, params) def executemany(self, query, param_list): query = self.convert_query(query) return Database.Cursor.executemany(self, query, param_list) def convert_query(self, query): return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%') def _sqlite_datetime_parse(dt, tzname=None, conn_tzname=None): if dt is None: return None try: dt = backend_utils.typecast_timestamp(dt) except (TypeError, ValueError): return None if conn_tzname: dt = dt.replace(tzinfo=pytz.timezone(conn_tzname)) if tzname is not None and tzname != conn_tzname: sign_index = tzname.find('+') + tzname.find('-') + 1 if sign_index > -1: sign = tzname[sign_index] tzname, offset = tzname.split(sign) if offset: hours, minutes = offset.split(':') offset_delta = datetime.timedelta(hours=int(hours), minutes=int(minutes)) dt += offset_delta if sign == '+' else -offset_delta dt = timezone.localtime(dt, pytz.timezone(tzname)) return dt def _sqlite_date_trunc(lookup_type, dt): dt = _sqlite_datetime_parse(dt) if dt is None: return None if lookup_type == 'year': return "%i-01-01" % dt.year elif lookup_type == 'quarter': month_in_quarter = dt.month - (dt.month - 1) % 3 return '%i-%02i-01' % (dt.year, month_in_quarter) elif lookup_type == 'month': return "%i-%02i-01" % (dt.year, dt.month) elif lookup_type == 'week': dt = dt - datetime.timedelta(days=dt.weekday()) return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) elif lookup_type == 'day': return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) def _sqlite_time_trunc(lookup_type, dt): if dt is None: return None try: dt = backend_utils.typecast_time(dt) except (ValueError, TypeError): return None if lookup_type == 'hour': return "%02i:00:00" % dt.hour elif lookup_type == 'minute': return "%02i:%02i:00" % (dt.hour, dt.minute) elif lookup_type == 'second': return "%02i:%02i:%02i" % (dt.hour, dt.minute, dt.second) def _sqlite_datetime_cast_date(dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None return dt.date().isoformat() def _sqlite_datetime_cast_time(dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None return dt.time().isoformat() def _sqlite_datetime_extract(lookup_type, dt, tzname=None, conn_tzname=None): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 elif lookup_type == 'iso_week_day': return dt.isoweekday() elif lookup_type == 'week': return dt.isocalendar()[1] elif lookup_type == 'quarter': return math.ceil(dt.month / 3) elif lookup_type == 'iso_year': return dt.isocalendar()[0] else: return getattr(dt, lookup_type) def _sqlite_datetime_trunc(lookup_type, dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None if lookup_type == 'year': return "%i-01-01 00:00:00" % dt.year elif lookup_type == 'quarter': month_in_quarter = dt.month - (dt.month - 1) % 3 return '%i-%02i-01 00:00:00' % (dt.year, month_in_quarter) elif lookup_type == 'month': return "%i-%02i-01 00:00:00" % (dt.year, dt.month) elif lookup_type == 'week': dt = dt - datetime.timedelta(days=dt.weekday()) return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day) elif lookup_type == 'day': return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day) elif lookup_type == 'hour': return "%i-%02i-%02i 
%02i:00:00" % (dt.year, dt.month, dt.day, dt.hour) elif lookup_type == 'minute': return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) elif lookup_type == 'second': return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) def _sqlite_time_extract(lookup_type, dt): if dt is None: return None try: dt = backend_utils.typecast_time(dt) except (ValueError, TypeError): return None return getattr(dt, lookup_type) @none_guard def _sqlite_format_dtdelta(conn, lhs, rhs): """ LHS and RHS can be either: - An integer number of microseconds - A string representing a datetime """ try: real_lhs = datetime.timedelta(0, 0, lhs) if isinstance(lhs, int) else backend_utils.typecast_timestamp(lhs) real_rhs = datetime.timedelta(0, 0, rhs) if isinstance(rhs, int) else backend_utils.typecast_timestamp(rhs) if conn.strip() == '+': out = real_lhs + real_rhs else: out = real_lhs - real_rhs except (ValueError, TypeError): return None # typecast_timestamp returns a date or a datetime without timezone. # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]" return str(out) @none_guard def _sqlite_time_diff(lhs, rhs): left = backend_utils.typecast_time(lhs) right = backend_utils.typecast_time(rhs) return ( (left.hour * 60 * 60 * 1000000) + (left.minute * 60 * 1000000) + (left.second * 1000000) + (left.microsecond) - (right.hour * 60 * 60 * 1000000) - (right.minute * 60 * 1000000) - (right.second * 1000000) - (right.microsecond) ) @none_guard def _sqlite_timestamp_diff(lhs, rhs): left = backend_utils.typecast_timestamp(lhs) right = backend_utils.typecast_timestamp(rhs) return duration_microseconds(left - right) @none_guard def _sqlite_regexp(re_pattern, re_string): return bool(re.search(re_pattern, str(re_string))) @none_guard def _sqlite_lpad(text, length, fill_text): if len(text) >= length: return text[:length] return (fill_text * length)[:length - len(text)] + text @none_guard def _sqlite_rpad(text, length, fill_text): return (text + fill_text * length)[:length]
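
# Illustrative sketch (not part of the module above): the NULL propagation that
# none_guard() adds to the SQL functions registered in get_new_connection(),
# and how list_aggregate() packages a reducer as an sqlite3 aggregate class.
# Only helpers defined in this module are used, so this runs without a database.
if __name__ == '__main__':
    guarded_pow = none_guard(operator.pow)
    assert guarded_pow(2, 10) == 1024
    assert guarded_pow(2, None) is None  # a NULL argument yields NULL, as in SQL
    stddev = list_aggregate(statistics.pstdev)()
    for value in (1.0, 2.0, 3.0):
        stddev.step(value)  # sqlite3 calls step() once per row
    assert abs(stddev.finalize() - statistics.pstdev([1.0, 2.0, 3.0])) < 1e-9
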
import os from datetime import datetime from urllib.parse import urljoin from django.conf import settings from django.core.exceptions import SuspiciousFileOperation from django.core.files import File, locks from django.core.files.move import file_move_safe from django.core.signals import setting_changed from django.utils import timezone from django.utils._os import safe_join from django.utils.crypto import get_random_string from django.utils.deconstruct import deconstructible from django.utils.encoding import filepath_to_uri from django.utils.functional import LazyObject, cached_property from django.utils.module_loading import import_string from django.utils.text import get_valid_filename __all__ = ( 'Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage', 'get_storage_class', ) class Storage: """ A base storage class, providing some default behaviors that all other storage systems can inherit or override, as necessary. """ # The following methods represent a public interface to private methods. # These shouldn't be overridden by subclasses unless absolutely necessary. def open(self, name, mode='rb'): """Retrieve the specified file from storage.""" return self._open(name, mode) def save(self, name, content, max_length=None): """ Save new content to the file specified by name. The content should be a proper File object or any Python file-like object, ready to be read from the beginning. """ # Get the proper name for the file, as it will actually be saved. if name is None: name = content.name if not hasattr(content, 'chunks'): content = File(content, name) name = self.get_available_name(name, max_length=max_length) return self._save(name, content) # These methods are part of the public API, with default implementations. def get_valid_name(self, name): """ Return a filename, based on the provided filename, that's suitable for use in the target storage system. """ return get_valid_filename(name) def get_alternative_name(self, file_root, file_ext): """ Return an alternative filename, by adding an underscore and a random 7 character alphanumeric string (before the file extension, if one exists) to the filename. """ return '%s_%s%s' % (file_root, get_random_string(7), file_ext) def get_available_name(self, name, max_length=None): """ Return a filename that's free on the target storage system and available for new content to be written to. """ dir_name, file_name = os.path.split(name) file_root, file_ext = os.path.splitext(file_name) # If the filename already exists, generate an alternative filename # until it doesn't exist. # Truncate original name if required, so the new filename does not # exceed the max_length. while self.exists(name) or (max_length and len(name) > max_length): # file_ext includes the dot. name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) if max_length is None: continue # Truncate file_root if max_length exceeded. truncation = len(name) - max_length if truncation > 0: file_root = file_root[:-truncation] # Entire file_root was truncated in attempt to find an available filename. if not file_root: raise SuspiciousFileOperation( 'Storage can not find an available filename for "%s". ' 'Please make sure that the corresponding file field ' 'allows sufficient "max_length".' % name ) name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) return name def generate_filename(self, filename): """ Validate the filename by calling get_valid_name() and return a filename to be passed to the save() method. 
""" # `filename` may include a path as returned by FileField.upload_to. dirname, filename = os.path.split(filename) return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) def path(self, name): """ Return a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method. """ raise NotImplementedError("This backend doesn't support absolute paths.") # The following methods form the public API for storage systems, but with # no default implementations. Subclasses must implement *all* of these. def delete(self, name): """ Delete the specified file from the storage system. """ raise NotImplementedError('subclasses of Storage must provide a delete() method') def exists(self, name): """ Return True if a file referenced by the given name already exists in the storage system, or False if the name is available for a new file. """ raise NotImplementedError('subclasses of Storage must provide an exists() method') def listdir(self, path): """ List the contents of the specified path. Return a 2-tuple of lists: the first item being directories, the second item being files. """ raise NotImplementedError('subclasses of Storage must provide a listdir() method') def size(self, name): """ Return the total size, in bytes, of the file specified by name. """ raise NotImplementedError('subclasses of Storage must provide a size() method') def url(self, name): """ Return an absolute URL where the file's contents can be accessed directly by a Web browser. """ raise NotImplementedError('subclasses of Storage must provide a url() method') def get_accessed_time(self, name): """ Return the last accessed time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method') def get_created_time(self, name): """ Return the creation time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_created_time() method') def get_modified_time(self, name): """ Return the last modified time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method') @deconstructible class FileSystemStorage(Storage): """ Standard filesystem storage """ # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if # the file already exists before it's opened. 
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0) def __init__(self, location=None, base_url=None, file_permissions_mode=None, directory_permissions_mode=None): self._location = location self._base_url = base_url self._file_permissions_mode = file_permissions_mode self._directory_permissions_mode = directory_permissions_mode setting_changed.connect(self._clear_cached_properties) def _clear_cached_properties(self, setting, **kwargs): """Reset setting based property values.""" if setting == 'MEDIA_ROOT': self.__dict__.pop('base_location', None) self.__dict__.pop('location', None) elif setting == 'MEDIA_URL': self.__dict__.pop('base_url', None) elif setting == 'FILE_UPLOAD_PERMISSIONS': self.__dict__.pop('file_permissions_mode', None) elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS': self.__dict__.pop('directory_permissions_mode', None) def _value_or_setting(self, value, setting): return setting if value is None else value @cached_property def base_location(self): return self._value_or_setting(self._location, settings.MEDIA_ROOT) @cached_property def location(self): return os.path.abspath(self.base_location) @cached_property def base_url(self): if self._base_url is not None and not self._base_url.endswith('/'): self._base_url += '/' return self._value_or_setting(self._base_url, settings.MEDIA_URL) @cached_property def file_permissions_mode(self): return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS) @cached_property def directory_permissions_mode(self): return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS) def _open(self, name, mode='rb'): return File(open(self.path(name), mode)) def _save(self, name, content): full_path = self.path(name) # Create any intermediate directories that do not exist. directory = os.path.dirname(full_path) try: if self.directory_permissions_mode is not None: # os.makedirs applies the global umask, so we reset it, # for consistency with file_permissions_mode behavior. old_umask = os.umask(0) try: os.makedirs(directory, self.directory_permissions_mode, exist_ok=True) finally: os.umask(old_umask) else: os.makedirs(directory, exist_ok=True) except FileExistsError: raise FileExistsError('%s exists and is not a directory.' % directory) # There's a potential race condition between get_available_name and # saving the file; it's possible that two threads might return the # same name, at which point all sorts of fun happens. So we need to # try to create the file, but if it already exists we have to go back # to get_available_name() and try again. while True: try: # This file has a file path that we can move. if hasattr(content, 'temporary_file_path'): file_move_safe(content.temporary_file_path(), full_path) # This is a normal uploadedfile that we can stream. else: # The current umask value is masked out by os.open! fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) _file = None try: locks.lock(fd, locks.LOCK_EX) for chunk in content.chunks(): if _file is None: mode = 'wb' if isinstance(chunk, bytes) else 'wt' _file = os.fdopen(fd, mode) _file.write(chunk) finally: locks.unlock(fd) if _file is not None: _file.close() else: os.close(fd) except FileExistsError: # A new name is needed if the file exists. name = self.get_available_name(name) full_path = self.path(name) else: # OK, the file save worked. Break out of the loop. 
break if self.file_permissions_mode is not None: os.chmod(full_path, self.file_permissions_mode) # Store filenames with forward slashes, even on Windows. return str(name).replace('\\', '/') def delete(self, name): assert name, "The name argument is not allowed to be empty." name = self.path(name) # If the file or directory exists, delete it from the filesystem. try: if os.path.isdir(name): os.rmdir(name) else: os.remove(name) except FileNotFoundError: # FileNotFoundError is raised if the file or directory was removed # concurrently. pass def exists(self, name): return os.path.exists(self.path(name)) def listdir(self, path): path = self.path(path) directories, files = [], [] for entry in os.scandir(path): if entry.is_dir(): directories.append(entry.name) else: files.append(entry.name) return directories, files def path(self, name): return safe_join(self.location, name) def size(self, name): return os.path.getsize(self.path(name)) def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") url = filepath_to_uri(name) if url is not None: url = url.lstrip('/') return urljoin(self.base_url, url) def _datetime_from_timestamp(self, ts): """ If timezone support is enabled, make an aware datetime object in UTC; otherwise make a naive one in the local timezone. """ if settings.USE_TZ: # Safe to use .replace() because UTC doesn't have DST return datetime.utcfromtimestamp(ts).replace(tzinfo=timezone.utc) else: return datetime.fromtimestamp(ts) def get_accessed_time(self, name): return self._datetime_from_timestamp(os.path.getatime(self.path(name))) def get_created_time(self, name): return self._datetime_from_timestamp(os.path.getctime(self.path(name))) def get_modified_time(self, name): return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) def get_storage_class(import_path=None): return import_string(import_path or settings.DEFAULT_FILE_STORAGE) class DefaultStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class()() default_storage = DefaultStorage()
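
# Illustrative sketch (not part of the module above): how FileSystemStorage
# resolves filename collisions through get_available_name(). The location is a
# placeholder; running this needs configured settings, since _save() reads
# FILE_UPLOAD_PERMISSIONS via the file_permissions_mode property.
#
# from django.core.files.base import ContentFile
# storage = FileSystemStorage(location='/tmp/demo')
# first = storage.save('notes.txt', ContentFile(b'first'))    # 'notes.txt'
# second = storage.save('notes.txt', ContentFile(b'second'))  # e.g. 'notes_a1b2c3d.txt'
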
from io import BytesIO from django.conf import settings from django.core import signals from django.core.handlers import base from django.http import HttpRequest, QueryDict, parse_cookie from django.urls import set_script_prefix from django.utils.encoding import repercent_broken_unicode from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile _slashes_re = _lazy_re_compile(br'/+') class LimitedStream: """Wrap another stream to disallow reading it past a number of bytes.""" def __init__(self, stream, limit, buf_size=64 * 1024 * 1024): self.stream = stream self.remaining = limit self.buffer = b'' self.buf_size = buf_size def _read_limited(self, size=None): if size is None or size > self.remaining: size = self.remaining if size == 0: return b'' result = self.stream.read(size) self.remaining -= len(result) return result def read(self, size=None): if size is None: result = self.buffer + self._read_limited() self.buffer = b'' elif size < len(self.buffer): result = self.buffer[:size] self.buffer = self.buffer[size:] else: # size >= len(self.buffer) result = self.buffer + self._read_limited(size - len(self.buffer)) self.buffer = b'' return result def readline(self, size=None): while b'\n' not in self.buffer and \ (size is None or len(self.buffer) < size): if size: # since size is not None here, len(self.buffer) < size chunk = self._read_limited(size - len(self.buffer)) else: chunk = self._read_limited() if not chunk: break self.buffer += chunk sio = BytesIO(self.buffer) if size: line = sio.readline(size) else: line = sio.readline() self.buffer = sio.read() return line class WSGIRequest(HttpRequest): def __init__(self, environ): script_name = get_script_name(environ) # If PATH_INFO is empty (e.g. accessing the SCRIPT_NAME URL without a # trailing slash), operate as if '/' was requested. path_info = get_path_info(environ) or '/' self.environ = environ self.path_info = path_info # be careful to only replace the first slash in the path because of # http://test/something and http://test//something being different as # stated in https://www.ietf.org/rfc/rfc2396.txt self.path = '%s/%s' % (script_name.rstrip('/'), path_info.replace('/', '', 1)) self.META = environ self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() # Set content_type, content_params, and encoding. self._set_content_type_params(environ) try: content_length = int(environ.get('CONTENT_LENGTH')) except (ValueError, TypeError): content_length = 0 self._stream = LimitedStream(self.environ['wsgi.input'], content_length) self._read_started = False self.resolver_match = None def _get_scheme(self): return self.environ.get('wsgi.url_scheme') @cached_property def GET(self): # The WSGI spec says 'QUERY_STRING' may be absent. 
raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '') return QueryDict(raw_query_string, encoding=self._encoding) def _get_post(self): if not hasattr(self, '_post'): self._load_post_and_files() return self._post def _set_post(self, post): self._post = post @cached_property def COOKIES(self): raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '') return parse_cookie(raw_cookie) @property def FILES(self): if not hasattr(self, '_files'): self._load_post_and_files() return self._files POST = property(_get_post, _set_post) class WSGIHandler(base.BaseHandler): request_class = WSGIRequest def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.load_middleware() def __call__(self, environ, start_response): set_script_prefix(get_script_name(environ)) signals.request_started.send(sender=self.__class__, environ=environ) request = self.request_class(environ) response = self.get_response(request) response._handler_class = self.__class__ status = '%d %s' % (response.status_code, response.reason_phrase) response_headers = [ *response.items(), *(('Set-Cookie', c.output(header='')) for c in response.cookies.values()), ] start_response(status, response_headers) if getattr(response, 'file_to_stream', None) is not None and environ.get('wsgi.file_wrapper'): response = environ['wsgi.file_wrapper'](response.file_to_stream, response.block_size) return response def get_path_info(environ): """Return the HTTP request's PATH_INFO as a string.""" path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/') return repercent_broken_unicode(path_info).decode() def get_script_name(environ): """ Return the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite is used, return what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything). """ if settings.FORCE_SCRIPT_NAME is not None: return settings.FORCE_SCRIPT_NAME # If Apache's mod_rewrite had a whack at the URL, Apache set either # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any # rewrites. Unfortunately not every Web server (lighttpd!) passes this # information through all the time, so FORCE_SCRIPT_NAME, above, is still # needed. script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') or get_bytes_from_wsgi(environ, 'REDIRECT_URL', '') if script_url: if b'//' in script_url: # mod_wsgi squashes multiple successive slashes in PATH_INFO, # do the same with script_url before manipulating paths (#17133). script_url = _slashes_re.sub(b'/', script_url) path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '') script_name = script_url[:-len(path_info)] if path_info else script_url else: script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '') return script_name.decode() def get_bytes_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be strings. """ value = environ.get(key, default) # Non-ASCII values in the WSGI environ are arbitrarily decoded with # ISO-8859-1. This is wrong for Django websites where UTF-8 is the default. # Re-encode to recover the original bytestring. return value.encode('iso-8859-1') def get_str_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as str. key and default should be str objects. """ value = get_bytes_from_wsgi(environ, key, default) return value.decode(errors='replace')
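
# Illustrative sketch (not part of the module above): LimitedStream refuses to
# read past its limit, which is how WSGIRequest caps reads of the request body
# at CONTENT_LENGTH. Only BytesIO and the class defined above are used, so this
# runs as-is.
if __name__ == '__main__':
    stream = LimitedStream(BytesIO(b'hello world, and then some'), limit=11)
    assert stream.read() == b'hello world'  # stops at the 11-byte limit
    assert stream.read() == b''             # the limit is exhausted
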
import errno import os import re import socket import sys from datetime import datetime from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.core.servers.basehttp import ( WSGIServer, get_internal_wsgi_application, run, ) from django.utils import autoreload from django.utils.regex_helper import _lazy_re_compile naiveip_re = _lazy_re_compile(r"""^(?: (?P<addr> (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address (?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN ):)?(?P<port>\d+)$""", re.X) class Command(BaseCommand): help = "Starts a lightweight Web server for development." # Validation is called explicitly each time the server is reloaded. requires_system_checks = False stealth_options = ('shutdown_message',) default_addr = '127.0.0.1' default_addr_ipv6 = '::1' default_port = '8000' protocol = 'http' server_cls = WSGIServer def add_arguments(self, parser): parser.add_argument( 'addrport', nargs='?', help='Optional port number, or ipaddr:port' ) parser.add_argument( '--ipv6', '-6', action='store_true', dest='use_ipv6', help='Tells Django to use an IPv6 address.', ) parser.add_argument( '--nothreading', action='store_false', dest='use_threading', help='Tells Django to NOT use threading.', ) parser.add_argument( '--noreload', action='store_false', dest='use_reloader', help='Tells Django to NOT use the auto-reloader.', ) def execute(self, *args, **options): if options['no_color']: # We rely on the environment because it's currently the only # way to reach WSGIRequestHandler. This seems an acceptable # compromise considering `runserver` runs indefinitely. os.environ["DJANGO_COLORS"] = "nocolor" super().execute(*args, **options) def get_handler(self, *args, **options): """Return the default WSGI handler for the runner.""" return get_internal_wsgi_application() def handle(self, *args, **options): if not settings.DEBUG and not settings.ALLOWED_HOSTS: raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.') self.use_ipv6 = options['use_ipv6'] if self.use_ipv6 and not socket.has_ipv6: raise CommandError('Your Python does not support IPv6.') self._raw_ipv6 = False if not options['addrport']: self.addr = '' self.port = self.default_port else: m = re.match(naiveip_re, options['addrport']) if m is None: raise CommandError('"%s" is not a valid port number ' 'or address:port pair.' % options['addrport']) self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups() if not self.port.isdigit(): raise CommandError("%r is not a valid port number." % self.port) if self.addr: if _ipv6: self.addr = self.addr[1:-1] self.use_ipv6 = True self._raw_ipv6 = True elif self.use_ipv6 and not _fqdn: raise CommandError('"%s" is not a valid IPv6 address.' % self.addr) if not self.addr: self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr self._raw_ipv6 = self.use_ipv6 self.run(**options) def run(self, **options): """Run the server, using the autoreloader if needed.""" use_reloader = options['use_reloader'] if use_reloader: autoreload.run_with_reloader(self.inner_run, **options) else: self.inner_run(None, **options) def inner_run(self, *args, **options): # If an exception was silenced in ManagementUtility.execute in order # to be raised in the child process, raise it now. autoreload.raise_last_exception() threading = options['use_threading'] # 'shutdown_message' is a stealth option. 
shutdown_message = options.get('shutdown_message', '') quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C' self.stdout.write("Performing system checks...\n\n") self.check(display_num_errors=True) # Need to check migrations here, so can't use the # requires_migrations_check attribute. self.check_migrations() now = datetime.now().strftime('%B %d, %Y - %X') self.stdout.write(now) self.stdout.write(( "Django version %(version)s, using settings %(settings)r\n" "Starting development server at %(protocol)s://%(addr)s:%(port)s/\n" "Quit the server with %(quit_command)s.\n" ) % { "version": self.get_version(), "settings": settings.SETTINGS_MODULE, "protocol": self.protocol, "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr, "port": self.port, "quit_command": quit_command, }) try: handler = self.get_handler(*args, **options) run(self.addr, int(self.port), handler, ipv6=self.use_ipv6, threading=threading, server_cls=self.server_cls) except OSError as e: # Use helpful error messages instead of ugly tracebacks. ERRORS = { errno.EACCES: "You don't have permission to access that port.", errno.EADDRINUSE: "That port is already in use.", errno.EADDRNOTAVAIL: "That IP address can't be assigned to.", } try: error_text = ERRORS[e.errno] except KeyError: error_text = e self.stderr.write("Error: %s" % error_text) # Need to use an OS exit because sys.exit doesn't work in a thread os._exit(1) except KeyboardInterrupt: if shutdown_message: self.stdout.write(shutdown_message) sys.exit(0)
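# Usage sketch (illustrative, not part of Django): driving the command
# programmatically. The address below is an arbitrary example; with
# use_reloader=False the call runs in-process and blocks serving requests
# until interrupted.
#
#   from django.core.management import call_command
#   call_command('runserver', '127.0.0.1:8080', use_reloader=False)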
d233549f7ef849705cd72585c7b4405a92c28e039df5f19bee7ca1e1502312f3
import glob import os import re import sys from functools import total_ordering from itertools import dropwhile import django from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.temp import NamedTemporaryFile from django.core.management.base import BaseCommand, CommandError from django.core.management.utils import ( find_command, handle_extensions, is_ignored_path, popen_wrapper, ) from django.utils.encoding import DEFAULT_LOCALE_ENCODING from django.utils.functional import cached_property from django.utils.jslex import prepare_js_for_gettext from django.utils.regex_helper import _lazy_re_compile from django.utils.text import get_text_list from django.utils.translation import templatize plural_forms_re = _lazy_re_compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL) STATUS_OK = 0 NO_LOCALE_DIR = object() def check_programs(*programs): for program in programs: if find_command(program) is None: raise CommandError( "Can't find %s. Make sure you have GNU gettext tools 0.15 or " "newer installed." % program ) @total_ordering class TranslatableFile: def __init__(self, dirpath, file_name, locale_dir): self.file = file_name self.dirpath = dirpath self.locale_dir = locale_dir def __repr__(self): return "<%s: %s>" % ( self.__class__.__name__, os.sep.join([self.dirpath, self.file]), ) def __eq__(self, other): return self.path == other.path def __lt__(self, other): return self.path < other.path @property def path(self): return os.path.join(self.dirpath, self.file) class BuildFile: """ Represent the state of a translatable file during the build process. """ def __init__(self, command, domain, translatable): self.command = command self.domain = domain self.translatable = translatable @cached_property def is_templatized(self): if self.domain == 'djangojs': return self.command.gettext_version < (0, 18, 3) elif self.domain == 'django': file_ext = os.path.splitext(self.translatable.file)[1] return file_ext != '.py' return False @cached_property def path(self): return self.translatable.path @cached_property def work_path(self): """ Path to a file which is being fed into GNU gettext pipeline. This may be either a translatable or its preprocessed version. """ if not self.is_templatized: return self.path extension = { 'djangojs': 'c', 'django': 'py', }.get(self.domain) filename = '%s.%s' % (self.translatable.file, extension) return os.path.join(self.translatable.dirpath, filename) def preprocess(self): """ Preprocess (if necessary) a translatable file before passing it to xgettext GNU gettext utility. """ if not self.is_templatized: return with open(self.path, encoding='utf-8') as fp: src_data = fp.read() if self.domain == 'djangojs': content = prepare_js_for_gettext(src_data) elif self.domain == 'django': content = templatize(src_data, origin=self.path[2:]) with open(self.work_path, 'w', encoding='utf-8') as fp: fp.write(content) def postprocess_messages(self, msgs): """ Postprocess messages generated by xgettext GNU gettext utility. Transform paths as if these messages were generated from original translatable files rather than from preprocessed versions. 
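        For example, with the 'django' domain a '#: template.html.py:10'
        location generated from the templatized work copy is rewritten to
        '#: template.html:10'.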
""" if not self.is_templatized: return msgs # Remove '.py' suffix if os.name == 'nt': # Preserve '.\' prefix on Windows to respect gettext behavior old_path = self.work_path new_path = self.path else: old_path = self.work_path[2:] new_path = self.path[2:] return re.sub( r'^(#: .*)(' + re.escape(old_path) + r')', lambda match: match.group().replace(old_path, new_path), msgs, flags=re.MULTILINE ) def cleanup(self): """ Remove a preprocessed copy of a translatable file (if any). """ if self.is_templatized: # This check is needed for the case of a symlinked file and its # source being processed inside a single group (locale dir); # removing either of those two removes both. if os.path.exists(self.work_path): os.unlink(self.work_path) def normalize_eols(raw_contents): """ Take a block of raw text that will be passed through str.splitlines() to get universal newlines treatment. Return the resulting block of text with normalized `\n` EOL sequences ready to be written to disk using current platform's native EOLs. """ lines_list = raw_contents.splitlines() # Ensure last line has its EOL if lines_list and lines_list[-1]: lines_list.append('') return '\n'.join(lines_list) def write_pot_file(potfile, msgs): """ Write the `potfile` with the `msgs` contents, making sure its format is valid. """ pot_lines = msgs.splitlines() if os.path.exists(potfile): # Strip the header lines = dropwhile(len, pot_lines) else: lines = [] found, header_read = False, False for line in pot_lines: if not found and not header_read: if 'charset=CHARSET' in line: found = True line = line.replace('charset=CHARSET', 'charset=UTF-8') if not line and not found: header_read = True lines.append(line) msgs = '\n'.join(lines) # Force newlines of POT files to '\n' to work around # https://savannah.gnu.org/bugs/index.php?52395 with open(potfile, 'a', encoding='utf-8', newline='\n') as fp: fp.write(msgs) class Command(BaseCommand): help = ( "Runs over the entire source tree of the current directory and " "pulls out all strings marked for translation. It creates (or updates) a message " "file in the conf/locale (in the django tree) or locale (for projects and " "applications) directory.\n\nYou must run this command with one of either the " "--locale, --exclude, or --all options." ) translatable_file_class = TranslatableFile build_file_class = BuildFile requires_system_checks = False msgmerge_options = ['-q', '--previous'] msguniq_options = ['--to-code=utf-8'] msgattrib_options = ['--no-obsolete'] xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators'] def add_arguments(self, parser): parser.add_argument( '--locale', '-l', default=[], action='append', help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). ' 'Can be used multiple times.', ) parser.add_argument( '--exclude', '-x', default=[], action='append', help='Locales to exclude. Default is none. Can be used multiple times.', ) parser.add_argument( '--domain', '-d', default='django', help='The domain of the message files (default: "django").', ) parser.add_argument( '--all', '-a', action='store_true', help='Updates the message files for all existing locales.', ) parser.add_argument( '--extension', '-e', dest='extensions', action='append', help='The file extension(s) to examine (default: "html,txt,py", or "js" ' 'if the domain is "djangojs"). 
Separate multiple extensions with ' 'commas, or use -e multiple times.', ) parser.add_argument( '--symlinks', '-s', action='store_true', help='Follows symlinks to directories when examining source code ' 'and templates for translation strings.', ) parser.add_argument( '--ignore', '-i', action='append', dest='ignore_patterns', default=[], metavar='PATTERN', help='Ignore files or directories matching this glob-style pattern. ' 'Use multiple times to ignore more.', ) parser.add_argument( '--no-default-ignore', action='store_false', dest='use_default_ignore_patterns', help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.", ) parser.add_argument( '--no-wrap', action='store_true', help="Don't break long message lines into several lines.", ) parser.add_argument( '--no-location', action='store_true', help="Don't write '#: filename:line' lines.", ) parser.add_argument( '--add-location', choices=('full', 'file', 'never'), const='full', nargs='?', help=( "Controls '#: filename:line' lines. If the option is 'full' " "(the default if not given), the lines include both file name " "and line number. If it's 'file', the line number is omitted. If " "it's 'never', the lines are suppressed (same as --no-location). " "--add-location requires gettext 0.19 or newer." ), ) parser.add_argument( '--no-obsolete', action='store_true', help="Remove obsolete message strings.", ) parser.add_argument( '--keep-pot', action='store_true', help="Keep .pot file after making messages. Useful when debugging.", ) def handle(self, *args, **options): locale = options['locale'] exclude = options['exclude'] self.domain = options['domain'] self.verbosity = options['verbosity'] process_all = options['all'] extensions = options['extensions'] self.symlinks = options['symlinks'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += ['CVS', '.*', '*~', '*.pyc'] self.ignore_patterns = list(set(ignore_patterns)) # Avoid messing with mutable class variables if options['no_wrap']: self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap'] self.msguniq_options = self.msguniq_options[:] + ['--no-wrap'] self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap'] self.xgettext_options = self.xgettext_options[:] + ['--no-wrap'] if options['no_location']: self.msgmerge_options = self.msgmerge_options[:] + ['--no-location'] self.msguniq_options = self.msguniq_options[:] + ['--no-location'] self.msgattrib_options = self.msgattrib_options[:] + ['--no-location'] self.xgettext_options = self.xgettext_options[:] + ['--no-location'] if options['add_location']: if self.gettext_version < (0, 19): raise CommandError( "The --add-location option requires gettext 0.19 or later. " "You have %s." 
% '.'.join(str(x) for x in self.gettext_version) ) arg_add_location = "--add-location=%s" % options['add_location'] self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location] self.msguniq_options = self.msguniq_options[:] + [arg_add_location] self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location] self.xgettext_options = self.xgettext_options[:] + [arg_add_location] self.no_obsolete = options['no_obsolete'] self.keep_pot = options['keep_pot'] if self.domain not in ('django', 'djangojs'): raise CommandError("currently makemessages only supports domains " "'django' and 'djangojs'") if self.domain == 'djangojs': exts = extensions or ['js'] else: exts = extensions or ['html', 'txt', 'py'] self.extensions = handle_extensions(exts) if (locale is None and not exclude and not process_all) or self.domain is None: raise CommandError( "Type '%s help %s' for usage information." % (os.path.basename(sys.argv[0]), sys.argv[1]) ) if self.verbosity > 1: self.stdout.write( 'examining files with the extensions: %s\n' % get_text_list(list(self.extensions), 'and') ) self.invoked_for_django = False self.locale_paths = [] self.default_locale_path = None if os.path.isdir(os.path.join('conf', 'locale')): self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))] self.default_locale_path = self.locale_paths[0] self.invoked_for_django = True else: if self.settings_available: self.locale_paths.extend(settings.LOCALE_PATHS) # Allow to run makemessages inside an app dir if os.path.isdir('locale'): self.locale_paths.append(os.path.abspath('locale')) if self.locale_paths: self.default_locale_path = self.locale_paths[0] os.makedirs(self.default_locale_path, exist_ok=True) # Build locale list looks_like_locale = re.compile(r'[a-z]{2}') locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path)) all_locales = [ lang_code for lang_code in map(os.path.basename, locale_dirs) if looks_like_locale.match(lang_code) ] # Account for excluded locales if process_all: locales = all_locales else: locales = locale or all_locales locales = set(locales).difference(exclude) if locales: check_programs('msguniq', 'msgmerge', 'msgattrib') check_programs('xgettext') try: potfiles = self.build_potfiles() # Build po files for each selected locale for locale in locales: if self.verbosity > 0: self.stdout.write("processing locale %s\n" % locale) for potfile in potfiles: self.write_po_file(potfile, locale) finally: if not self.keep_pot: self.remove_potfiles() @cached_property def gettext_version(self): # Gettext tools will output system-encoded bytestrings instead of UTF-8, # when looking up the version. It's especially a problem on Windows. out, err, status = popen_wrapper( ['xgettext', '--version'], stdout_encoding=DEFAULT_LOCALE_ENCODING, ) m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out) if m: return tuple(int(d) for d in m.groups() if d is not None) else: raise CommandError("Unable to get gettext version. Is it installed?") @cached_property def settings_available(self): try: settings.LOCALE_PATHS except ImproperlyConfigured: if self.verbosity > 1: self.stderr.write("Running without configured settings.") return False return True def build_potfiles(self): """ Build pot files and apply msguniq to them. 
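        Return the list of paths of the .pot files that were written, one per
        locale directory that produced messages.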
""" file_list = self.find_files(".") self.remove_potfiles() self.process_files(file_list) potfiles = [] for path in self.locale_paths: potfile = os.path.join(path, '%s.pot' % self.domain) if not os.path.exists(potfile): continue args = ['msguniq'] + self.msguniq_options + [potfile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msguniq\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) msgs = normalize_eols(msgs) with open(potfile, 'w', encoding='utf-8') as fp: fp.write(msgs) potfiles.append(potfile) return potfiles def remove_potfiles(self): for path in self.locale_paths: pot_path = os.path.join(path, '%s.pot' % self.domain) if os.path.exists(pot_path): os.unlink(pot_path) def find_files(self, root): """ Get all files in the given root. Also check that there is a matching locale dir for each file. """ all_files = [] ignored_roots = [] if self.settings_available: ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p] for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks): for dirname in dirnames[:]: if (is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), self.ignore_patterns) or os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots): dirnames.remove(dirname) if self.verbosity > 1: self.stdout.write('ignoring directory %s\n' % dirname) elif dirname == 'locale': dirnames.remove(dirname) self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname)) for filename in filenames: file_path = os.path.normpath(os.path.join(dirpath, filename)) file_ext = os.path.splitext(filename)[1] if file_ext not in self.extensions or is_ignored_path(file_path, self.ignore_patterns): if self.verbosity > 1: self.stdout.write('ignoring file %s in %s\n' % (filename, dirpath)) else: locale_dir = None for path in self.locale_paths: if os.path.abspath(dirpath).startswith(os.path.dirname(path)): locale_dir = path break locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR all_files.append(self.translatable_file_class(dirpath, filename, locale_dir)) return sorted(all_files) def process_files(self, file_list): """ Group translatable files by locale directory and run pot file build process for each group. """ file_groups = {} for translatable in file_list: file_group = file_groups.setdefault(translatable.locale_dir, []) file_group.append(translatable) for locale_dir, files in file_groups.items(): self.process_locale_dir(locale_dir, files) def process_locale_dir(self, locale_dir, files): """ Extract translatable literals from the specified files, creating or updating the POT file for a given locale directory. Use the xgettext GNU gettext utility. 
""" build_files = [] for translatable in files: if self.verbosity > 1: self.stdout.write('processing file %s in %s\n' % ( translatable.file, translatable.dirpath )) if self.domain not in ('djangojs', 'django'): continue build_file = self.build_file_class(self, self.domain, translatable) try: build_file.preprocess() except UnicodeDecodeError as e: self.stdout.write( 'UnicodeDecodeError: skipped file %s in %s (reason: %s)' % ( translatable.file, translatable.dirpath, e, ) ) continue build_files.append(build_file) if self.domain == 'djangojs': is_templatized = build_file.is_templatized args = [ 'xgettext', '-d', self.domain, '--language=%s' % ('C' if is_templatized else 'JavaScript',), '--keyword=gettext_noop', '--keyword=gettext_lazy', '--keyword=ngettext_lazy:1,2', '--keyword=pgettext:1c,2', '--keyword=npgettext:1c,2,3', '--output=-', ] elif self.domain == 'django': args = [ 'xgettext', '-d', self.domain, '--language=Python', '--keyword=gettext_noop', '--keyword=gettext_lazy', '--keyword=ngettext_lazy:1,2', '--keyword=ugettext_noop', '--keyword=ugettext_lazy', '--keyword=ungettext_lazy:1,2', '--keyword=pgettext:1c,2', '--keyword=npgettext:1c,2,3', '--keyword=pgettext_lazy:1c,2', '--keyword=npgettext_lazy:1c,2,3', '--output=-', ] else: return input_files = [bf.work_path for bf in build_files] with NamedTemporaryFile(mode='w+') as input_files_list: input_files_list.write(('\n'.join(input_files))) input_files_list.flush() args.extend(['--files-from', input_files_list.name]) args.extend(self.xgettext_options) msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: for build_file in build_files: build_file.cleanup() raise CommandError( 'errors happened while running xgettext on %s\n%s' % ('\n'.join(input_files), errors) ) elif self.verbosity > 0: # Print warnings self.stdout.write(errors) if msgs: if locale_dir is NO_LOCALE_DIR: file_path = os.path.normpath(build_files[0].path) raise CommandError( 'Unable to find a locale path to store translations for ' 'file %s' % file_path ) for build_file in build_files: msgs = build_file.postprocess_messages(msgs) potfile = os.path.join(locale_dir, '%s.pot' % self.domain) write_pot_file(potfile, msgs) for build_file in build_files: build_file.cleanup() def write_po_file(self, potfile, locale): """ Create or update the PO file for self.domain and `locale`. Use contents of the existing `potfile`. Use msgmerge and msgattrib GNU gettext utilities. """ basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES') os.makedirs(basedir, exist_ok=True) pofile = os.path.join(basedir, '%s.po' % self.domain) if os.path.exists(pofile): args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msgmerge\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) else: with open(potfile, encoding='utf-8') as fp: msgs = fp.read() if not self.invoked_for_django: msgs = self.copy_plural_forms(msgs, locale) msgs = normalize_eols(msgs) msgs = msgs.replace( "#. 
#-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "") with open(pofile, 'w', encoding='utf-8') as fp: fp.write(msgs) if self.no_obsolete: args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msgattrib\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) def copy_plural_forms(self, msgs, locale): """ Copy plural forms header contents from a Django catalog of locale to the msgs string, inserting it at the right place. msgs should be the contents of a newly created .po file. """ django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__))) if self.domain == 'djangojs': domains = ('djangojs', 'django') else: domains = ('django',) for domain in domains: django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain) if os.path.exists(django_po): with open(django_po, encoding='utf-8') as fp: m = plural_forms_re.search(fp.read()) if m: plural_form_line = m.group('value') if self.verbosity > 1: self.stdout.write("copying plural forms: %s\n" % plural_form_line) lines = [] found = False for line in msgs.splitlines(): if not found and (not line or plural_forms_re.search(line)): line = plural_form_line found = True lines.append(line) msgs = '\n'.join(lines) break return msgs
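# Usage sketch (illustrative, not part of Django): invoking makemessages from
# code, assuming it runs from a project or app directory that has (or can
# create) a locale/ tree. The 'de' locale is an arbitrary example.
#
#   from django.core.management import call_command
#   call_command('makemessages', locale=['de'], domain='django', verbosity=1)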